diff --git a/alibabacloud_oss_v2/_client.py b/alibabacloud_oss_v2/_client.py
index 3f9b659..aa75c16 100644
--- a/alibabacloud_oss_v2/_client.py
+++ b/alibabacloud_oss_v2/_client.py
@@ -20,7 +20,7 @@
from .types import (
Retryer,
CredentialsProvider,
- HttpClient,
+ HttpClient, AsyncHttpClient,
HttpRequest,
HttpResponse,
SigningContext,
@@ -100,7 +100,7 @@ def __init__(
retryer: Optional[Retryer] = None,
signer: Optional[Signer] = None,
credentials_provider: Optional[CredentialsProvider] = None,
- http_client: Optional[Union[HttpClient]] = None,
+ http_client: Optional[Union[HttpClient, AsyncHttpClient]] = None,
address_style: Optional[AddressStyle] = None,
readwrite_timeout: Optional[Union[int, float]] = None,
response_handlers: Optional[List] = None,
@@ -211,7 +211,7 @@ def verify_operation(self, op_input: OperationInput, options: _Options) -> None:
def apply_operation(self, options: _Options, op_input: OperationInput) -> None:
"""apply operation"""
self._apply_operation_options(options) # pylint: disable=no-member
- _apply_operation_metadata(op_input, options)
+ self._apply_operation_metadata(op_input, options)
def build_request_context(self, op_input: OperationInput, options: _Options, inner: _InnerOptions
@@ -377,6 +377,15 @@ def service_error_response_handler(response: HttpResponse) -> None:
options.response_handlers = handlers
+ def _apply_operation_metadata(self, op_input: OperationInput, options: _Options) -> None:
+ handlers = op_input.op_metadata.get('opm-response-handler', None)
+ if handlers is not None:
+ options.response_handlers.extend(handlers)
+
+ stream = op_input.op_metadata.get('response-stream', None)
+ if stream is not None:
+ options.response_stream = stream
+
def _sent_request(self, op_input: OperationInput, options: _Options) -> OperationOutput:
context = self.build_request_context(op_input, options, self._inner)
response = self._sent_http_request(context, options)
@@ -592,15 +601,6 @@ def _resolve_cloud_box(config: Config, options: _Options) -> None:
options.product = defaults.CLOUD_BOX_PRODUCT
-def _apply_operation_metadata(op_input: OperationInput, options: _Options) -> None:
- handlers = op_input.op_metadata.get('opm-response-handler', None)
- if handlers is not None:
- options.response_handlers.extend(handlers)
-
- stream = op_input.op_metadata.get('response-stream', None)
- if stream is not None:
- options.response_stream = stream
-
def _build_url(op_input: OperationInput, options: _Options) -> str:
host = ""
diff --git a/alibabacloud_oss_v2/aio/__init__.py b/alibabacloud_oss_v2/aio/__init__.py
new file mode 100644
index 0000000..8f01b6a
--- /dev/null
+++ b/alibabacloud_oss_v2/aio/__init__.py
@@ -0,0 +1 @@
+from .client import AsyncClient
diff --git a/alibabacloud_oss_v2/aio/_aioclient.py b/alibabacloud_oss_v2/aio/_aioclient.py
new file mode 100644
index 0000000..b4a077a
--- /dev/null
+++ b/alibabacloud_oss_v2/aio/_aioclient.py
@@ -0,0 +1,399 @@
+import copy
+import time
+import asyncio
+
+import base64
+import re
+import inspect
+from typing import Any, Optional, Dict, AsyncIterator, AsyncIterable
+from urllib.parse import urlencode, quote
+from xml.etree import ElementTree as ET
+from .. import exceptions
+from .. import serde
+from .. import utils
+
+from ..credentials import AnonymousCredentialsProvider
+from ..config import Config
+from .._client import (
+ _ClientImplMixIn,
+ _Options,
+ _InnerOptions,
+ _build_url
+)
+from ..types import (
+ AsyncHttpResponse,
+ SigningContext,
+ OperationInput,
+ OperationOutput,
+ HttpRequest,
+ BodyType
+)
+from .transport import AioHttpClient
+from . import aio_utils
+
+class _AsyncMarkedBody:
+ def __init__(
+ self,
+ body: BodyType,
+ ) -> None:
+ self._body = body
+ self._io_curr: int = 0
+ self._is_fileobj = False
+ if body is None:
+ self._seekable = True
+ elif isinstance(body, aio_utils.TeeAsyncIterator):
+ self._seekable = body.seekable()
+ elif utils.is_fileobj(body):
+ self._seekable = utils.is_seekable(body)
+ self._is_fileobj = True
+ elif isinstance(body, AsyncIterator):
+ self._seekable = False
+ elif isinstance(body, (str, bytes, AsyncIterable)):
+ self._seekable = True
+ else:
+ self._seekable = False
+
+ def is_seekable(self) -> bool:
+        """is seekable
+ """
+ return self._seekable
+
+ def mark(self) -> None:
+ """Set the current marked position in the stream.
+ """
+ if self.is_seekable() is False:
+ return
+
+ if self._is_fileobj:
+ self._io_curr = self._body.tell()
+
+ def reset(self) -> None:
+ """Resets the buffer to the marked position.
+ """
+ if self.is_seekable() is False:
+ return
+
+ if isinstance(self._body, aio_utils.TeeAsyncIterator):
+ self._body.reset()
+
+ if self._is_fileobj:
+ self._body.seek(self._io_curr, 0)
+
+class _AsyncClientImpl(_ClientImplMixIn):
+ """ASync API Client for common API."""
+
+ def __init__(self, config: Config, **kwargs) -> None:
+ options, inner = self.resolve_config(config)
+ self.resolve_kwargs(options, **kwargs)
+
+ self._config = config
+ self._options = options
+ self._inner = inner
+
+ async def close(self):
+ """_summary_
+ """
+ if self._options.http_client is not None:
+ await self._options.http_client.close()
+
+ async def invoke_operation(self, op_input: OperationInput, **kwargs) -> OperationOutput:
+        """Common class interface invoke operation
+
+ Args:
+ op_input (OperationInput): _description_
+
+ Raises:
+ exceptions.OperationError: _description_
+
+ Returns:
+ OperationOutput: _description_
+ """
+
+ options = copy.copy(self._options)
+ self.resolve_operation_kwargs(options, **kwargs)
+ self.apply_operation(options, op_input)
+
+ try:
+ self.verify_operation(op_input, options)
+ output = await self._sent_request(op_input, options)
+ except Exception as err:
+ raise exceptions.OperationError(
+ name=op_input.op_name,
+ error=err,
+ )
+
+ return output
+
+ def _resolve_httpclient(self, config: Config, options: _Options) -> None:
+ """httpclient"""
+ if options.http_client:
+ return
+
+ kwargs: Dict[str, Any] = {}
+
+ if bool(config.insecure_skip_verify):
+ kwargs["insecure_skip_verify"] = True
+
+ if bool(config.enabled_redirect):
+ kwargs["enabled_redirect"] = True
+
+ if config.connect_timeout:
+ kwargs["connect_timeout"] = config.connect_timeout
+
+ if config.readwrite_timeout:
+ kwargs["readwrite_timeout"] = config.readwrite_timeout
+
+ if config.proxy_host:
+ kwargs["proxy_host"] = config.proxy_host
+
+ options.http_client = AioHttpClient(**kwargs)
+
+
+ def _apply_operation_options(self, options: _Options) -> None:
+ # response handler
+ handlers = []
+
+ async def service_error_response_handler(response: AsyncHttpResponse) -> None:
+ """ check service error """
+ if response.status_code // 100 == 2:
+ return
+
+ if not response.is_stream_consumed:
+ await response.read()
+
+ raise await _to_service_error(response)
+
+        # insert service error response handler first
+ handlers.append(service_error_response_handler)
+
+ handlers.extend(options.response_handlers)
+
+ options.response_handlers = handlers
+
+ def _apply_operation_metadata(self, op_input: OperationInput, options: _Options) -> None:
+ handlers = op_input.op_metadata.get('opm-response-handler', None)
+ if handlers is not None:
+ options.response_handlers.extend(handlers)
+
+
+ def _build_request_context(self, op_input: OperationInput, options: _Options, inner: _InnerOptions
+ ) -> SigningContext:
+ """build request context
+ """
+ # host & path
+ url = _build_url(op_input, options)
+
+ # queries
+ if op_input.parameters is not None:
+ query = urlencode(op_input.parameters, quote_via=quote)
+ if len(query) > 0:
+ url = url + "?" + query
+
+ # build http request
+ request = HttpRequest(method=op_input.method, url=url)
+
+ # headers
+ request.headers.update(op_input.headers or {})
+
+ request.headers.update({'User-Agent': inner.user_agent})
+
+ # body
+ body = op_input.body or b''
+
+ # body tracker
+ if op_input.op_metadata is not None:
+ tracker = op_input.op_metadata.get("opm-request-body-tracker", None)
+ if tracker is not None:
+ writers = []
+ for t in tracker:
+ if hasattr(t, 'write'):
+ writers.append(t)
+ if len(writers) > 0:
+ body = aio_utils.TeeAsyncIterator.from_source(body, writers)
+
+ request.body = body
+
+ # signing context
+ context = SigningContext(
+ product=options.product,
+ region=options.region,
+ bucket=op_input.bucket,
+ key=op_input.key,
+ request=request,
+ )
+
+ if utils.safety_str(options.auth_method) == 'query':
+ context.auth_method_query = True
+
+ oss_date = request.headers.get('x-oss-date', None)
+ if oss_date is not None:
+ context.signing_time = serde.deserialize_httptime(oss_date)
+ if (expiration_time := op_input.op_metadata.get('expiration_time', None)) is not None:
+ context.expiration_time = expiration_time
+
+ context.sub_resource = op_input.op_metadata.get("sub-resource", [])
+
+ return context
+
+ async def _sent_request(self, op_input: OperationInput, options: _Options) -> OperationOutput:
+
+ context = self._build_request_context(op_input, options, self._inner)
+ response = await self._sent_http_request(context, options)
+
+ output = OperationOutput(
+ status=response.reason,
+ status_code=response.status_code,
+ headers=response.headers,
+ op_input=op_input,
+ http_response=response
+ )
+
+ # save other info by Metadata filed
+ # output.op_metadata
+
+ # update clock offset
+
+ return output
+
+ async def _sent_http_request(self, context: SigningContext, options: _Options) -> AsyncHttpResponse:
+ request = context.request
+ retryer = options.retryer
+ max_attempts = self.retry_max_attempts(options)
+
+ # operation timeout
+ dealline = None
+ if isinstance(options.operation_timeout, (int, float)):
+ dealline = time.time() + options.operation_timeout
+
+ # Mark body
+ marked_body = _AsyncMarkedBody(request.body)
+ marked_body.mark()
+
+ reset_time = context.signing_time is None
+ error: Optional[Exception] = None
+ response: AsyncHttpResponse = None
+ for tries in range(max_attempts):
+ if tries > 0:
+ try:
+ marked_body.reset()
+ except: # pylint: disable=bare-except
+                    # if meets reset error, just ignores, and returns last error
+ break
+
+ if reset_time:
+ context.signing_time = None
+
+ dealy = retryer.retry_delay(tries, error)
+ await asyncio.sleep(dealy)
+
+ # operation timeout
+ if dealline is not None and (time.time() > dealline):
+ break
+
+ try:
+ error = None
+ response = await self._sent_http_request_once(context, options)
+ break
+ except Exception as e:
+ error = e
+
+ # operation timeout
+ if dealline is not None and (time.time() > dealline):
+ break
+
+ if marked_body.is_seekable() is False:
+ break
+
+ if not retryer.is_error_retryable(error):
+ break
+
+ if error is not None:
+ raise error
+
+ return response
+
+ async def _sent_http_request_once(self, context: SigningContext, options: _Options) -> AsyncHttpResponse:
+ # sign request
+ if not isinstance(options.credentials_provider, AnonymousCredentialsProvider):
+ try:
+ cred = options.credentials_provider.get_credentials()
+ except Exception as e:
+ raise exceptions.CredentialsFetchError(error=e)
+
+ if cred is None or not cred.has_keys():
+ raise exceptions.CredentialsEmptyError()
+
+ # update credentials
+ context.credentials = cred
+
+ options.signer.sign(context)
+
+ # send
+ send_kwargs = {}
+ #if options.response_stream is not None:
+ # send_kwargs['stream'] = options.response_stream
+
+ response = await options.http_client.send(context.request, **send_kwargs)
+
+ # response handler
+ for h in options.response_handlers:
+ if inspect.iscoroutinefunction(h):
+ await h(response)
+ else:
+ h(response)
+
+ return response
+
+
+async def _to_service_error(response: AsyncHttpResponse) -> exceptions.ServiceError:
+ timestamp = serde.deserialize_httptime(response.headers.get('Date'))
+ content = response.content or b''
+ await response.close()
+
+ error_fileds = {}
+ code = 'BadErrorResponse'
+ message = ''
+ ec = ''
+ request_id = ''
+ err_body = b''
+ try:
+ err_body = content
+ if len(err_body) == 0:
+ err_body = base64.b64decode(
+ response.headers.get('x-oss-err', ''))
+ root = ET.fromstring(err_body)
+ if root.tag == 'Error':
+ for child in root:
+ error_fileds[child.tag] = child.text
+ message = error_fileds.get('Message', '')
+ code = error_fileds.get('Code', '')
+ ec = error_fileds.get('EC', '')
+ request_id = error_fileds.get('RequestId', '')
+ else:
+ message = f'Expect root node Error, but get {root.tag}.'
+ except ET.ParseError as e:
+ errstr = err_body.decode()
+        if '<Error>' in errstr and '</Error>' in errstr:
+            m = re.search('<Code>(.*)</Code>', errstr)
+            if m:
+                code = m.group(1)
+            m = re.search('<Message>(.*)</Message>', errstr)
+            if m:
+                message = m.group(1)
+ if len(message) == 0:
+ message = f'Failed to parse xml from response body due to: {str(e)}. With part response body {err_body[:256]}.'
+ except Exception as e:
+ message = f'The body of the response was not readable, due to : {str(e)}.'
+
+ return exceptions.ServiceError(
+ status_code=response.status_code,
+ code=code,
+ message=message,
+ request_id=request_id or response.headers.get('x-oss-request-id', ''),
+ ec=ec or response.headers.get('x-oss-ec', ''),
+ timestamp=timestamp,
+ request_target=f'{response.request.method} {response.request.url}',
+ snapshot=content,
+ headers=response.headers,
+ error_fileds=error_fileds
+ )
diff --git a/alibabacloud_oss_v2/aio/aio_utils.py b/alibabacloud_oss_v2/aio/aio_utils.py
new file mode 100644
index 0000000..9d688f2
--- /dev/null
+++ b/alibabacloud_oss_v2/aio/aio_utils.py
@@ -0,0 +1,358 @@
+"""utils for stream wrapper"""
+import os
+from typing import Optional, IO, List, AsyncIterable, Any, AsyncIterator
+from .. import utils
+from ..types import AsyncStreamBody, AsyncHttpResponse
+from ..exceptions import ResponseNotReadError
+
+# pylint: disable=no-member
+# pylint: disable=protected-access
+def is_seekable_io(fileobj):
+ """is seekable io
+ """
+ if hasattr(fileobj, 'seekable'):
+ return fileobj.seekable()
+
+ if hasattr(fileobj, 'seek') and hasattr(fileobj, 'tell'):
+ try:
+ fileobj.seek(0, os.SEEK_CUR)
+ return True
+ except OSError:
+ return False
+
+ return False
+
+
+class TeeAsyncIterator:
+ """A AsyncIterator that writes to w what it reads from source
+ """
+
+ def __aiter__(self):
+ return self.aiter_bytes()
+
+ async def __anext__(self):
+ d = await self.anext()
+ if self._writers is not None:
+ for w in self._writers:
+ w.write(d)
+ return d
+
+ def seekable(self):
+ """Is there a file pointer offset
+ """
+ return self._seekable
+
+ def reset(self) -> None:
+ """Resets the buffer to the marked position.
+ """
+ if self._writers is not None:
+ for w in self._writers:
+                if hasattr(w, 'reset'):
+ w.reset()
+
+ @staticmethod
+ def from_source(source: Any, writers: List[Any], **kwargs: Any) -> "TeeAsyncIterator":
+ """Converts source to TeeAsyncIterator
+
+ Args:
+ source (Any): what it reads from
+ writers (List[Any]): what it writes to
+
+ Raises:
+ TypeError: If the type of source is is not supported, raises error.
+
+ Returns:
+ TeeAsyncIterator: A AsyncIterator that writes to w what it reads from source
+ """
+
+ block_size = kwargs.get("block_size", 64 * 1024)
+
+ if isinstance(source, str):
+ return _TeeAsyncIteratorStr(source, writers, block_size)
+
+ if isinstance(source, bytes):
+ return _TeeAsyncIteratorBytes(source, writers, block_size)
+
+ # file-like object
+ if hasattr(source, 'seek') and hasattr(source, 'read'):
+ data_len = utils.guess_content_length(source)
+ if data_len is not None:
+ return _TeeAsyncIteratorIOLen(source, data_len, writers, block_size)
+ return _TeeAsyncIteratorIO(source, writers, block_size)
+
+ if isinstance(source, AsyncIterable):
+ return _TeeAsyncIteratorIter(source, writers)
+
+ raise TypeError(
+ f'Invalid type for body. Expected str, bytes, file-like object, got {type(source)}')
+
+
+class _TeeAsyncIteratorStr(TeeAsyncIterator):
+ """Iterator str information
+ """
+
+ def __init__(
+ self,
+ data: str,
+ writers: List[Any],
+ block_size: Optional[int] = None
+ ) -> None:
+ self._data = data
+ self._writers = writers
+ self._block_size = block_size
+ self._offset = 0
+ self._total = 0
+ self._seekable = True
+ self._content = None
+
+ def __len__(self):
+ return len(self._data)
+
+ def aiter_bytes(self):
+ """iter bytes
+ """
+ self._content = self._data.encode()
+ self._total = len(self._content)
+ self._offset = 0
+ return self
+
+ async def anext(self):
+ """Next data
+ """
+ if self._offset >= self._total:
+ raise StopAsyncIteration
+
+ remains = self._total - self._offset
+ remains = min(self._block_size, remains)
+
+ ret = self._content[self._offset: self._offset + remains]
+ self._offset += remains
+
+ return ret
+
+
+class _TeeAsyncIteratorBytes(TeeAsyncIterator):
+ """Iterator bytes information
+ """
+
+ def __init__(
+ self,
+ data: bytes,
+ writers: List[Any],
+ block_size: Optional[int] = None
+ ) -> None:
+ self._data = data
+ self._writers = writers
+ self._block_size = block_size
+ self._offset = 0
+ self._total = 0
+ self._seekable = True
+ self._content = None
+
+ def __len__(self):
+ return len(self._data)
+
+ def aiter_bytes(self):
+ """iter bytes
+ """
+ self._content = self._data
+ self._total = len(self._content)
+ self._offset = 0
+ return self
+
+ async def anext(self):
+ """Next data
+ """
+ if self._offset >= self._total:
+ raise StopAsyncIteration
+
+ remains = self._total - self._offset
+ remains = min(self._block_size, remains)
+
+ ret = self._content[self._offset: self._offset + remains]
+ self._offset += remains
+
+ return ret
+
+class _TeeAsyncIteratorIOLen(TeeAsyncIterator):
+ """Iterator io len information
+ """
+
+ def __init__(
+ self,
+ data: IO,
+ total: int,
+ writers: List[Any],
+ block_size: Optional[int] = None
+ ) -> None:
+ self._data = data
+ self._total = total
+ self._writers = writers
+ self._block_size = block_size
+ seekable = is_seekable_io(data)
+ self._start_offset = 0 if not seekable else data.seek(0, os.SEEK_CUR)
+ self._seekable = seekable
+ self._check_type_done = False
+ self._do_cast = False
+
+ def __len__(self):
+ return self._total
+
+ def aiter_bytes(self):
+ """iter bytes
+ """
+ if self._seekable:
+ self._data.seek(self._start_offset, os.SEEK_SET)
+
+ return self
+
+ async def anext(self):
+ """Next data
+ """
+ d = self._data.read(self._block_size)
+
+ if d:
+ if not self._check_type_done:
+ self._check_type_done = True
+ if isinstance(d, str):
+ self._do_cast = True
+
+ if self._do_cast:
+ return d.encode()
+ else:
+ return d
+
+ raise StopAsyncIteration
+
+class _TeeAsyncIteratorIO(TeeAsyncIterator):
+ """Iterator io information
+ """
+
+ def __init__(
+ self,
+ data: IO,
+ writers: List[Any],
+ block_size: Optional[int] = None
+ ) -> None:
+ self._data = data
+ self._writers = writers
+ self._block_size = block_size
+
+ seekable = is_seekable_io(data)
+ self._start_offset = 0 if not seekable else data.seek(0, os.SEEK_CUR)
+ self._total = utils.guess_content_length(data)
+ self._seekable = seekable
+ self._check_type_done = False
+ self._do_cast = False
+
+ if self._total is not None:
+            setattr(self, '__len__', lambda: self._total)
+
+ def aiter_bytes(self):
+ """iter bytes
+ """
+ if self._seekable:
+ self._data.seek(self._start_offset, os.SEEK_SET)
+
+ return self
+
+ async def anext(self):
+ """Next data
+ """
+ d = self._data.read(self._block_size)
+
+ if d:
+ if not self._check_type_done:
+ self._check_type_done = True
+ if isinstance(d, str):
+ self._do_cast = True
+
+ if self._do_cast:
+ return d.encode()
+ else:
+ return d
+
+ raise StopAsyncIteration
+
+class _TeeAsyncIteratorIter(TeeAsyncIterator):
+ """Iterator iter information
+ """
+
+ def __init__(
+ self,
+ data: AsyncIterable[bytes],
+ writers: List[Any],
+ ) -> None:
+ self._data = data
+ self._writers = writers
+ self._iter = None
+ self._seekable = not isinstance(self._data, AsyncIterator)
+ self._check_type_done = False
+ self._cast_func = None
+
+ def aiter_bytes(self):
+ """iter bytes
+ """
+ if isinstance(self._data, AsyncIterator):
+ self._iter = self._data
+ else:
+            self._iter = self._data.__aiter__()
+ return self
+
+ async def anext(self):
+ """Next data
+ """
+        return self._to_bytes(await self._iter.__anext__())
+
+ def _to_bytes(self, d) -> bytes:
+ if d is None:
+ return d
+ if not self._check_type_done:
+ self._check_type_done = True
+ if isinstance(d, str):
+ self._cast_func = lambda x: x.encode()
+
+ if self._cast_func:
+ return self._cast_func(d)
+
+ return d
+
+class AsyncStreamBodyReader(AsyncStreamBody):
+ """
+ A StreamBodyReader that convert AsyncHttpResponse type to AsyncStreamBody type.
+ """
+ def __init__(
+ self,
+ response: AsyncHttpResponse,
+ ) -> None:
+ self._response = response
+
+ async def __aenter__(self) -> "AsyncStreamBodyReader":
+ await self._response.__aenter__()
+ return self
+
+ async def __aexit__(self, *args: Any) -> None:
+        await self._response.__aexit__(*args)
+
+ @property
+ def is_closed(self) -> bool:
+ return self._response.is_closed
+
+ @property
+ def is_stream_consumed(self) -> bool:
+ return self._response.is_stream_consumed
+
+ @property
+ def content(self) -> bytes:
+ if not self._response.is_stream_consumed:
+ raise ResponseNotReadError()
+ return self._response.content
+
+ async def read(self) -> bytes:
+ return await self._response.read()
+
+ async def close(self) -> None:
+ await self._response.close()
+
+ async def iter_bytes(self, **kwargs: Any) -> AsyncIterator[bytes]:
+ return self._response.iter_bytes(**kwargs)
diff --git a/alibabacloud_oss_v2/aio/client.py b/alibabacloud_oss_v2/aio/client.py
new file mode 100644
index 0000000..12af6dd
--- /dev/null
+++ b/alibabacloud_oss_v2/aio/client.py
@@ -0,0 +1,682 @@
+# pylint: disable=line-too-long
+"""Client used to interact with **Alibaba Cloud Object Storage Service (OSS)**."""
+from typing import Optional, Type
+from types import TracebackType
+from ..config import Config
+from ..types import OperationInput, OperationOutput
+from .. import models
+from .. import exceptions
+from ._aioclient import _AsyncClientImpl
+from . import operations
+
+class AsyncClient:
+ """AsyncClient
+ """
+
+ def __init__(self, config: Config, **kwargs) -> None:
+ """Initialize Client
+
+ Args:
+ config (Config): _description_
+ """
+ self._client = _AsyncClientImpl(config, **kwargs)
+
+ def __repr__(self) -> str:
+        return "<AsyncClient>"
+
+ async def __aenter__(self) -> "AsyncClient":
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]] = None,
+ exc_value: Optional[BaseException] = None,
+ traceback: Optional[TracebackType] = None,
+ ) -> None:
+ await self.close()
+
+
+ async def close(self):
+ """_summary_
+ """
+ await self._client.close()
+
+ async def invoke_operation(self, op_input: OperationInput, **kwargs
+ ) -> OperationOutput:
+ """invoke operation
+
+ Args:
+ op_input (OperationInput): _description_
+
+ Returns:
+ OperationOutput: _description_
+ """
+ return await self._client.invoke_operation(op_input, **kwargs)
+
+ # sevice
+ async def list_buckets(self, request: models.ListBucketsRequest, **kwargs
+ ) -> models.ListBucketsResult:
+ """
+ Lists all buckets that belong to your Alibaba Cloud account.
+
+ Args:
+ request (ListBucketsRequest): Request parameters for ListBuckets operation.
+
+ Returns:
+ ListBucketsResult: Response result for ListBuckets operation.
+ """
+
+ return await operations.list_buckets(self._client, request, **kwargs)
+
+ # region
+ async def describe_regions(self, request: models.DescribeRegionsRequest, **kwargs
+ ) -> models.DescribeRegionsResult:
+ """
+ Queries the endpoints of all regions supported by Object Storage Service (OSS)
+ or a specific region, including public endpoints, internal endpoints,
+ and acceleration endpoints.
+
+ Args:
+ request (DescribeRegionsRequest): Request parameters for DescribeRegions operation.
+
+ Returns:
+ DescribeRegionsResult: Response result for DescribeRegions operation.
+ """
+
+ return await operations.describe_regions(self._client, request, **kwargs)
+
+ # bucket
+ async def put_bucket(self, request: models.PutBucketRequest, **kwargs
+ ) -> models.PutBucketResult:
+ """
+ Creates a bucket.
+
+ Args:
+ request (PutBucketRequest): Request parameters for PutBucket operation.
+
+ Returns:
+ PutBucketResult: Response result for PutBucket operation.
+ """
+
+ return await operations.put_bucket(self._client, request, **kwargs)
+
+ async def delete_bucket(self, request: models.DeleteBucketRequest, **kwargs
+ ) -> models.DeleteBucketResult:
+ """
+ Deletes a bucket.
+
+ Args:
+ request (DeleteBucketRequest): Request parameters for DeleteBucket operation.
+
+ Returns:
+ DeleteBucketResult: Response result for DeleteBucket operation.
+ """
+
+ return await operations.delete_bucket(self._client, request, **kwargs)
+
+ async def list_objects(self, request: models.ListObjectsRequest, **kwargs
+ ) -> models.ListObjectsResult:
+ """
+ Lists information about all objects in an Object Storage Service (OSS) bucket.
+
+ Args:
+ request (ListObjectsRequest): Request parameters for ListObjects operation.
+
+ Returns:
+ ListObjectsResult: Response result for ListObjects operation.
+ """
+
+ return await operations.list_objects(self._client, request, **kwargs)
+
+ async def put_bucket_acl(self, request: models.PutBucketAclRequest, **kwargs
+ ) -> models.PutBucketAclResult:
+ """
+ You can call this operation to configure or modify the ACL of a bucket.
+
+ Args:
+ request (PutBucketAclRequest): Request parameters for PutBucketAcl operation.
+
+ Returns:
+ PutBucketAclResult: Response result for PutBucketAcl operation.
+ """
+
+ return await operations.put_bucket_acl(self._client, request, **kwargs)
+
+ async def get_bucket_acl(self, request: models.GetBucketAclRequest, **kwargs
+ ) -> models.GetBucketAclResult:
+ """
+ You can call this operation to query the ACL of a bucket.
+ Only the bucket owner can query the ACL of the bucket.
+
+ Args:
+ request (GetBucketAclRequest): Request parameters for GetBucketAcl operation.
+
+ Returns:
+ GetBucketAclResult: Response result for GetBucketAcl operation.
+ """
+
+ return await operations.get_bucket_acl(self._client, request, **kwargs)
+
+ async def list_objects_v2(self, request: models.ListObjectsV2Request, **kwargs
+ ) -> models.ListObjectsV2Result:
+ """
+ Lists all objects in a bucket.
+
+ Args:
+ request (ListObjectsV2Request): Request parameters for ListObjectsV2 operation.
+
+ Returns:
+ ListObjectsV2Result: Response result for ListObjectsV2 operation.
+ """
+
+ return await operations.list_objects_v2(self._client, request, **kwargs)
+
+ async def get_bucket_stat(self, request: models.GetBucketStatRequest, **kwargs
+ ) -> models.GetBucketStatResult:
+ """
+ GetBucketStat Queries the storage capacity of a specified bucket and
+ the number of objects that are stored in the bucket.
+
+ Args:
+ request (GetBucketStatRequest): Request parameters for GetBucketStat operation.
+
+ Returns:
+ GetBucketStatResult: Response result for GetBucketStat operation.
+ """
+
+ return await operations.get_bucket_stat(self._client, request, **kwargs)
+
+ async def get_bucket_location(self, request: models.GetBucketLocationRequest, **kwargs
+ ) -> models.GetBucketLocationResult:
+ """
+ GetBucketLocation Queries the region of an Object Storage Service (OSS) bucket.
+
+ Args:
+ request (GetBucketLocationRequest): Request parameters for GetBucketLocation operation.
+
+ Returns:
+ GetBucketLocationResult: Response result for GetBucketLocation operation.
+ """
+
+ return await operations.get_bucket_location(self._client, request, **kwargs)
+
+ async def get_bucket_info(self, request: models.GetBucketInfoRequest, **kwargs
+ ) -> models.GetBucketInfoResult:
+ """
+ GetBucketInfo Queries information about a bucket.
+
+ Args:
+ request (GetBucketInfoRequest): Request parameters for GetBucketInfo operation.
+
+ Returns:
+ GetBucketInfoResult: Response result for GetBucketInfo operation.
+ """
+
+ return await operations.get_bucket_info(self._client, request, **kwargs)
+
+ async def put_bucket_versioning(self, request: models.PutBucketVersioningRequest, **kwargs
+ ) -> models.PutBucketVersioningResult:
+ """
+ PutBucketVersioning Configures the versioning state for a bucket.
+
+ Args:
+ request (PutBucketVersioningRequest): Request parameters for PutBucketVersioning operation.
+
+ Returns:
+ PutBucketVersioningResult: Response result for PutBucketVersioning operation.
+ """
+
+ return await operations.put_bucket_versioning(self._client, request, **kwargs)
+
+ async def get_bucket_versioning(self, request: models.GetBucketVersioningRequest, **kwargs
+ ) -> models.GetBucketVersioningResult:
+ """
+ GetBucketVersioning You can call this operation to query the versioning state of a bucket.
+
+ Args:
+ request (GetBucketVersioningRequest): Request parameters for GetBucketVersioning operation.
+
+ Returns:
+ GetBucketVersioningResult: Response result for GetBucketVersioning operation.
+ """
+
+ return await operations.get_bucket_versioning(self._client, request, **kwargs)
+
+ async def list_object_versions(self, request: models.ListObjectVersionsRequest, **kwargs
+ ) -> models.ListObjectVersionsResult:
+ """
+ ListObjectVersions Lists the versions of all objects in a bucket, including delete markers.
+
+ Args:
+ request (ListObjectVersionsRequest): Request parameters for ListObjectVersions operation.
+
+ Returns:
+ ListObjectVersionsResult: Response result for ListObjectVersions operation.
+ """
+
+ return await operations.list_object_versions(self._client, request, **kwargs)
+
+ # object
+ async def put_object(self, request: models.PutObjectRequest, **kwargs
+ ) -> models.PutObjectResult:
+ """
+ Uploads objects.
+
+ Args:
+ request (PutObjectRequest): Request parameters for PutObject operation.
+
+ Returns:
+ PutObjectResult: Response result for PutObject operation.
+ """
+
+ return await operations.put_object(self._client, request, **kwargs)
+
+ async def get_object(self, request: models.GetObjectRequest, **kwargs
+ ) -> models.GetObjectResult:
+ """
+ Queries an object. To call this operation, you must have read permissions on the object.
+
+ Args:
+ request (GetObjectRequest): Request parameters for GetObject operation.
+
+ Returns:
+ GetObjectResult: Response result for GetObject operation.
+ """
+
+ return await operations.get_object(self._client, request, **kwargs)
+
+ async def copy_object(self, request: models.CopyObjectRequest, **kwargs
+ ) -> models.CopyObjectResult:
+ """
+ Copies objects within a bucket or between buckets in the same region.
+
+ Args:
+ request (CopyObjectRequest): Request parameters for CopyObject operation.
+
+ Returns:
+ CopyObjectResult: Response result for CopyObject operation.
+ """
+
+ return await operations.copy_object(self._client, request, **kwargs)
+
+ async def append_object(self, request: models.AppendObjectRequest, **kwargs
+ ) -> models.AppendObjectResult:
+ """
+ Uploads an object by appending the object to an existing object.
+ Objects created by using the AppendObject operation are appendable objects.
+
+ Args:
+ request (AppendObjectRequest): Request parameters for AppendObject operation.
+
+ Returns:
+ AppendObjectResult: Response result for AppendObject operation.
+ """
+
+ return await operations.append_object(self._client, request, **kwargs)
+
+ async def delete_object(self, request: models.DeleteObjectRequest, **kwargs
+ ) -> models.DeleteObjectResult:
+ """
+ Deletes an object.
+
+ Args:
+ request (DeleteObjectRequest): Request parameters for DeleteObject operation.
+
+ Returns:
+ DeleteObjectResult: Response result for DeleteObject operation.
+ """
+
+ return await operations.delete_object(self._client, request, **kwargs)
+
+ async def delete_multiple_objects(self, request: models.DeleteMultipleObjectsRequest, **kwargs
+ ) -> models.DeleteMultipleObjectsResult:
+ """
+ Deletes multiple objects from a bucket.
+
+ Args:
+ request (DeleteMultipleObjectsRequest): Request parameters for DeleteMultipleObjects operation.
+
+ Returns:
+ DeleteMultipleObjectsResult: Response result for DeleteMultipleObjects operation.
+ """
+
+ return await operations.delete_multiple_objects(self._client, request, **kwargs)
+
+ async def head_object(self, request: models.HeadObjectRequest, **kwargs
+ ) -> models.HeadObjectResult:
+ """
+ Queries information about the object in a bucket.
+
+ Args:
+ request (HeadObjectRequest): Request parameters for HeadObject operation.
+
+ Returns:
+ HeadObjectResult: Response result for HeadObject operation.
+ """
+
+ return await operations.head_object(self._client, request, **kwargs)
+
+ async def get_object_meta(self, request: models.GetObjectMetaRequest, **kwargs
+ ) -> models.GetObjectMetaResult:
+ """
+ Queries the metadata of an object, including ETag, Size, and LastModified.
+
+ Args:
+ request (GetObjectMetaRequest): Request parameters for GetObjectMeta operation.
+
+ Returns:
+ GetObjectMetaResult: Response result for GetObjectMeta operation.
+ """
+
+ return await operations.get_object_meta(self._client, request, **kwargs)
+
+ async def restore_object(self, request: models.RestoreObjectRequest, **kwargs
+ ) -> models.RestoreObjectResult:
+ """
+ Restores Archive, Cold Archive, or Deep Cold Archive objects.
+
+ Args:
+ request (RestoreObjectRequest): Request parameters for RestoreObject operation.
+
+ Returns:
+ RestoreObjectResult: Response result for RestoreObject operation.
+ """
+
+ return await operations.restore_object(self._client, request, **kwargs)
+
+ async def put_object_acl(self, request: models.PutObjectAclRequest, **kwargs
+ ) -> models.PutObjectAclResult:
+ """
+ You can call this operation to modify the access control list (ACL) of an object.
+
+ Args:
+ request (PutObjectAclRequest): Request parameters for PutObjectAcl operation.
+
+ Returns:
+ PutObjectAclResult: Response result for PutObjectAcl operation.
+ """
+
+ return await operations.put_object_acl(self._client, request, **kwargs)
+
+ async def get_object_acl(self, request: models.GetObjectAclRequest, **kwargs
+ ) -> models.GetObjectAclResult:
+ """
+ Queries the access control list (ACL) of an object in a bucket.
+
+ Args:
+ request (GetObjectAclRequest): Request parameters for GetObjectAcl operation.
+
+ Returns:
+ GetObjectAclResult: Response result for GetObjectAcl operation.
+ """
+
+ return await operations.get_object_acl(self._client, request, **kwargs)
+
+ async def initiate_multipart_upload(self, request: models.InitiateMultipartUploadRequest, **kwargs
+ ) -> models.InitiateMultipartUploadResult:
+ """
+ Initiates a multipart upload task before you can upload data in parts to Object Storage Service (OSS).
+
+ Args:
+ request (InitiateMultipartUploadRequest): Request parameters for InitiateMultipartUpload operation.
+
+ Returns:
+ InitiateMultipartUploadResult: Response result for InitiateMultipartUpload operation.
+ """
+
+ return await operations.initiate_multipart_upload(self._client, request, **kwargs)
+
+ async def upload_part(self, request: models.UploadPartRequest, **kwargs
+ ) -> models.UploadPartResult:
+ """
+ Call the UploadPart interface to upload data in blocks (parts) based on the specified Object name and uploadId.
+
+ Args:
+ request (UploadPartRequest): Request parameters for UploadPart operation.
+
+ Returns:
+ UploadPartResult: Response result for UploadPart operation.
+ """
+
+ return await operations.upload_part(self._client, request, **kwargs)
+
+ async def upload_part_copy(self, request: models.UploadPartCopyRequest, **kwargs
+ ) -> models.UploadPartCopyResult:
+ """
+ You can call this operation to copy data from an existing object to upload a part
+ by adding a x-oss-copy-request header to UploadPart.
+
+ Args:
+ request (UploadPartCopyRequest): Request parameters for UploadPartCopy operation.
+
+ Returns:
+ UploadPartCopyResult: Response result for UploadPartCopy operation.
+ """
+
+ return await operations.upload_part_copy(self._client, request, **kwargs)
+
+ async def complete_multipart_upload(self, request: models.CompleteMultipartUploadRequest, **kwargs
+ ) -> models.CompleteMultipartUploadResult:
+ """
+ Completes the multipart upload task of an object after all parts of the object are uploaded.
+
+ Args:
+ request (CompleteMultipartUploadRequest): Request parameters for CompleteMultipartUpload operation.
+
+ Returns:
+ CompleteMultipartUploadResult: Response result for CompleteMultipartUpload operation.
+ """
+
+ return await operations.complete_multipart_upload(self._client, request, **kwargs)
+
+ async def abort_multipart_upload(self, request: models.AbortMultipartUploadRequest, **kwargs
+ ) -> models.AbortMultipartUploadResult:
+ """
+ Cancels a multipart upload task and deletes the parts uploaded in the task.
+
+ Args:
+ request (AbortMultipartUploadRequest): Request parameters for AbortMultipartUpload operation.
+
+ Returns:
+ AbortMultipartUploadResult: Response result for AbortMultipartUpload operation.
+ """
+
+ return await operations.abort_multipart_upload(self._client, request, **kwargs)
+
+ async def list_multipart_uploads(self, request: models.ListMultipartUploadsRequest, **kwargs
+ ) -> models.ListMultipartUploadsResult:
+ """
+ Lists all multipart upload tasks in progress. The tasks are not completed or canceled.
+
+ Args:
+ request (ListMultipartUploadsRequest): Request parameters for ListMultipartUploads operation.
+
+ Returns:
+ ListMultipartUploadsResult: Response result for ListMultipartUploads operation.
+ """
+
+ return await operations.list_multipart_uploads(self._client, request, **kwargs)
+
+ async def list_parts(self, request: models.ListPartsRequest, **kwargs
+ ) -> models.ListPartsResult:
+ """
+ Lists all parts that are uploaded by using a specified upload ID.
+
+ Args:
+ request (ListPartsRequest): Request parameters for ListParts operation.
+
+ Returns:
+ ListPartsResult: Response result for ListParts operation.
+ """
+
+ return await operations.list_parts(self._client, request, **kwargs)
+
+ async def put_symlink(self, request: models.PutSymlinkRequest, **kwargs
+ ) -> models.PutSymlinkResult:
+ """
+ Creates a symbolic link that points to a destination object.
+ You can use the symbolic link to access the destination object.
+
+ Args:
+ request (PutSymlinkRequest): Request parameters for PutSymlink operation.
+
+ Returns:
+ PutSymlinkResult: Response result for PutSymlink operation.
+ """
+
+ return await operations.put_symlink(self._client, request, **kwargs)
+
+ async def get_symlink(self, request: models.GetSymlinkRequest, **kwargs
+ ) -> models.GetSymlinkResult:
+ """
+ Obtains a symbol link. To perform GetSymlink operations, you must have the read permission on the symbol link.
+
+ Args:
+ request (GetSymlinkRequest): Request parameters for GetSymlink operation.
+
+ Returns:
+ GetSymlinkResult: Response result for GetSymlink operation.
+ """
+
+ return await operations.get_symlink(self._client, request, **kwargs)
+
+ async def put_object_tagging(self, request: models.PutObjectTaggingRequest, **kwargs
+ ) -> models.PutObjectTaggingResult:
+ """
+ Adds tags to an object or updates the tags added to the object. Each tag added to an object is a key-value pair.
+
+ Args:
+ request (PutObjectTaggingRequest): Request parameters for PutObjectTagging operation.
+
+ Returns:
+ PutObjectTaggingResult: Response result for PutObjectTagging operation.
+ """
+
+ return await operations.put_object_tagging(self._client, request, **kwargs)
+
+ async def get_object_tagging(self, request: models.GetObjectTaggingRequest, **kwargs
+ ) -> models.GetObjectTaggingResult:
+ """
+ You can call this operation to query the tags of an object.
+
+ Args:
+ request (GetObjectTaggingRequest): Request parameters for GetObjectTagging operation.
+
+ Returns:
+ GetObjectTaggingResult: Response result for GetObjectTagging operation.
+ """
+
+ return await operations.get_object_tagging(self._client, request, **kwargs)
+
+ async def delete_object_tagging(self, request: models.DeleteObjectTaggingRequest, **kwargs
+ ) -> models.DeleteObjectTaggingResult:
+ """
+ You can call this operation to delete the tags of a specified object.
+
+ Args:
+ request (DeleteObjectTaggingRequest): Request parameters for DeleteObjectTagging operation.
+
+ Returns:
+ DeleteObjectTaggingResult: Response result for DeleteObjectTagging operation.
+ """
+
+ return await operations.delete_object_tagging(self._client, request, **kwargs)
+
+ async def process_object(self, request: models.ProcessObjectRequest, **kwargs
+ ) -> models.ProcessObjectResult:
+ """
+ Applies process on the specified image file.
+
+ Args:
+ request (ProcessObjectRequest): Request parameters for ProcessObject operation.
+
+ Returns:
+ ProcessObjectResult: Response result for ProcessObject operation.
+ """
+
+ return await operations.process_object(self._client, request, **kwargs)
+
+ async def async_process_object(self, request: models.AsyncProcessObjectRequest, **kwargs
+ ) -> models.AsyncProcessObjectResult:
+ """
+ Applies async process on the specified image file.
+
+ Args:
+ request (AsyncProcessObjectRequest): Request parameters for AsyncProcessObject operation.
+
+ Returns:
+ AsyncProcessObjectResult: Response result for AsyncProcessObject operation.
+ """
+
+ return await operations.async_process_object(self._client, request, **kwargs)
+
+
+ # other APIs
+ async def is_object_exist(self, bucket: str, key: str,
+ version_id: Optional[str] = None,
+ request_payer: Optional[str] = None,
+ **kwargs) -> bool:
+ """Checks if the object exists
+
+ Args:
+ bucket (str, required): The name of the bucket.
+ key (str, required): The name of the object.
+ version_id (str, optional): The version ID of the source object.
+ request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs.
+ Returns:
+ bool: True if the object exists, else False.
+ """
+
+ result = None
+ err = None
+
+ try:
+ result = await self.get_object_meta(models.GetObjectMetaRequest(
+ bucket=bucket,
+ key=key,
+ version_id=version_id,
+ request_payer=request_payer,
+ **kwargs
+ ))
+ except exceptions.OperationError as e:
+ err = e
+ se = e.unwrap()
+ if isinstance(se, exceptions.ServiceError):
+ if ('NoSuchKey' == se.code or
+ (404 == se.status_code and 'BadErrorResponse' == se.code)):
+ return False
+
+ if err is not None:
+ raise err
+
+ return result is not None
+
+ async def is_bucket_exist(self, bucket: str, request_payer: Optional[str] = None, **kwargs) -> bool:
+ """Checks if the bucket exists
+
+ Args:
+ bucket (str, required): The name of the bucket.
+ request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs.
+ Returns:
+ bool: True if the bucket exists, else False.
+ """
+
+ result = None
+ err = None
+
+ try:
+ result = await self.get_bucket_acl(models.GetBucketAclRequest(
+ bucket=bucket,
+ request_payer=request_payer,
+ **kwargs
+ ))
+ except exceptions.OperationError as e:
+ err = e
+ se = e.unwrap()
+ if isinstance(se, exceptions.ServiceError):
+ return not 'NoSuchBucket' == se.code
+
+ if err is not None:
+ raise err
+
+ return result is not None
diff --git a/alibabacloud_oss_v2/aio/operations/__init__.py b/alibabacloud_oss_v2/aio/operations/__init__.py
new file mode 100644
index 0000000..d6cd41e
--- /dev/null
+++ b/alibabacloud_oss_v2/aio/operations/__init__.py
@@ -0,0 +1,6 @@
+
+from .service import *
+from .region import *
+from .bucket_basic import *
+from .object_basic import *
+from .bucket_tags import *
diff --git a/alibabacloud_oss_v2/aio/operations/bucket_basic.py b/alibabacloud_oss_v2/aio/operations/bucket_basic.py
new file mode 100644
index 0000000..ce28d00
--- /dev/null
+++ b/alibabacloud_oss_v2/aio/operations/bucket_basic.py
@@ -0,0 +1,466 @@
+"""APIs for bucket basic operation."""
+# pylint: disable=line-too-long
+
+from ...types import OperationInput, CaseInsensitiveDict
+from ... import serde
+from ... import serde_utils
+from ... import models
+from .._aioclient import _AsyncClientImpl
+
+
+async def put_bucket(client: _AsyncClientImpl, request: models.PutBucketRequest, **kwargs) -> models.PutBucketResult:
+ """
+ put bucket asynchronously
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (PutBucketRequest): The request for the PutBucket operation.
+
+ Returns:
+ PutBucketResult: The result for the PutBucket operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='PutBucket',
+ method='PUT',
+ headers=CaseInsensitiveDict({
+ 'Content-Type': 'application/xml',
+ }),
+ bucket=request.bucket,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.PutBucketResult(),
+ op_output=op_output,
+ )
+
+
+async def delete_bucket(client: _AsyncClientImpl, request: models.DeleteBucketRequest, **kwargs) -> models.DeleteBucketResult:
+ """
+ delete bucket asynchronously
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (DeleteBucketRequest): The request for the DeleteBucket operation.
+
+ Returns:
+ DeleteBucketResult: The result for the DeleteBucket operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='DeleteBucket',
+ method='DELETE',
+ headers=CaseInsensitiveDict({
+ 'Content-Type': 'application/xml',
+ }),
+ bucket=request.bucket,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.DeleteBucketResult(),
+ op_output=op_output,
+ )
+
+
+async def list_objects(client: _AsyncClientImpl, request: models.ListObjectsRequest, **kwargs) -> models.ListObjectsResult:
+ """
+ list objects asynchronously
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (ListObjectsRequest): The request for the ListObjects operation.
+
+ Returns:
+ ListObjectsResult: The result for the ListObjects operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='ListObjects',
+ method='GET',
+ headers=CaseInsensitiveDict({
+ 'Content-Type': 'application/octet-stream',
+ }),
+ parameters={
+ 'encoding-type': 'url',
+ },
+ bucket=request.bucket,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.ListObjectsResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody,
+ serde_utils.deserialize_encode_type
+ ],
+ )
+
+async def put_bucket_acl(client: _AsyncClientImpl, request: models.PutBucketAclRequest, **kwargs) -> models.PutBucketAclResult:
+ """
+ put bucket acl
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (PutBucketAclRequest): The request for the PutBucketAcl operation.
+
+ Returns:
+ PutBucketAclResult: The result for the PutBucketAcl operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='PutBucketAcl',
+ method='PUT',
+ parameters={
+ 'acl': '',
+ },
+ bucket=request.bucket,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.PutBucketAclResult(),
+ op_output=op_output,
+ )
+
+
+async def get_bucket_acl(client: _AsyncClientImpl, request: models.GetBucketAclRequest, **kwargs) -> models.GetBucketAclResult:
+ """
+ get bucket acl
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (GetBucketAclRequest): The request for the GetBucketAcl operation.
+
+ Returns:
+ GetBucketAclResult: The result for the GetBucketAcl operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='GetBucketAcl',
+ method='GET',
+ parameters={
+ 'acl': '',
+ },
+ bucket=request.bucket,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.GetBucketAclResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody
+ ],
+ )
+
+
+async def list_objects_v2(client: _AsyncClientImpl, request: models.ListObjectsV2Request, **kwargs) -> models.ListObjectsV2Result:
+ """
+ list objects (v2) asynchronously
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (ListObjectsV2Request): The request for the ListObjectsV2 operation.
+
+ Returns:
+ ListObjectsV2Result: The result for the ListObjectsV2 operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='ListObjectsV2',
+ method='GET',
+ headers=CaseInsensitiveDict({
+ 'Content-Type': 'application/octet-stream',
+ }),
+ parameters={
+ 'encoding-type': 'url',
+ 'list-type': 2,
+ },
+ bucket=request.bucket,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.ListObjectsV2Result(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody,
+ serde_utils.deserialize_encode_type
+ ],
+ )
+
+async def get_bucket_stat(client: _AsyncClientImpl, request: models.GetBucketStatRequest, **kwargs) -> models.GetBucketStatResult:
+ """
+ GetBucketStat Queries the storage capacity of a specified bucket and the number of objects that are stored in the bucket.
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (GetBucketStatRequest): The request for the GetBucketStat operation.
+
+ Returns:
+ GetBucketStatResult: The result for the GetBucketStat operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='GetBucketStat',
+ method='GET',
+ parameters={
+ 'stat': '',
+ },
+ bucket=request.bucket,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.GetBucketStatResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody
+ ],
+ )
+
+
+async def get_bucket_location(client: _AsyncClientImpl, request: models.GetBucketLocationRequest, **kwargs) -> models.GetBucketLocationResult:
+ """
+ GetBucketLocation Queries the region of an Object Storage Service (OSS) bucket.
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (GetBucketLocationRequest): The request for the GetBucketLocation operation.
+
+ Returns:
+ GetBucketLocationResult: The result for the GetBucketLocation operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='GetBucketLocation',
+ method='GET',
+ parameters={
+ 'location': '',
+ },
+ bucket=request.bucket,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.GetBucketLocationResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody
+ ],
+ )
+
+
+async def get_bucket_info(client: _AsyncClientImpl, request: models.GetBucketInfoRequest, **kwargs) -> models.GetBucketInfoResult:
+ """
+ GetBucketInfo Queries information about a bucket.
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (GetBucketInfoRequest): The request for the GetBucketInfo operation.
+
+ Returns:
+ GetBucketInfoResult: The result for the GetBucketInfo operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='GetBucketInfo',
+ method='GET',
+ parameters={
+ 'bucketInfo': '',
+ },
+ bucket=request.bucket,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.GetBucketInfoResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody
+ ],
+ )
+
+
+async def put_bucket_versioning(client: _AsyncClientImpl, request: models.PutBucketVersioningRequest, **kwargs) -> models.PutBucketVersioningResult:
+ """
+ PutBucketVersioning Configures the versioning state for a bucket.
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (PutBucketVersioningRequest): The request for the PutBucketVersioning operation.
+
+ Returns:
+ PutBucketVersioningResult: The result for the PutBucketVersioning operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='PutBucketVersioning',
+ method='PUT',
+ parameters={
+ 'versioning': '',
+ },
+ headers=CaseInsensitiveDict({
+ 'Content-Type': 'application/xml',
+ }),
+ bucket=request.bucket,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.PutBucketVersioningResult(),
+ op_output=op_output,
+ )
+
+
+async def get_bucket_versioning(client: _AsyncClientImpl, request: models.GetBucketVersioningRequest, **kwargs) -> models.GetBucketVersioningResult:
+ """
+ GetBucketVersioning You can call this operation to query the versioning state of a bucket.
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (GetBucketVersioningRequest): The request for the GetBucketVersioning operation.
+
+ Returns:
+ GetBucketVersioningResult: The result for the GetBucketVersioning operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='GetBucketVersioning',
+ method='GET',
+ parameters={
+ 'versioning': '',
+ },
+ bucket=request.bucket,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.GetBucketVersioningResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody
+ ],
+ )
+
+
+async def list_object_versions(client: _AsyncClientImpl, request: models.ListObjectVersionsRequest, **kwargs) -> models.ListObjectVersionsResult:
+ """
+ ListObjectVersions Lists the versions of all objects in a bucket, including delete markers.
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (ListObjectVersionsRequest): The request for the ListObjectVersions operation.
+
+ Returns:
+ ListObjectVersionsResult: The result for the ListObjectVersions operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='ListObjectVersions',
+ method='GET',
+ headers=CaseInsensitiveDict({
+ 'Content-Type': 'application/octet-stream',
+ }),
+ parameters={
+ 'versions': '',
+ },
+ bucket=request.bucket,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.ListObjectVersionsResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody,
+ serde_utils.deserialize_encode_type
+ ],
+ )
diff --git a/alibabacloud_oss_v2/aio/operations/bucket_tags.py b/alibabacloud_oss_v2/aio/operations/bucket_tags.py
new file mode 100644
index 0000000..146b8d4
--- /dev/null
+++ b/alibabacloud_oss_v2/aio/operations/bucket_tags.py
@@ -0,0 +1,131 @@
+# pylint: disable=line-too-long
+
+from ...types import OperationInput, CaseInsensitiveDict
+from ... import serde
+from ... import serde_utils
+from ... import models
+from .._aioclient import _AsyncClientImpl
+
+
+async def put_bucket_tags(client: _AsyncClientImpl, request: models.PutBucketTagsRequest, **kwargs) -> models.PutBucketTagsResult:
+ """
+ put_bucket_tags asynchronously
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (PutBucketTagsRequest): The request for the PutBucketTags operation.
+
+ Returns:
+ PutBucketTagsResult: The result for the PutBucketTags operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='PutBucketTags',
+ method='PUT',
+ headers=CaseInsensitiveDict({
+ 'Content-Type': 'application/xml',
+ }),
+ parameters={
+ 'tagging': '',
+ },
+ bucket=request.bucket,
+ op_metadata={'sub-resource': ['tagging']},
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.PutBucketTagsResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody
+ ],
+ )
+
+async def get_bucket_tags(client: _AsyncClientImpl, request: models.GetBucketTagsRequest, **kwargs) -> models.GetBucketTagsResult:
+ """
+ get_bucket_tags asynchronously
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (GetBucketTagsRequest): The request for the GetBucketTags operation.
+
+ Returns:
+ GetBucketTagsResult: The result for the GetBucketTags operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='GetBucketTags',
+ method='GET',
+ headers=CaseInsensitiveDict({
+ 'Content-Type': 'application/xml',
+ }),
+ parameters={
+ 'tagging': '',
+ },
+ bucket=request.bucket,
+ op_metadata={'sub-resource': ['tagging']},
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.GetBucketTagsResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody
+ ],
+ )
+
+async def delete_bucket_tags(client: _AsyncClientImpl, request: models.DeleteBucketTagsRequest, **kwargs) -> models.DeleteBucketTagsResult:
+ """
+ delete_bucket_tags asynchronously
+
+ Args:
+ client (_AsyncClientImpl): An agent that sends the request.
+ request (DeleteBucketTagsRequest): The request for the DeleteBucketTags operation.
+
+ Returns:
+ DeleteBucketTagsResult: The result for the DeleteBucketTags operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='DeleteBucketTags',
+ method='DELETE',
+ headers=CaseInsensitiveDict({
+ 'Content-Type': 'application/xml',
+ }),
+ parameters={
+ 'tagging': '',
+ },
+ bucket=request.bucket,
+ op_metadata={'sub-resource': ['tagging']},
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ]
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.DeleteBucketTagsResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody
+ ],
+ )
diff --git a/alibabacloud_oss_v2/aio/operations/object_basic.py b/alibabacloud_oss_v2/aio/operations/object_basic.py
new file mode 100644
index 0000000..24879a5
--- /dev/null
+++ b/alibabacloud_oss_v2/aio/operations/object_basic.py
@@ -0,0 +1,1041 @@
+"""APIs for object basic operation."""
+# pylint: disable=line-too-long
+
+from ...types import OperationInput, CaseInsensitiveDict
+from ... import serde
+from ... import serde_utils
+from ... import models
+from ... import defaults
+from .._aioclient import _AsyncClientImpl
+from ..aio_utils import AsyncStreamBodyReader
+
+
+async def put_object(client: _AsyncClientImpl, request: models.PutObjectRequest, **kwargs) -> models.PutObjectResult:
+    """
+    put object asynchronously
+
+    Args:
+        client (_AsyncClientImpl): An agent that sends the request.
+        request (PutObjectRequest): The request for the PutObject operation.
+
+    Returns:
+        PutObjectResult: The result for the PutObject operation.
+    """
+
+    custom_serializer=[
+        serde_utils.add_content_type,
+        serde_utils.add_progress,
+    ]
+
+    if client.has_feature(defaults.FF_ENABLE_CRC64_CHECK_UPLOAD):
+        custom_serializer.append(serde_utils.add_crc_checker)
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='PutObject',
+            method='PUT',
+            bucket=request.bucket,
+            key=request.key,
+        ),
+        custom_serializer=custom_serializer
+    )
+
+    op_output = await client.invoke_operation(op_input, **kwargs)
+
+    serdes = [
+        serde.deserialize_output_headers
+    ]
+
+    if request.callback is not None:
+        serdes.append(serde.deserialize_output_callbackbody)
+
+    return serde.deserialize_output(
+        result=models.PutObjectResult(),
+        op_output=op_output,
+        custom_deserializer=serdes,
+    )
+
+
+async def head_object(client: _AsyncClientImpl, request: models.HeadObjectRequest, **kwargs) -> models.HeadObjectResult:
+    """
+    head object asynchronously
+
+    Args:
+        client (_AsyncClientImpl): An agent that sends the request.
+        request (HeadObjectRequest): The request for the HeadObject operation.
+
+    Returns:
+        HeadObjectResult: The result for the HeadObject operation.
+    """
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='HeadObject',
+            method='HEAD',
+            bucket=request.bucket,
+            key=request.key,
+        ),
+    )
+
+    op_output = await client.invoke_operation(op_input, **kwargs)
+
+    return serde.deserialize_output(
+        result=models.HeadObjectResult(),
+        op_output=op_output,
+        custom_deserializer=[
+            serde.deserialize_output_headers
+        ],
+    )
+
+
+async def get_object(client: _AsyncClientImpl, request: models.GetObjectRequest, **kwargs) -> models.GetObjectResult:
+    """
+    get object asynchronously
+
+    Args:
+        client (_AsyncClientImpl): An agent that sends the request.
+        request (GetObjectRequest): The request for the GetObject operation.
+
+    Returns:
+        GetObjectResult: The result for the GetObject operation.
+    """
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='GetObject',
+            method='GET',
+            bucket=request.bucket,
+            key=request.key,
+            op_metadata={'response-stream':True}
+        ),
+    )
+
+    op_output = await client.invoke_operation(op_input, **kwargs)
+
+    return serde.deserialize_output(
+        result=models.GetObjectResult(
+            body=AsyncStreamBodyReader(op_output.http_response)
+        ),
+        op_output=op_output,
+        custom_deserializer=[
+            serde.deserialize_output_headers
+        ],
+    )
+
+
+async def append_object(client: _AsyncClientImpl, request: models.AppendObjectRequest, **kwargs) -> models.AppendObjectResult:
+ """
+ append object asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (AppendObjectRequest): The request for the AppendObject operation.
+
+ Returns:
+ AppendObjectResult: The result for the AppendObject operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='AppendObject',
+ method='POST',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'append': '',
+ },
+ ),
+ custom_serializer=[
+ serde_utils.add_content_type,
+ serde_utils.add_progress,
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.AppendObjectResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers
+ ],
+ )
+
+async def copy_object(client: _AsyncClientImpl, request: models.CopyObjectRequest, **kwargs) -> models.CopyObjectResult:
+ """
+ copy object asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (CopyObjectRequest): The request for the CopyObject operation.
+
+ Returns:
+ CopyObjectResult: The result for the CopyObject operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='CopyObject',
+ method='PUT',
+ bucket=request.bucket,
+ key=request.key,
+ headers=CaseInsensitiveDict({
+ 'x-oss-copy-source': serde_utils.encode_copy_source(request),
+ }),
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5,
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.CopyObjectResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers,
+ serde.deserialize_output_xmlbody
+ ],
+ )
+
+
+async def delete_object(client: _AsyncClientImpl, request: models.DeleteObjectRequest, **kwargs) -> models.DeleteObjectResult:
+ """
+ copy object asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (DeleteObjectRequest): The request for the DeleteObject operation.
+
+ Returns:
+ DeleteObjectResult: The result for the DeleteObject operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='DeleteObject',
+ method='DELETE',
+ bucket=request.bucket,
+ key=request.key,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5,
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.DeleteObjectResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers,
+ ],
+ )
+
+
+async def delete_multiple_objects(client: _AsyncClientImpl, request: models.DeleteMultipleObjectsRequest, **kwargs) -> models.DeleteMultipleObjectsResult:
+ """
+ delete multiple objects asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (DeleteMultipleObjectsRequest): The request for the DeleteMultipleObjects operation.
+
+ Returns:
+ DeleteMultipleObjectsResult: The result for the DeleteMultipleObjects operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='DeleteMultipleObjects',
+ method='POST',
+ bucket=request.bucket,
+ headers=CaseInsensitiveDict({
+ 'Content-Type': 'application/xml',
+ }),
+ parameters={
+ 'delete': '',
+ 'encoding-type': 'url',
+ }
+ ),
+ custom_serializer=[
+ serde_utils.serialize_delete_objects,
+ serde_utils.add_content_md5,
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.DeleteMultipleObjectsResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody,
+ serde.deserialize_output_headers,
+ serde_utils.deserialize_encode_type
+ ],
+ )
+
+
+async def get_object_meta(client: _AsyncClientImpl, request: models.GetObjectMetaRequest, **kwargs) -> models.GetObjectMetaResult:
+ """
+ get object meta asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (GetObjectMetaRequest): The request for the GetObjectMeta operation.
+
+ Returns:
+ GetObjectMetaResult: The result for the GetObjectMeta operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='GetObjectMeta',
+ method='HEAD',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'objectMeta': '',
+ }
+ ),
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.GetObjectMetaResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers
+ ],
+ )
+
+
+async def restore_object(client: _AsyncClientImpl, request: models.RestoreObjectRequest, **kwargs) -> models.RestoreObjectResult:
+ """
+ restore object asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (RestoreObjectRequest): The request for the RestoreObject operation.
+
+ Returns:
+ RestoreObjectResult: The result for the RestoreObject operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='RestoreObject',
+ method='POST',
+ bucket=request.bucket,
+ key=request.key,
+ headers=CaseInsensitiveDict({
+ 'Content-Type': 'application/xml',
+ }),
+ parameters={
+ 'restore': '',
+ }
+ ),
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.RestoreObjectResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers
+ ],
+ )
+
+
+async def put_object_acl(client: _AsyncClientImpl, request: models.PutObjectAclRequest, **kwargs) -> models.PutObjectAclResult:
+ """
+ put object acl asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (PutObjectAclRequest): The request for the PutObjectAcl operation.
+
+ Returns:
+ PutObjectAclResult: The result for the PutObjectAcl operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='PutObjectAcl',
+ method='PUT',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'acl': '',
+ }
+ ),
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.PutObjectAclResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers
+ ],
+ )
+
+
+async def get_object_acl(client: _AsyncClientImpl, request: models.GetObjectAclRequest, **kwargs) -> models.GetObjectAclResult:
+ """
+ get object acl asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (GetObjectAclRequest): The request for the GetObjectAcl operation.
+
+ Returns:
+ GetObjectAclResult: The result for the GetObjectAcl operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='GetObjectAcl',
+ method='GET',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'acl': '',
+ }
+ ),
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.GetObjectAclResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody,
+ serde.deserialize_output_headers
+ ],
+ )
+
+
+async def initiate_multipart_upload(client: _AsyncClientImpl, request: models.InitiateMultipartUploadRequest, **kwargs) -> models.InitiateMultipartUploadResult:
+ """
+ initiate multipart upload asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (InitiateMultipartUploadRequest): The request for the InitiateMultipartUpload operation.
+
+ Returns:
+ InitiateMultipartUploadResult: The result for the InitiateMultipartUpload operation.
+ """
+
+ serializer = [serde_utils.add_content_md5]
+ if not request.disable_auto_detect_mime_type:
+ serializer.append(serde_utils.add_content_type)
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='InitiateMultipartUpload',
+ method='POST',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'uploads': '',
+ 'encoding-type': 'url',
+ }
+ ),
+ custom_serializer=serializer,
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.InitiateMultipartUploadResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody,
+ serde_utils.deserialize_encode_type
+ ],
+ )
+
+
+async def upload_part(client: _AsyncClientImpl, request: models.UploadPartRequest, **kwargs) -> models.UploadPartResult:
+ """
+ upload part asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (UploadPartRequest): The request for the UploadPart operation.
+
+ Returns:
+ UploadPartResult: The result for the UploadPart operation.
+ """
+
+ custom_serializer=[serde_utils.add_progress]
+
+ if client.has_feature(defaults.FF_ENABLE_CRC64_CHECK_UPLOAD):
+ custom_serializer.append(serde_utils.add_crc_checker)
+
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='UploadPart',
+ method='PUT',
+ bucket=request.bucket,
+ key=request.key,
+ ),
+ custom_serializer=custom_serializer
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.UploadPartResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers
+ ],
+ )
+
+
+async def upload_part_copy(client: _AsyncClientImpl, request: models.UploadPartCopyRequest, **kwargs) -> models.UploadPartCopyResult:
+ """
+ upload part copy asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (UploadPartCopyRequest): The request for the UploadPartCopy operation.
+
+ Returns:
+ UploadPartCopyResult: The result for the UploadPartCopy operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='UploadPartCopy',
+ method='PUT',
+ bucket=request.bucket,
+ key=request.key,
+ headers=CaseInsensitiveDict({
+ 'x-oss-copy-source': serde_utils.encode_copy_source(request),
+ }),
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.UploadPartCopyResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody,
+ serde.deserialize_output_headers
+ ],
+ )
+
+
+async def complete_multipart_upload(client: _AsyncClientImpl, request: models.CompleteMultipartUploadRequest, **kwargs) -> models.CompleteMultipartUploadResult:
+ """
+ complete multipart upload asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (CompleteMultipartUploadRequest): The request for the CompleteMultipartUpload operation.
+
+ Returns:
+ CompleteMultipartUploadResult: The result for the CompleteMultipartUpload operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='CompleteMultipartUpload',
+ method='POST',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'encoding-type': 'url',
+ }
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ deserializer = [serde.deserialize_output_headers]
+ if request.callback is None:
+ deserializer.append(serde.deserialize_output_xmlbody)
+ deserializer.append(serde_utils.deserialize_encode_type)
+ else:
+ deserializer.append(serde.deserialize_output_callbackbody)
+
+ return serde.deserialize_output(
+ result=models.CompleteMultipartUploadResult(),
+ op_output=op_output,
+ custom_deserializer=deserializer,
+ )
+
+
+async def abort_multipart_upload(client: _AsyncClientImpl, request: models.AbortMultipartUploadRequest, **kwargs) -> models.AbortMultipartUploadResult:
+ """
+ abort multipart upload asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (AbortMultipartUploadRequest): The request for the AbortMultipartUpload operation.
+
+ Returns:
+ AbortMultipartUploadResult: The result for the AbortMultipartUpload operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='AbortMultipartUpload',
+ method='DELETE',
+ bucket=request.bucket,
+ key=request.key,
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.AbortMultipartUploadResult(),
+ op_output=op_output,
+ )
+
+
+
+async def list_multipart_uploads(client: _AsyncClientImpl, request: models.ListMultipartUploadsRequest, **kwargs) -> models.ListMultipartUploadsResult:
+ """
+ list multipart uploads asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (ListMultipartUploadsRequest): The request for the ListMultipartUploads operation.
+
+ Returns:
+ ListMultipartUploadsResult: The result for the ListMultipartUploads operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='ListMultipartUploads',
+ method='GET',
+ bucket=request.bucket,
+ parameters={
+ 'encoding-type': 'url',
+ 'uploads': '',
+ },
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.ListMultipartUploadsResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody,
+ serde_utils.deserialize_encode_type
+ ],
+ )
+
+
+
+async def list_parts(client: _AsyncClientImpl, request: models.ListPartsRequest, **kwargs) -> models.ListPartsResult:
+ """
+ list parts asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (ListPartsRequest): The request for the ListParts operation.
+
+ Returns:
+ ListPartsResult: The result for the ListParts operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='ListParts',
+ method='GET',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'encoding-type': 'url',
+ },
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.ListPartsResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody,
+ serde_utils.deserialize_encode_type
+ ],
+ )
+
+
+
+async def put_symlink(client: _AsyncClientImpl, request: models.PutSymlinkRequest, **kwargs) -> models.PutSymlinkResult:
+ """
+ put symlink asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (PutSymlinkRequest): The request for the PutSymlink operation.
+
+ Returns:
+ PutSymlinkResult: The result for the PutSymlink operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='PutSymlink',
+ method='PUT',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'symlink': '',
+ },
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.PutSymlinkResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers
+ ],
+ )
+
+
+
+async def get_symlink(client: _AsyncClientImpl, request: models.GetSymlinkRequest, **kwargs) -> models.GetSymlinkResult:
+ """
+ get symlink asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (GetSymlinkRequest): The request for the GetSymlink operation.
+
+ Returns:
+ GetSymlinkResult: The result for the GetSymlink operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='GetSymlink',
+ method='GET',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'symlink': '',
+ },
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.GetSymlinkResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers
+ ],
+ )
+
+
+
+async def put_object_tagging(client: _AsyncClientImpl, request: models.PutObjectTaggingRequest, **kwargs) -> models.PutObjectTaggingResult:
+ """
+ put object tagging asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (PutObjectTaggingRequest): The request for the PutObjectTagging operation.
+
+ Returns:
+ PutObjectTaggingResult: The result for the PutObjectTagging operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='PutObjectTagging',
+ method='PUT',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'tagging': '',
+ },
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.PutObjectTaggingResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers
+ ],
+ )
+
+
+
+async def get_object_tagging(client: _AsyncClientImpl, request: models.GetObjectTaggingRequest, **kwargs) -> models.GetObjectTaggingResult:
+ """
+ get object tagging asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (GetObjectTaggingRequest): The request for the GetObjectTagging operation.
+
+ Returns:
+ GetObjectTaggingResult: The result for the GetObjectTagging operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='GetObjectTagging',
+ method='GET',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'tagging': '',
+ },
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.GetObjectTaggingResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_xmlbody,
+ serde.deserialize_output_headers
+ ],
+ )
+
+
+
+async def delete_object_tagging(client: _AsyncClientImpl, request: models.DeleteObjectTaggingRequest, **kwargs) -> models.DeleteObjectTaggingResult:
+ """
+ delete object tagging asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (DeleteObjectTaggingRequest): The request for the DeleteObjectTagging operation.
+
+ Returns:
+ DeleteObjectTaggingResult: The result for the DeleteObjectTagging operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='DeleteObjectTagging',
+ method='DELETE',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'tagging': '',
+ },
+ ),
+ custom_serializer=[
+ serde_utils.add_content_md5
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.GetObjectTaggingResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers
+ ],
+ )
+
+
+async def process_object(client: _AsyncClientImpl, request: models.ProcessObjectRequest, **kwargs) -> models.ProcessObjectResult:
+ """
+ process object asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (ProcessObjectRequest): The request for the ProcessObject operation.
+
+ Returns:
+ ProcessObjectResult: The result for the ProcessObject operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='ProcessObject',
+ method='POST',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'x-oss-process': '',
+ },
+ ),
+ custom_serializer=[
+ serde_utils.add_process_action,
+ serde_utils.add_content_md5
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.ProcessObjectResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers,
+ serde_utils.deserialize_process_body
+ ],
+ )
+
+
+async def async_process_object(client: _AsyncClientImpl, request: models.AsyncProcessObjectRequest, **kwargs) -> models.AsyncProcessObjectResult:
+ """
+ async process object asynchronously
+
+ Args:
+ client (_AsyncClientImpl): A agent that sends the request.
+ request (AsyncProcessObjectRequest): The request for the AsyncProcessObject operation.
+
+ Returns:
+ AsyncProcessObjectResult: The result for the AsyncProcessObject operation.
+ """
+
+ op_input = serde.serialize_input(
+ request=request,
+ op_input=OperationInput(
+ op_name='AsyncProcessObject',
+ method='POST',
+ bucket=request.bucket,
+ key=request.key,
+ parameters={
+ 'x-oss-async-process': '',
+ },
+ ),
+ custom_serializer=[
+ serde_utils.add_process_action,
+ serde_utils.add_content_md5
+ ],
+ )
+
+ op_output = await client.invoke_operation(op_input, **kwargs)
+
+ return serde.deserialize_output(
+ result=models.AsyncProcessObjectResult(),
+ op_output=op_output,
+ custom_deserializer=[
+ serde.deserialize_output_headers,
+ serde_utils.deserialize_process_body
+ ],
+ )
+
+async def clean_restored_object(client: _AsyncClientImpl, request: models.CleanRestoredObjectRequest, **kwargs) -> models.CleanRestoredObjectResult:
+    """
+    clean_restored_object asynchronously
+
+    Args:
+        client (_AsyncClientImpl): An agent that sends the request.
+        request (CleanRestoredObjectRequest): The request for the CleanRestoredObject operation.
+
+    Returns:
+        CleanRestoredObjectResult: The result for the CleanRestoredObject operation.
+    """
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='CleanRestoredObject',
+            method='POST',
+            headers=CaseInsensitiveDict({
+                'Content-Type': 'application/xml',
+            }),
+            parameters={
+                'cleanRestoredObject': '',
+            },
+            bucket=request.bucket,
+            key=request.key,
+            op_metadata={'sub-resource': ['cleanRestoredObject']},
+        ),
+        custom_serializer=[
+            serde_utils.add_content_md5
+        ]
+    )
+
+    op_output = await client.invoke_operation(op_input, **kwargs)
+
+    return serde.deserialize_output(
+        result=models.CleanRestoredObjectResult(),
+        op_output=op_output,
+        custom_deserializer=[
+            serde.deserialize_output_xmlbody
+        ],
+    )
diff --git a/alibabacloud_oss_v2/aio/operations/region.py b/alibabacloud_oss_v2/aio/operations/region.py
new file mode 100644
index 0000000..8df99ea
--- /dev/null
+++ b/alibabacloud_oss_v2/aio/operations/region.py
@@ -0,0 +1,40 @@
+"""APIs for region operation."""
+# pylint: disable=line-too-long
+
+from ...types import OperationInput, CaseInsensitiveDict
+from ... import serde
+from ... import models
+from .._aioclient import _AsyncClientImpl
+
+async def describe_regions(client: _AsyncClientImpl, request: models.DescribeRegionsRequest, **kwargs) -> models.DescribeRegionsResult:
+    """
+    Queries the endpoints of all regions supported by Object Storage Service (OSS) or a specific region, including public endpoints, internal endpoints, and acceleration endpoints.
+
+    Args:
+        client (_AsyncClientImpl): An agent that sends the request.
+        request (DescribeRegionsRequest): The request for the DescribeRegions operation.
+
+    Returns:
+        DescribeRegionsResult: The result for the DescribeRegions operation.
+    """
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='DescribeRegions',
+            method='GET',
+            parameters={
+                'regions': '',
+            },
+        )
+    )
+
+    op_output = await client.invoke_operation(op_input, **kwargs)
+
+    return serde.deserialize_output(
+        result=models.DescribeRegionsResult(),
+        op_output=op_output,
+        custom_deserializer=[
+            serde.deserialize_output_xmlbody
+        ],
+    )
diff --git a/alibabacloud_oss_v2/aio/operations/service.py b/alibabacloud_oss_v2/aio/operations/service.py
new file mode 100644
index 0000000..920cfe0
--- /dev/null
+++ b/alibabacloud_oss_v2/aio/operations/service.py
@@ -0,0 +1,44 @@
+"""APIs for service operation."""
+# pylint: disable=line-too-long
+
+from ...types import OperationInput, CaseInsensitiveDict
+from ... import serde
+from ... import serde_utils
+from ... import models
+from .._aioclient import _AsyncClientImpl
+
+async def list_buckets(client: _AsyncClientImpl, request: models.ListBucketsRequest, **kwargs) -> models.ListBucketsResult:
+    """
+    list buckets asynchronously
+
+    Args:
+        client (_AsyncClientImpl): An agent that sends the request.
+        request (ListBucketsRequest): The request for the ListBuckets operation.
+
+    Returns:
+        ListBucketsResult: The result for the ListBuckets operation.
+    """
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='ListBuckets',
+            method='GET',
+            headers=CaseInsensitiveDict({
+                'Content-Type': 'application/octet-stream',
+            }),
+        ),
+        custom_serializer=[
+            serde_utils.add_content_md5
+        ]
+    )
+
+    op_output = await client.invoke_operation(op_input, **kwargs)
+
+    return serde.deserialize_output(
+        result=models.ListBucketsResult(),
+        op_output=op_output,
+        custom_deserializer=[
+            serde.deserialize_output_xmlbody,
+        ],
+    )
diff --git a/alibabacloud_oss_v2/aio/transport/__init__.py b/alibabacloud_oss_v2/aio/transport/__init__.py
new file mode 100644
index 0000000..0564eba
--- /dev/null
+++ b/alibabacloud_oss_v2/aio/transport/__init__.py
@@ -0,0 +1,2 @@
+# httpclient implement
+from .aiohttp_client import AioHttpClient
diff --git a/alibabacloud_oss_v2/aio/transport/aiohttp_client.py b/alibabacloud_oss_v2/aio/transport/aiohttp_client.py
new file mode 100644
index 0000000..38f78b2
--- /dev/null
+++ b/alibabacloud_oss_v2/aio/transport/aiohttp_client.py
@@ -0,0 +1,271 @@
+"""AsyncHttpClient implementation based on aiohttp
+"""
+from typing import cast, MutableMapping, Optional, Type, AsyncIterator, Any, Callable
+from types import TracebackType
+import collections.abc
+import asyncio
+
+try:
+ import aiohttp
+ import aiohttp.client_exceptions
+except ImportError:
+ raise ImportError("Please install aiohttp by `pip install aiohttp`")
+
+from ...types import AsyncHttpClient, HttpRequest, AsyncHttpResponse
+from ... import exceptions
+from ... import defaults
+
+class _ResponseStopIteration(Exception):
+ pass
+
+class _AioHttpStreamDownloadGenerator(collections.abc.AsyncIterator):
+ """Streams the response body data.
+ """
+
+ def __init__(
+ self,
+ response: "_AioHttpResponseImpl",
+ block_size: Optional[int] = None
+ ) -> None:
+ self.response = response
+ self.block_size = block_size or response._block_size
+ self.content_length = int(response.headers.get("Content-Length", 0))
+
+ def __len__(self):
+ return self.content_length
+
+ async def __anext__(self):
+ error: Optional[Exception] = None
+ internal_response = self.response._internal_response # pylint: disable=protected-access
+ try:
+ chunk = await internal_response.content.read(self.block_size) # pylint: disable=protected-access
+ if not chunk:
+ raise _ResponseStopIteration()
+ return chunk
+ except _ResponseStopIteration:
+ internal_response.close()
+ raise StopAsyncIteration() # pylint: disable=raise-missing-from
+ except aiohttp.client_exceptions.ClientPayloadError as err:
+ error = err
+ internal_response.close()
+ except aiohttp.client_exceptions.ClientResponseError as err:
+ error = err
+ except asyncio.TimeoutError as err:
+ error = err
+ except aiohttp.client_exceptions.ClientError as err:
+ error = err
+ except Exception as err: # pylint: disable=broad-exception-caught
+ internal_response.close()
+
+ if error is not None:
+ raise error
+
+class _AioHttpResponseImpl(AsyncHttpResponse):
+ """
+ Implementation class for AsyncHttpResponse from aiohttp's response
+ """
+
+ def __init__(self, **kwargs) -> None:
+ super().__init__()
+ self._request = kwargs.pop("request")
+ self._block_size = kwargs.pop("block_size", None) or 4096
+ self._internal_response = cast(aiohttp.ClientResponse, kwargs.pop("internal_response"))
+ self._is_closed = False
+ self._is_stream_consumed = False
+ self._content: Optional[bytes] = None
+ self._stream_download_generator: Callable = _AioHttpStreamDownloadGenerator
+
+ @property
+ def request(self) -> HttpRequest:
+ return self._request
+
+ @property
+ def is_closed(self) -> bool:
+ return self._is_closed
+
+ @property
+ def is_stream_consumed(self) -> bool:
+ return self._is_stream_consumed
+
+ @property
+ def status_code(self) -> int:
+ return self._internal_response.status
+
+ @property
+ def headers(self) -> MutableMapping[str, str]:
+ return self._internal_response.headers
+
+ @property
+ def reason(self) -> str:
+ return self._internal_response.reason
+
+ @property
+ def content(self) -> bytes:
+ if self._content is None:
+ raise exceptions.ResponseNotReadError()
+ return self._content
+
+ async def read(self) -> bytes:
+ if not self._content:
+ self._stream_download_check()
+ self._content = await self._internal_response.read()
+ await self._set_read_checks()
+ return self._content
+
+ async def close(self) -> None:
+ if not self.is_closed:
+ self._is_closed = True
+ self._internal_response.close()
+ await asyncio.sleep(0)
+
+ async def iter_bytes(self, **kwargs: Any) -> AsyncIterator[bytes]:
+ """Asynchronously iterates over the response's bytes.
+
+ Args:
+ block_size (int, optional): The number of bytes it should read into memory.
+
+ Returns:
+ AsyncIterator[bytes]: An async iterator of bytes from the response
+ """
+ block_size = kwargs.pop("block_size", self._block_size)
+
+ if self._content is not None:
+ for i in range(0, len(self.content), block_size):
+ yield self.content[i : i + block_size]
+ else:
+ self._stream_download_check()
+ async for part in self._stream_download_generator(response=self, block_size=block_size):
+ yield part
+ await self.close()
+
+
+ async def __aenter__(self) -> "_AioHttpResponseImpl":
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]] = None,
+ exc_value: Optional[BaseException] = None,
+ traceback: Optional[TracebackType] = None,
+ ) -> None:
+ await self.close()
+
+ def _stream_download_check(self):
+ if self._is_stream_consumed:
+ raise exceptions.StreamConsumedError()
+ if self.is_closed:
+ raise exceptions.StreamClosedError()
+ self._is_stream_consumed = True
+
+ async def _set_read_checks(self):
+ self._is_stream_consumed = True
+ await self.close()
+
+class AioHttpClient(AsyncHttpClient):
+ """Implements a basic aiohttp HTTP sender.
+ """
+
+ def __init__(self, **kwargs) -> None:
+ self.session_owner = False
+ self.session = kwargs.get("session", None)
+ # client's configuration
+ self._connect_timeout = kwargs.get(
+ "connect_timeout", defaults.DEFAULT_CONNECT_TIMEOUT)
+ self._read_timeout = kwargs.get(
+ "readwrite_timeout", defaults.DEFAULT_READWRITE_TIMEOUT)
+ self._max_connections = kwargs.get(
+ "max_connections", defaults.DEFAULT_MAX_CONNECTIONS)
+ self._verify = True
+ if kwargs.get("insecure_skip_verify") is True:
+ self._verify = False
+ self._allow_redirects = kwargs.get("enabled_redirect", False)
+ self._proxies = kwargs.get("proxy_host", None)
+ self._block_size = kwargs.get("block_size", defaults.DEFAULT_BLOCK_SIZE)
+
+ async def __aenter__(self):
+ await self.open()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]] = None,
+ exc_value: Optional[BaseException] = None,
+ traceback: Optional[TracebackType] = None,
+ ) -> None:
+ await self.close()
+
+ async def open(self):
+ if not self.session:
+ clientsession_kwargs = {}
+ self.session = aiohttp.ClientSession(**clientsession_kwargs)
+ self.session_owner = True
+ self.session = cast(aiohttp.ClientSession, self.session)
+ await self.session.__aenter__()
+
+ async def close(self):
+ if self.session_owner and self.session:
+ await self.session.close()
+ self.session_owner = False
+ self.session = None
+
+ async def send(self, request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse:
+ await self.open()
+ error: Optional[Exception] = None
+ resp: _AioHttpResponseImpl = None
+
+ try:
+ # api's configuration
+ connect_timeout = kwargs.pop("connect_timeout", self._connect_timeout)
+ read_timeout = kwargs.pop("readwrite_timeout", self._read_timeout)
+ stream = kwargs.pop("stream", False)
+ socket_timeout = aiohttp.ClientTimeout(
+ sock_connect=connect_timeout,
+ sock_read=read_timeout
+ )
+ response = await self.session.request( # type: ignore
+ request.method,
+ request.url,
+ headers=request.headers,
+ data=request.body,
+ timeout=socket_timeout,
+ allow_redirects=False,
+ skip_auto_headers={"Content-Type", "Accept-Encoding"},
+ proxy=self._proxies,
+ **kwargs
+ )
+
+ resp = _AioHttpResponseImpl(
+ request=request,
+ internal_response=response,
+ block_size=self._block_size
+ )
+
+ if not stream:
+ await _handle_no_stream_response(resp)
+
+ except aiohttp.client_exceptions.ClientResponseError as err:
+ error = exceptions.ResponseError(error=err)
+ except asyncio.TimeoutError as err:
+ error = exceptions.ResponseError(error=err)
+ except aiohttp.client_exceptions.ClientError as err:
+ error = exceptions.RequestError(error=err)
+
+ if error:
+ raise error
+
+ return resp
+
+async def _handle_no_stream_response(response: "_AioHttpResponseImpl") -> None:
+ """Handle reading and closing of non stream rest responses.
+ For our new rest responses, we have to call .read() and .close() for our non-stream
+ responses. This way, we load in the body for users to access.
+
+ :param response: The response to read and close.
+    :type response: _AioHttpResponseImpl
+ """
+ try:
+ await response.read()
+ await response.close()
+ except Exception as exc:
+ await response.close()
+ raise exc
diff --git a/alibabacloud_oss_v2/io_utils.py b/alibabacloud_oss_v2/io_utils.py
index a2e4e75..e1dce5c 100644
--- a/alibabacloud_oss_v2/io_utils.py
+++ b/alibabacloud_oss_v2/io_utils.py
@@ -180,7 +180,8 @@ def __init__(
seekable = is_seekable_io(data)
self._start_offset = 0 if not seekable else data.seek(0, os.SEEK_CUR)
self._seekable = seekable
-
+ self._check_type_done = False
+ self._do_cast = False
def __len__(self):
return self._total
@@ -198,7 +199,15 @@ def next(self):
d = self._data.read(self._block_size)
if d:
- return d
+ if not self._check_type_done:
+ self._check_type_done = True
+ if isinstance(d, str):
+ self._do_cast = True
+
+ if self._do_cast:
+ return d.encode()
+ else:
+ return d
raise StopIteration
@@ -220,9 +229,10 @@ def __init__(
self._start_offset = 0 if not seekable else data.seek(0, os.SEEK_CUR)
self._total = utils.guess_content_length(data)
self._seekable = seekable
-
if self._total is not None:
setattr(self, '__len__', lambda x: x._total)
+ self._check_type_done = False
+ self._do_cast = False
def iter_bytes(self):
"""iter bytes
@@ -238,7 +248,15 @@ def next(self):
d = self._data.read(self._block_size)
if d:
- return d
+ if not self._check_type_done:
+ self._check_type_done = True
+ if isinstance(d, str):
+ self._do_cast = True
+
+ if self._do_cast:
+ return d.encode()
+ else:
+ return d
raise StopIteration
diff --git a/alibabacloud_oss_v2/models/object_basic.py b/alibabacloud_oss_v2/models/object_basic.py
index 3235985..f1a9fc7 100644
--- a/alibabacloud_oss_v2/models/object_basic.py
+++ b/alibabacloud_oss_v2/models/object_basic.py
@@ -3,9 +3,9 @@
# pylint: disable=super-init-not-called, too-many-lines, line-too-long, too-many-arguments
# pylint: disable=too-many-locals
import datetime
-from typing import Optional, Dict, Any, MutableMapping, List
+from typing import Optional, Dict, Any, MutableMapping, List, Union
from .. import serde
-from ..types import BodyType, StreamBody
+from ..types import BodyType, StreamBody, AsyncStreamBody
from .bucket_basic import Owner
@@ -526,7 +526,7 @@ def __init__(
restore: Optional[str] = None,
process_status: Optional[str] = None,
delete_marker: Optional[bool] = None,
- body: Optional[StreamBody] = None,
+ body: Optional[Union[StreamBody, AsyncStreamBody]] = None,
**kwargs: Any
) -> None:
"""
diff --git a/alibabacloud_oss_v2/transport/requests_client.py b/alibabacloud_oss_v2/transport/requests_client.py
index 3696abd..ed7a6a2 100644
--- a/alibabacloud_oss_v2/transport/requests_client.py
+++ b/alibabacloud_oss_v2/transport/requests_client.py
@@ -224,3 +224,4 @@ def send(self, request: HttpRequest, **kwargs) -> HttpResponse:
raise error
return resp
+
diff --git a/alibabacloud_oss_v2/types.py b/alibabacloud_oss_v2/types.py
index ad10c87..6e5132f 100644
--- a/alibabacloud_oss_v2/types.py
+++ b/alibabacloud_oss_v2/types.py
@@ -13,6 +13,8 @@
Mapping,
Set,
Dict,
+ AsyncIterator,
+ AsyncContextManager,
)
from requests.structures import CaseInsensitiveDict
@@ -450,3 +452,117 @@ def iter_bytes(self, **kwargs: Any) -> Iterator[bytes]:
:return: An iterator of bytes from the stream
:rtype: Iterator[str]
"""
+
+
+class AsyncStreamBody(abc.ABC):
+ """Abstract base class for a AsyncStreamBody."""
+
+ @abc.abstractmethod
+ async def __aenter__(self):
+ """Return `self` upon entering the runtime context."""
+
+ @abc.abstractmethod
+ async def __aexit__(self, exc_type, exc_value, traceback):
+ """Raise any exception triggered within the runtime context."""
+
+ @property
+ @abc.abstractmethod
+ def is_closed(self) -> bool:
+ """Whether the stream has been closed yet.
+
+ :rtype: bool
+ :return: Whether the stream has been closed yet.
+ """
+
+ @property
+ @abc.abstractmethod
+ def is_stream_consumed(self) -> bool:
+ """Whether the stream has been consumed.
+
+ :rtype: bool
+ :return: Whether the stream has been consumed.
+ """
+
+ @property
+ @abc.abstractmethod
+ def content(self) -> bytes:
+ """Content of the stream, in bytes.
+
+ :rtype: bytes
+ :return: The stream's content in bytes.
+ """
+
+ @abc.abstractmethod
+ async def read(self) -> bytes:
+ """Read the stream's bytes.
+
+ :return: The read in bytes
+ :rtype: bytes
+ """
+
+ @abc.abstractmethod
+ async def close(self) -> None:
+ """close the stream"""
+
+ @abc.abstractmethod
+ async def iter_bytes(self, **kwargs: Any) -> AsyncIterator[bytes]:
+ """Iterates over the stream's bytes. Will decompress in the process.
+
+        :return: An async iterator of bytes from the stream
+        :rtype: AsyncIterator[bytes]
+ """
+
+class AsyncHttpResponse(_HttpResponseBase, AsyncContextManager["AsyncHttpResponse"]):
+ """Abstract base class for a HttpResponse, the response from an HTTP request."""
+
+ @abc.abstractmethod
+ async def read(self) -> bytes:
+ """Read the response's bytes.
+
+ :return: The read in bytes
+ :rtype: bytes
+ """
+
+ @abc.abstractmethod
+ async def close(self) -> None:
+ """close the response"""
+
+ @abc.abstractmethod
+ async def iter_bytes(self, **kwargs: Any) -> AsyncIterator[bytes]:
+ """Asynchronously iterates over the response's bytes. Will decompress in the process.
+
+ :return: An async iterator of bytes from the response
+ :rtype: AsyncIterator[bytes]
+ """
+ raise NotImplementedError()
+ yield # pylint: disable=unreachable
+
+ def __repr__(self) -> str:
+ return f''
+
+
+class AsyncHttpClient(abc.ABC):
+ """Abstract base class for Async HTTP client."""
+
+ @abc.abstractmethod
+ async def send(self, request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse:
+ """Sends an HTTP request and returns an HTTP response.
+
+ An error is returned if caused by client policy (such as CheckRedirect),
+ or failure to speak HTTP (such as a network connectivity problem).
+ A non-2xx status code doesn't cause an error.
+
+        :type request: HttpRequest
+        :param request: the http request sent to server.
+
+        :rtype: AsyncHttpResponse
+ :return: The response object.
+ """
+
+ @abc.abstractmethod
+ async def open(self) -> None:
+ """Assign new session if one does not already exist."""
+
+ @abc.abstractmethod
+ async def close(self) -> None:
+ """Close the session if it is not externally owned."""
diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py
index 1919e20..e80f5f1 100644
--- a/tests/integration/__init__.py
+++ b/tests/integration/__init__.py
@@ -12,7 +12,7 @@
from urllib.parse import quote
import requests
import alibabacloud_oss_v2 as oss
-
+import alibabacloud_oss_v2.aio as ossaio
ACCESS_ID = os.getenv("OSS_TEST_ACCESS_KEY_ID")
ACCESS_KEY = os.getenv("OSS_TEST_ACCESS_KEY_SECRET")
@@ -62,6 +62,7 @@ def get_default_client() -> oss.Client:
return _defaultClient
+
def get_invalid_ak_client() -> oss.Client:
global _invalidAkClient
if _invalidAkClient is not None:
@@ -109,6 +110,22 @@ def get_client_use_ststoken(region:str, endpoint:str) -> oss.Client:
return oss.Client(cfg)
+def get_async_client(region:str, endpoint:str) -> ossaio.AsyncClient:
+ cfg = oss.config.load_default()
+ cfg.credentials_provider = oss.credentials.StaticCredentialsProvider(ACCESS_ID, ACCESS_KEY)
+ cfg.region = region
+ cfg.endpoint = endpoint
+ return ossaio.AsyncClient(cfg)
+
+def get_async_client(region:str, endpoint:str, provider:oss.credentials.CredentialsProvider = None) -> ossaio.AsyncClient:
+ cfg = oss.config.load_default()
+ if provider is None:
+ provider = oss.credentials.StaticCredentialsProvider(ACCESS_ID, ACCESS_KEY)
+ cfg.credentials_provider = provider
+ cfg.region = region
+ cfg.endpoint = endpoint
+ return ossaio.AsyncClient(cfg)
+
def get_kms_id(region:str) ->str:
return
diff --git a/tests/integration/test_bucket_basic_async.py b/tests/integration/test_bucket_basic_async.py
new file mode 100644
index 0000000..9502951
--- /dev/null
+++ b/tests/integration/test_bucket_basic_async.py
@@ -0,0 +1,463 @@
+# pylint: skip-file
+from typing import cast
+import tempfile
+import datetime
+import requests
+import unittest
+import alibabacloud_oss_v2 as oss
+from . import (
+ TestIntegration,
+ random_bucket_name,
+ random_str,
+ REGION,
+ ENDPOINT,
+ OBJECTNAME_PREFIX,
+ get_async_client,
+)
+
+class TestBucketBasicAsync(TestIntegration, unittest.IsolatedAsyncioTestCase):
+
+ async def asyncSetUp(self):
+ self.async_client = get_async_client(REGION, ENDPOINT)
+ self.invalid_async_client = get_async_client(
+ REGION,
+ ENDPOINT,
+ oss.credentials.StaticCredentialsProvider('invalid-ak', 'invalid')
+ )
+
+ async def asyncTearDown(self):
+ await self.async_client.close()
+ await self.invalid_async_client.close()
+
+ async def test_put_bucket(self):
+
+ bucket_name = random_bucket_name()
+ result = await self.async_client.put_bucket(oss.PutBucketRequest(
+ bucket=bucket_name,
+ acl='private',
+ create_bucket_configuration=oss.CreateBucketConfiguration(
+ storage_class='IA'
+ )
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+ async def test_put_bucket_fail(self):
+ bucket_name = random_bucket_name()
+ try:
+ await self.invalid_async_client.put_bucket(oss.PutBucketRequest(
+ bucket=bucket_name,
+ acl='private',
+ create_bucket_configuration=oss.CreateBucketConfiguration(
+ storage_class='IA'
+ )
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+
+ async def test_bucket_acl(self):
+ bucket_name = random_bucket_name()
+ result = await self.async_client.put_bucket(oss.PutBucketRequest(
+ bucket=bucket_name,
+ acl='private',
+ create_bucket_configuration=oss.CreateBucketConfiguration(
+ storage_class='IA'
+ )
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+ # get bucket acl
+ result = await self.async_client.get_bucket_acl(oss.GetBucketAclRequest(
+ bucket=bucket_name,
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('private', result.acl)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+ # put bucket acl
+ result = await self.async_client.put_bucket_acl(oss.PutBucketAclRequest(
+ bucket=bucket_name,
+ acl='public-read-write'
+ ))
+ self.assertEqual(200, result.status_code)
+
+ # get bucket acl
+ result = await self.async_client.get_bucket_acl(oss.GetBucketAclRequest(
+ bucket=bucket_name,
+ ))
+ self.assertEqual('public-read-write', result.acl)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+ async def test_put_bucket_acl_fail(self):
+ bucket_name = random_bucket_name()
+ try:
+ await self.invalid_async_client.put_bucket_acl(oss.PutBucketAclRequest(
+ bucket=bucket_name,
+ acl='public-read-write'
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ async def test_get_bucket_acl_fail(self):
+ bucket_name = random_bucket_name()
+ try:
+ await self.invalid_async_client.get_bucket_acl(oss.GetBucketAclRequest(
+ bucket=bucket_name,
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(404, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('NoSuchBucket', serr.code)
+
+
+ async def test_list_objects_v2(self):
+ bucket_name = random_bucket_name()
+ result = await self.async_client.put_bucket(oss.PutBucketRequest(
+ bucket=bucket_name,
+ acl='private',
+ create_bucket_configuration=oss.CreateBucketConfiguration(
+ storage_class='IA'
+ )
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+ result = await self.async_client.list_objects_v2(oss.ListObjectsV2Request(
+ bucket=bucket_name,
+ delimiter='/',
+ start_after='b',
+ encoding_type='url',
+ continuation_token='',
+ max_keys=10,
+ prefix='aaa',
+ fetch_owner=True,
+ request_payer='requester',
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+ self.assertEqual(bucket_name, result.name)
+ self.assertEqual('/', result.delimiter)
+ self.assertEqual('b', result.start_after)
+ self.assertEqual('url', result.encoding_type)
+ self.assertEqual(10, result.max_keys)
+ self.assertEqual('aaa', result.prefix)
+
+
+ async def test_list_objects_v2_fail(self):
+ bucket_name = random_bucket_name()
+ try:
+ await self.invalid_async_client.list_objects_v2(oss.ListObjectsV2Request(
+ bucket=bucket_name,))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(404, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('NoSuchBucket', serr.code)
+
+ try:
+ await self.invalid_async_client.list_objects_v2(oss.ListObjectsV2Request(
+ bucket=bucket_name,
+ delimiter='/',
+ start_after='b',
+ encoding_type='url',
+ continuation_token='',
+ max_keys=10,
+ prefix='aaa',
+ fetch_owner=True,
+ request_payer='requester',
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(404, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('NoSuchBucket', serr.code)
+
+ async def test_get_bucket_stat(self):
+ bucket_name = random_bucket_name()
+ result = await self.async_client.put_bucket(oss.PutBucketRequest(
+ bucket=bucket_name,
+ acl='private',
+ create_bucket_configuration=oss.CreateBucketConfiguration(
+ storage_class='IA'
+ )
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+ # get bucket stat
+ result = await self.async_client.get_bucket_stat(oss.models.GetBucketStatRequest(
+ bucket=bucket_name,
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+ self.assertTrue(result.storage==0)
+ self.assertTrue(result.object_count==0)
+ self.assertTrue(result.multi_part_upload_count==0)
+ self.assertTrue(result.live_channel_count==0)
+ self.assertTrue(result.last_modified_time==0)
+ self.assertTrue(result.standard_storage==0)
+ self.assertTrue(result.standard_object_count==0)
+ self.assertTrue(result.infrequent_access_storage==0)
+ self.assertTrue(result.infrequent_access_real_storage==0)
+ self.assertTrue(result.infrequent_access_object_count==0)
+ self.assertTrue(result.archive_storage==0)
+ self.assertTrue(result.archive_real_storage==0)
+ self.assertTrue(result.archive_object_count==0)
+ self.assertTrue(result.cold_archive_storage==0)
+ self.assertTrue(result.cold_archive_real_storage==0)
+ self.assertTrue(result.cold_archive_object_count==0)
+ self.assertTrue(result.deep_cold_archive_storage==0)
+ self.assertTrue(result.deep_cold_archive_real_storage==0)
+ self.assertTrue(result.deep_cold_archive_object_count==0)
+ self.assertTrue(result.delete_marker_count==0)
+
+ async def test_get_bucket_stat_fail(self):
+ bucket_name = random_bucket_name()
+ try:
+ await self.invalid_async_client.get_bucket_stat(oss.models.GetBucketStatRequest(
+ bucket=bucket_name,
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(404, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('NoSuchBucket', serr.code)
+
+ async def test_get_bucket_location(self):
+ bucket_name = random_bucket_name()
+ result = await self.async_client.put_bucket(oss.PutBucketRequest(
+ bucket=bucket_name,
+ acl='private',
+ create_bucket_configuration=oss.CreateBucketConfiguration(
+ storage_class='IA'
+ )
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+ # get bucket location
+ result = await self.async_client.get_bucket_location(oss.models.GetBucketLocationRequest(
+ bucket=bucket_name,
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(f'oss-{REGION}', result.location)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+ async def test_get_bucket_location_fail(self):
+ bucket_name = random_bucket_name()
+ try:
+ await self.invalid_async_client.get_bucket_location(oss.models.GetBucketLocationRequest(
+ bucket=bucket_name,
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(404, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('NoSuchBucket', serr.code)
+
+ async def test_get_bucket_info(self):
+ bucket_name = random_bucket_name()
+ result = await self.async_client.put_bucket(oss.PutBucketRequest(
+ bucket=bucket_name,
+ acl='private',
+ create_bucket_configuration=oss.CreateBucketConfiguration(
+ storage_class='IA'
+ )
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+        # get bucket info
+ result = await self.async_client.get_bucket_info(oss.models.GetBucketInfoRequest(
+ bucket=bucket_name,
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+ self.assertEqual('private', result.bucket_info.acl)
+ self.assertEqual('Disabled', result.bucket_info.access_monitor)
+ self.assertEqual(False, result.bucket_info.block_public_access)
+ self.assertEqual('LRS', result.bucket_info.data_redundancy_type)
+ self.assertEqual('Disabled', result.bucket_info.cross_region_replication)
+ self.assertIsNotNone(result.bucket_info.resource_group_id)
+ self.assertIsNotNone(result.bucket_info.creation_date)
+ self.assertIsNotNone(result.bucket_info.extranet_endpoint)
+ self.assertIsNotNone(result.bucket_info.intranet_endpoint)
+ self.assertIsNotNone(result.bucket_info.location)
+ self.assertIsNotNone(result.bucket_info.transfer_acceleration)
+ self.assertEqual('IA', result.bucket_info.storage_class)
+ self.assertIsNotNone(result.bucket_info.owner.id)
+ self.assertIsNotNone(result.bucket_info.owner.display_name)
+ self.assertIsNotNone(result.bucket_info.sse_rule.sse_algorithm)
+
+
+ async def test_get_bucket_info_fail(self):
+ bucket_name = random_bucket_name()
+ try:
+ await self.invalid_async_client.get_bucket_info(oss.models.GetBucketInfoRequest(
+ bucket=bucket_name,
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(404, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('NoSuchBucket', serr.code)
+
+
+ async def test_bucket_versions(self):
+ bucket_name = random_bucket_name()
+ result = await self.async_client.put_bucket(oss.PutBucketRequest(
+ bucket=bucket_name,
+ acl='private',
+ create_bucket_configuration=oss.CreateBucketConfiguration(
+ storage_class='IA'
+ )
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+ # put bucket versioning
+ result = await self.async_client.put_bucket_versioning(oss.PutBucketVersioningRequest(
+ bucket=bucket_name,
+ versioning_configuration=oss.VersioningConfiguration(
+ status='Enabled'
+ )
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+ # get bucket versioning
+ result = await self.async_client.get_bucket_versioning(oss.GetBucketVersioningRequest(
+ bucket=bucket_name,
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('Enabled', result.version_status)
+
+ # list object versions
+ result = await self.async_client.list_object_versions(oss.ListObjectVersionsRequest(
+ bucket=bucket_name,
+ ))
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+ self.assertEqual(bucket_name, result.name)
+ self.assertEqual(False, result.is_truncated)
+ self.assertEqual(100, result.max_keys)
+
+ # list object versions case 2
+ result = await self.async_client.list_object_versions(oss.ListObjectVersionsRequest(
+ bucket=bucket_name,
+ delimiter='/',
+ key_marker='MARKER',
+ max_keys=999,
+ prefix='AA/a',
+ encoding_type='url',
+ ))
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+ self.assertEqual(bucket_name, result.name)
+ self.assertEqual(False, result.is_truncated)
+ self.assertEqual(999, result.max_keys)
+ self.assertEqual('/', result.delimiter)
+ self.assertEqual('MARKER', result.key_marker)
+ self.assertEqual('AA/a', result.prefix)
+ self.assertEqual('url', result.encoding_type)
+
+ async def test_put_bucket_versioning_fail(self):
+ bucket_name = random_bucket_name()
+ try:
+ await self.invalid_async_client.put_bucket_versioning(oss.PutBucketVersioningRequest(
+ bucket=bucket_name,
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(404, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('NoSuchBucket', serr.code)
+
+ async def test_get_bucket_versioning_fail(self):
+ bucket_name = random_bucket_name()
+ try:
+ await self.invalid_async_client.get_bucket_versioning(oss.GetBucketVersioningRequest(
+ bucket=bucket_name,
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(404, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('NoSuchBucket', serr.code)
+
+ async def test_list_object_versions_fail(self):
+ bucket_name = random_bucket_name()
+ try:
+ await self.invalid_async_client.list_object_versions(oss.ListObjectVersionsRequest(
+ bucket=bucket_name,
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(404, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('NoSuchBucket', serr.code)
diff --git a/tests/integration/test_client.py b/tests/integration/test_client.py
index 7e3eed5..44d51d3 100644
--- a/tests/integration/test_client.py
+++ b/tests/integration/test_client.py
@@ -1,12 +1,23 @@
# pylint: skip-file
from typing import cast
import os
-import time
+import io
import tempfile
import datetime
import requests
import alibabacloud_oss_v2 as oss
-from . import TestIntegration, random_bucket_name, random_str, REGION, OBJECTNAME_PREFIX, get_client
+import alibabacloud_oss_v2.crc as osscrc
+from . import (
+ TestIntegration,
+ random_bucket_name,
+ random_str,
+ REGION,
+ OBJECTNAME_PREFIX,
+ ENDPOINT,
+ ACCESS_ID,
+ ACCESS_KEY,
+ get_client
+)
from urllib.parse import quote, unquote
class TestBucketBasic(TestIntegration):
@@ -444,6 +455,198 @@ def test_list_object_versions_fail(self):
self.assertEqual('NoSuchBucket', serr.code)
+ def test_put_object_with_different_body_type(self):
+ len = 300 * 1024 + 1234
+ data = random_str(len)
+
+ crc64 = osscrc.Crc64(0)
+ crc64.update(data.encode())
+ ccrc = str(crc64.sum64())
+
+ # str
+ key = 'test-key-different_body-str'
+ result = self.client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = self.client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ # bytes
+ key = 'test-key-different_body-bytes'
+ result = self.client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data.encode(),
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = self.client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ # IO[str]
+ key = 'test-key-different_body-io-str'
+ result = self.client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=io.StringIO(data),
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = self.client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ # IO[bytes]
+ key = 'test-key-different_body-io-bytes'
+ result = self.client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=io.BytesIO(data.encode()),
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = self.client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ def test_put_object_with_different_body_type_disable_crc(self):
+ len = 350 * 1024 + 1234
+ data = random_str(len)
+
+ crc64 = osscrc.Crc64(0)
+ crc64.update(data.encode())
+ ccrc = str(crc64.sum64())
+
+ cfg = oss.config.load_default()
+ cfg.credentials_provider = oss.credentials.StaticCredentialsProvider(ACCESS_ID, ACCESS_KEY)
+ cfg.region = REGION
+ cfg.endpoint = ENDPOINT
+ cfg.disable_upload_crc64_check = True
+ client = oss.Client(cfg)
+
+ # str
+ key = 'test-key-different_body-no-crc-str'
+ result = client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ # bytes
+ key = 'test-key-different_body-no-crc-bytes'
+ result = client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data.encode(),
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ # IO[str]
+ key = 'test-key-different_body-io-no-crc-str'
+ result = client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=io.StringIO(data),
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ # IO[bytes]
+ key = 'test-key-different_body-io-no-crc-bytes'
+ result = client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=io.BytesIO(data.encode()),
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+
class TestRegion(TestIntegration):
def test_describe_regions(self):
result = self.client.describe_regions(oss.DescribeRegionsRequest(
diff --git a/tests/integration/test_client_extension_async.py b/tests/integration/test_client_extension_async.py
new file mode 100644
index 0000000..3d70fed
--- /dev/null
+++ b/tests/integration/test_client_extension_async.py
@@ -0,0 +1,107 @@
+# pylint: skip-file
+from typing import cast
+import tempfile
+import datetime
+import requests
+import unittest
+import alibabacloud_oss_v2 as oss
+from . import (
+ TestIntegration,
+ random_bucket_name,
+ random_str,
+ REGION,
+ ENDPOINT,
+ OBJECTNAME_PREFIX,
+ get_async_client,
+)
+
+class TestExtensionAsync(TestIntegration, unittest.IsolatedAsyncioTestCase):
+
+ async def asyncSetUp(self):
+ self.async_client = get_async_client(REGION, ENDPOINT)
+ self.invalid_async_client = get_async_client(
+ REGION,
+ ENDPOINT,
+ oss.credentials.StaticCredentialsProvider('invalid-ak', 'invalid')
+ )
+
+ async def asyncTearDown(self):
+ await self.async_client.close()
+ await self.invalid_async_client.close()
+
+
+ async def test_is_bucket_exist(self):
+ no_perm_client = self.invalid_async_client
+ err_client = get_async_client("", "")
+
+ bucket_name_no_exist = self.bucket_name + "-no-exist"
+
+ exist = await self.async_client.is_bucket_exist(self.bucket_name)
+ self.assertTrue(exist)
+
+ exist = await self.async_client.is_bucket_exist(bucket_name_no_exist)
+ self.assertFalse(exist)
+
+ exist = await no_perm_client.is_bucket_exist(self.bucket_name)
+ self.assertTrue(exist)
+
+ exist = await no_perm_client.is_bucket_exist(bucket_name_no_exist)
+ self.assertFalse(exist)
+
+ try:
+ exist = await err_client.is_bucket_exist(self.bucket_name)
+ self.fail("should not be here")
+ except oss.exceptions.OperationError as err:
+ self.assertIn('invalid field, endpoint', str(err))
+
+ async def test_is_object_exist(self):
+ bucket_name_no_exist = self.bucket_name + "-no-exist"
+ object_name = 'object-exist'
+ object_name_no_exist = "object-no-exist"
+ no_perm_client = self.invalid_async_client
+ err_client = get_async_client("", "")
+
+ result = await self.async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=object_name,
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ exist = await self.async_client.is_object_exist(self.bucket_name, object_name)
+ self.assertTrue(exist)
+
+ exist = await self.async_client.is_object_exist(self.bucket_name, object_name_no_exist)
+ self.assertFalse(exist)
+
+ try:
+ exist = await self.async_client.is_object_exist(bucket_name_no_exist, object_name)
+ self.fail("should not be here")
+ except oss.exceptions.OperationError as err:
+ self.assertIn('NoSuchBucket', str(err))
+
+ try:
+ exist = await self.async_client.is_object_exist(bucket_name_no_exist, object_name_no_exist)
+ self.fail("should not be here")
+ except oss.exceptions.OperationError as err:
+ self.assertIn('NoSuchBucket', str(err))
+
+
+ try:
+ exist = await no_perm_client.is_object_exist(self.bucket_name, object_name)
+ self.fail("should not be here")
+ except oss.exceptions.OperationError as err:
+ self.assertIn('InvalidAccessKeyId', str(err))
+
+ try:
+ exist = await no_perm_client.is_object_exist(bucket_name_no_exist, object_name_no_exist)
+ self.fail("should not be here")
+ except oss.exceptions.OperationError as err:
+ self.assertIn('NoSuchBucket', str(err))
+
+ try:
+ exist = await err_client.is_object_exist(self.bucket_name, object_name)
+ self.fail("should not be here")
+ except oss.exceptions.OperationError as err:
+ self.assertIn('invalid field, endpoint', str(err))
+
diff --git a/tests/integration/test_multipart_upload_async.py b/tests/integration/test_multipart_upload_async.py
new file mode 100644
index 0000000..d92442e
--- /dev/null
+++ b/tests/integration/test_multipart_upload_async.py
@@ -0,0 +1,477 @@
+# pylint: skip-file
+from typing import cast
+import tempfile
+import datetime
+import requests
+import unittest
+import alibabacloud_oss_v2 as oss
+from . import (
+ TestIntegration,
+ random_bucket_name,
+ random_str,
+ REGION,
+ ENDPOINT,
+ OBJECTNAME_PREFIX,
+ get_async_client,
+)
+
+class TestMultipartUpload(TestIntegration, unittest.IsolatedAsyncioTestCase):
+ async def asyncSetUp(self):
+ self.async_client = get_async_client(REGION, ENDPOINT)
+ self.invalid_async_client = get_async_client(
+ REGION,
+ ENDPOINT,
+ oss.credentials.StaticCredentialsProvider('invalid-ak', 'invalid')
+ )
+
+ async def asyncTearDown(self):
+ await self.async_client.close()
+ await self.invalid_async_client.close()
+
+ async def test_multipart_upload_object(self):
+ length1 = 100*1024
+ data1 = random_str(length1)
+ length2 = 1234
+ data2 = random_str(length2)
+ key = OBJECTNAME_PREFIX + random_str(16)
+
+ result = await self.async_client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.InitiateMultipartUploadResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(self.bucket_name, result.bucket)
+ self.assertEqual(key, result.key)
+ self.assertIsNotNone(key, result.upload_id)
+
+ presult1 = await self.async_client.upload_part(oss.UploadPartRequest(
+ bucket=self.bucket_name,
+ key=key,
+ part_number=1,
+ upload_id=result.upload_id,
+ body=data1,
+ ))
+ self.assertIsNotNone(presult1)
+ self.assertIsInstance(presult1, oss.UploadPartResult)
+ self.assertEqual(200, presult1.status_code)
+ self.assertIsNotNone(presult1.content_md5)
+ self.assertIsNotNone(presult1.etag)
+ self.assertIsNotNone(presult1.hash_crc64)
+
+ presult2 = await self.async_client.upload_part(oss.UploadPartRequest(
+ bucket=self.bucket_name,
+ key=key,
+ part_number=2,
+ upload_id=result.upload_id,
+ body=data2,
+ ))
+ self.assertIsNotNone(presult2)
+ self.assertIsInstance(presult2, oss.UploadPartResult)
+ self.assertEqual(200, presult2.status_code)
+ self.assertIsNotNone(presult2.content_md5)
+ self.assertIsNotNone(presult2.etag)
+ self.assertIsNotNone(presult2.hash_crc64)
+
+ lpresult = await self.async_client.list_parts(oss.ListPartsRequest(
+ bucket=self.bucket_name,
+ key=key,
+ upload_id=result.upload_id,
+ ))
+ self.assertIsNotNone(lpresult)
+ self.assertIsInstance(lpresult, oss.ListPartsResult)
+ self.assertEqual(200, lpresult.status_code)
+ self.assertEqual(self.bucket_name, lpresult.bucket)
+ self.assertEqual(key, lpresult.key)
+ self.assertEqual(2, lpresult.next_part_number_marker)
+ self.assertEqual(0, lpresult.part_number_marker)
+ self.assertEqual(False, lpresult.is_truncated)
+ self.assertEqual(1000, lpresult.max_parts)
+ self.assertEqual('Standard', lpresult.storage_class)
+ self.assertEqual(2, len(lpresult.parts))
+ self.assertEqual(1, lpresult.parts[0].part_number)
+ self.assertEqual(length1, lpresult.parts[0].size)
+ self.assertEqual(presult1.etag, lpresult.parts[0].etag)
+ self.assertEqual(presult1.hash_crc64, lpresult.parts[0].hash_crc64)
+ self.assertEqual(2, lpresult.parts[1].part_number)
+ self.assertEqual(length2, lpresult.parts[1].size)
+ self.assertEqual(presult2.etag, lpresult.parts[1].etag)
+ self.assertEqual(presult2.hash_crc64, lpresult.parts[1].hash_crc64)
+
+ cresult = await self.async_client.complete_multipart_upload(oss.CompleteMultipartUploadRequest(
+ bucket=self.bucket_name,
+ key=key,
+ upload_id=result.upload_id,
+ body=data2,
+ complete_multipart_upload=oss.CompleteMultipartUpload(
+ parts=[
+ oss.UploadPart(part_number=1, etag=presult1.etag),
+ oss.UploadPart(part_number=2, etag=presult2.etag),
+ ]
+ )
+ ))
+ self.assertIsNotNone(cresult)
+ self.assertIsInstance(cresult, oss.CompleteMultipartUploadResult)
+ self.assertEqual(200, cresult.status_code)
+ self.assertEqual(self.bucket_name, cresult.bucket)
+ self.assertEqual(key, cresult.key)
+ self.assertIsNotNone(cresult.etag)
+ self.assertIsNotNone(cresult.hash_crc64)
+
+ gresult = await self.async_client.get_object(oss.GetObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(gresult)
+ self.assertEqual(200, gresult.status_code)
+ self.assertEqual(length1 + length2, gresult.content_length)
+ items = []
+ async for item in await gresult.body.iter_bytes():
+ items.append(item)
+ rdata = b''.join(items) or b''
+ self.assertEqual(data1 + data2, rdata.decode())
+
+ async def test_multipart_upload_object_special_key(self):
+ length1 = 100*1024
+ data1 = random_str(length1)
+ length2 = 1234
+ data2 = random_str(length2)
+ str1 = b'\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f'
+ key = OBJECTNAME_PREFIX + random_str(16) + str1.decode()
+
+ result = await self.async_client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.InitiateMultipartUploadResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(self.bucket_name, result.bucket)
+ self.assertEqual(key, result.key)
+ self.assertIsNotNone(key, result.upload_id)
+
+ presult1 = await self.async_client.upload_part(oss.UploadPartRequest(
+ bucket=self.bucket_name,
+ key=key,
+ part_number=1,
+ upload_id=result.upload_id,
+ body=data1,
+ ))
+ self.assertIsNotNone(presult1)
+ self.assertIsInstance(presult1, oss.UploadPartResult)
+ self.assertEqual(200, presult1.status_code)
+ self.assertIsNotNone(presult1.content_md5)
+ self.assertIsNotNone(presult1.etag)
+ self.assertIsNotNone(presult1.hash_crc64)
+
+ presult2 = await self.async_client.upload_part(oss.UploadPartRequest(
+ bucket=self.bucket_name,
+ key=key,
+ part_number=2,
+ upload_id=result.upload_id,
+ body=data2,
+ ))
+ self.assertIsNotNone(presult2)
+ self.assertIsInstance(presult2, oss.UploadPartResult)
+ self.assertEqual(200, presult2.status_code)
+ self.assertIsNotNone(presult2.content_md5)
+ self.assertIsNotNone(presult2.etag)
+ self.assertIsNotNone(presult2.hash_crc64)
+
+ cresult = await self.async_client.complete_multipart_upload(oss.CompleteMultipartUploadRequest(
+ bucket=self.bucket_name,
+ key=key,
+ upload_id=result.upload_id,
+ body=data2,
+ complete_multipart_upload=oss.CompleteMultipartUpload(
+ parts=[
+ oss.UploadPart(part_number=1, etag=presult1.etag),
+ oss.UploadPart(part_number=2, etag=presult2.etag),
+ ]
+ )
+ ))
+ self.assertIsNotNone(cresult)
+ self.assertIsInstance(cresult, oss.CompleteMultipartUploadResult)
+ self.assertEqual(200, cresult.status_code)
+ self.assertEqual(self.bucket_name, cresult.bucket)
+ self.assertEqual(key, cresult.key)
+ self.assertIsNotNone(cresult.etag)
+ self.assertIsNotNone(cresult.hash_crc64)
+
+ gresult = await self.async_client.get_object(oss.GetObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(gresult)
+ self.assertEqual(200, gresult.status_code)
+ self.assertEqual(length1 + length2, gresult.content_length)
+ self.assertEqual((data1 + data2).encode(), gresult.body.content)
+
+
+ async def test_multipart_upload_object_encoding_type(self):
+ str1 = b'\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10'
+ key = OBJECTNAME_PREFIX + random_str(16) + str1.decode()
+
+ result = await self.async_client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.InitiateMultipartUploadResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(self.bucket_name, result.bucket)
+ self.assertEqual(key, result.key)
+ self.assertIsNotNone(key, result.upload_id)
+
+ presult1 = await self.async_client.upload_part(oss.UploadPartRequest(
+ bucket=self.bucket_name,
+ key=key,
+ part_number=1,
+ upload_id=result.upload_id,
+ body='hello world',
+ ))
+ self.assertIsNotNone(presult1)
+ self.assertIsInstance(presult1, oss.UploadPartResult)
+ self.assertEqual(200, presult1.status_code)
+ self.assertIsNotNone(presult1.content_md5)
+ self.assertIsNotNone(presult1.etag)
+ self.assertIsNotNone(presult1.hash_crc64)
+
+ lpresult = await self.async_client.list_parts(oss.ListPartsRequest(
+ bucket=self.bucket_name,
+ key=key,
+ upload_id=result.upload_id,
+ ))
+ self.assertIsNotNone(lpresult)
+ self.assertIsInstance(lpresult, oss.ListPartsResult)
+ self.assertEqual(200, lpresult.status_code)
+ self.assertEqual(self.bucket_name, lpresult.bucket)
+ self.assertEqual(key, lpresult.key)
+
+ luresult = await self.async_client.list_multipart_uploads(oss.ListMultipartUploadsRequest(
+ bucket=self.bucket_name,
+ ))
+ self.assertIsNotNone(luresult)
+ self.assertIsInstance(luresult, oss.ListMultipartUploadsResult)
+ self.assertEqual(200, luresult.status_code)
+ self.assertEqual(self.bucket_name, luresult.bucket)
+ self.assertEqual(False, luresult.is_truncated)
+ self.assertEqual(None, luresult.key_marker)
+ self.assertEqual(key, luresult.next_key_marker)
+ self.assertEqual(1, len(luresult.uploads))
+ self.assertEqual(key, luresult.uploads[0].key)
+ self.assertEqual(result.upload_id, luresult.uploads[0].upload_id)
+
+ abresult = await self.async_client.abort_multipart_upload(oss.AbortMultipartUploadRequest(
+ bucket=self.bucket_name,
+ key=key,
+ upload_id=result.upload_id,
+ ))
+ self.assertIsNotNone(abresult)
+ self.assertIsInstance(abresult, oss.AbortMultipartUploadResult)
+ self.assertEqual(204, abresult.status_code)
+
+ async def test_multipart_upload_from_file(self):
+ part_size = 100 * 1024
+ data_size = 3 * part_size + 1245
+ data = random_str(data_size).encode()
+ key = 'multipart-file.bin'
+
+ #init
+ initresult = await self.async_client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(initresult)
+ self.assertIsInstance(initresult, oss.InitiateMultipartUploadResult)
+ self.assertEqual(200, initresult.status_code)
+
+ #upload part
+ part_number = 1
+ upload_parts = []
+ with tempfile.TemporaryFile('w+b') as f:
+ f.write(data)
+ for start in range(0, data_size, part_size):
+ n = part_size
+ if start + n > data_size:
+ n = data_size - start
+ reader = oss.io_utils.SectionReader(oss.io_utils.ReadAtReader(f), start, n)
+ upresult = await self.async_client.upload_part(oss.UploadPartRequest(
+ bucket=self.bucket_name,
+ key=key,
+ upload_id=initresult.upload_id,
+ part_number=part_number,
+ body=reader
+ ))
+ self.assertIsNotNone(upresult)
+ self.assertIsInstance(upresult, oss.UploadPartResult)
+ self.assertEqual(200, upresult.status_code)
+ upload_parts.append(oss.UploadPart(part_number=part_number, etag=upresult.etag))
+ part_number += 1
+
+ self.assertEqual(4, len(upload_parts))
+
+ #listpart
+ lpresult = await self.async_client.list_parts(oss.ListPartsRequest(
+ bucket=self.bucket_name,
+ key=key,
+ upload_id=initresult.upload_id
+ ))
+ self.assertIsNotNone(lpresult)
+ self.assertIsInstance(lpresult, oss.ListPartsResult)
+ self.assertEqual(200, lpresult.status_code)
+
+ #complete
+ parts = sorted(upload_parts, key=lambda p: p.part_number)
+ cmresult = await self.async_client.complete_multipart_upload(oss.CompleteMultipartUploadRequest(
+ bucket=self.bucket_name,
+ key=key,
+ upload_id=initresult.upload_id,
+ complete_multipart_upload=oss.CompleteMultipartUpload(
+ parts=parts
+ )
+ ))
+ self.assertIsNotNone(cmresult)
+ self.assertIsInstance(cmresult, oss.CompleteMultipartUploadResult)
+ self.assertEqual(200, cmresult.status_code)
+
+ # get object and check
+ gowresult = await self.async_client.get_object(oss.GetObjectRequest(
+ bucket=self.bucket_name,
+ key=key
+ ))
+ self.assertIsNotNone(gowresult)
+ self.assertIsInstance(gowresult, oss.GetObjectResult)
+ self.assertEqual(200, gowresult.status_code)
+ self.assertEqual(data_size, len(gowresult.body.content))
+ self.assertEqual(data, gowresult.body.content)
+
+ async def test_initiate_multipart_upload_fail(self):
+ try:
+ await self.invalid_async_client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ ))
+ self.fail("should not be here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+ self.assertIn('InitiateMultipartUpload', str(e))
+ self.assertIn('Endpoint: POST', str(e))
+
+ async def test_upload_part_fail(self):
+ try:
+ await self.invalid_async_client.upload_part(oss.UploadPartRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ upload_id='upload-id',
+ part_number=1,
+ body='hello world'
+ ))
+ self.fail("should not be here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+ self.assertIn('UploadPart', str(e))
+ self.assertIn('Endpoint: PUT', str(e))
+
+ async def test_upload_part_copy_fail(self):
+ try:
+ await self.invalid_async_client.upload_part_copy(oss.UploadPartCopyRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ source_key='source-invalid-key',
+ upload_id='upload-id',
+ part_number=1,
+ body='hello world'
+ ))
+ self.fail("should not be here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+ self.assertIn('UploadPartCopy', str(e))
+ self.assertIn('Endpoint: PUT', str(e))
+
+ async def test_complete_multipart_upload_fail(self):
+ try:
+ await self.invalid_async_client.complete_multipart_upload(oss.CompleteMultipartUploadRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ upload_id='upload-id',
+ complete_all='yes'
+ ))
+ self.fail("should not be here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+ self.assertIn('CompleteMultipartUpload', str(e))
+ self.assertIn('Endpoint: POST', str(e))
+
+ async def test_abort_multipart_upload_fail(self):
+ try:
+ await self.invalid_async_client.abort_multipart_upload(oss.AbortMultipartUploadRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ upload_id='upload-id',
+ ))
+ self.fail("should not be here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+ self.assertIn('AbortMultipartUpload', str(e))
+ self.assertIn('Endpoint: DELETE', str(e))
+
+ async def test_list_multipart_uploads_fail(self):
+ try:
+ await self.invalid_async_client.list_multipart_uploads(oss.ListMultipartUploadsRequest(
+ bucket=self.bucket_name,
+ ))
+ self.fail("should not be here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+ self.assertIn('ListMultipartUploads', str(e))
+ self.assertIn('Endpoint: GET', str(e))
+
+ async def test_list_parts_fail(self):
+ try:
+ await self.invalid_async_client.list_parts(oss.ListPartsRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ upload_id='upload-id',
+ ))
+ self.fail("should not be here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+ self.assertIn('ListParts', str(e))
+ self.assertIn('Endpoint: GET', str(e))
diff --git a/tests/integration/test_object_basic_async.py b/tests/integration/test_object_basic_async.py
new file mode 100644
index 0000000..2257075
--- /dev/null
+++ b/tests/integration/test_object_basic_async.py
@@ -0,0 +1,658 @@
+# pylint: skip-file
+import io
+from typing import cast
+import unittest
+import alibabacloud_oss_v2 as oss
+import alibabacloud_oss_v2.crc as osscrc
+from alibabacloud_oss_v2.aio.client import AsyncClient
+
+from . import (
+ TestIntegration,
+ random_bucket_name,
+ random_str,
+ REGION,
+ ENDPOINT,
+ OBJECTNAME_PREFIX,
+ ACCESS_ID,
+ ACCESS_KEY,
+ get_async_client,
+)
+
+class TestObjectBasicAsync(TestIntegration, unittest.IsolatedAsyncioTestCase):
+ async def asyncSetUp(self):
+ self.async_client = get_async_client(REGION, ENDPOINT)
+ self.invalid_async_client = get_async_client(
+ REGION,
+ ENDPOINT,
+ oss.credentials.StaticCredentialsProvider('invalid-ak', 'invalid')
+ )
+
+ async def asyncTearDown(self):
+ await self.async_client.close()
+ await self.invalid_async_client.close()
+
+ async def test_object_basic(self):
+ len = 1 * 1024 * 1024 + 1234
+ #len = 1234
+ data = random_str(len)
+ key = 'test-key'
+ result = await self.async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = await self.async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+
+ result = await self.async_client.get_object(oss.GetObjectRequest(
+ bucket=self.bucket_name,
+ key=key
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.GetObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+
+ self.assertEqual(data.encode(), result.body.content)
+ #await result.body.close()
+
+ result = await self.async_client.get_object_meta(oss.GetObjectMetaRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.GetObjectMetaResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+
+ async def test_put_object_fail(self):
+ try:
+ await self.invalid_async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ body=b'hello world',
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+ self.assertIn('PutObject', str(e))
+ self.assertIn('Endpoint: PUT', str(e))
+
+ async def test_get_object_fail(self):
+ try:
+ await self.invalid_async_client.get_object(oss.GetObjectRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+ self.assertIn('GetObject', str(e))
+ self.assertIn('Endpoint: GET', str(e))
+
+ async def test_head_object_fail(self):
+ try:
+ await self.invalid_async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+ self.assertIn('HeadObject', str(e))
+ self.assertIn('Endpoint: HEAD', str(e))
+
+ async def test_get_object_meta_fail(self):
+ try:
+ await self.invalid_async_client.get_object_meta(oss.GetObjectMetaRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+ self.assertIn('GetObjectMeta', str(e))
+ self.assertIn('Endpoint: HEAD', str(e))
+
+ async def test_get_object_range(self):
+ len = 12345
+ step = 2512
+ data = random_str(len)
+ key = 'test-key-range'
+ result = await self.async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data,
+ ))
+ self.assertIsNotNone(result)
+ self.assertEqual(200, result.status_code)
+
+ rdata = b''
+ for r in range(0, len, step):
+ gresult = await self.async_client.get_object(oss.GetObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ range_header=f'bytes={r}-{r+step-1}',
+ range_behavior='standard'
+ ))
+ self.assertIsNotNone(gresult)
+ self.assertEqual(206, gresult.status_code)
+ self.assertLessEqual(gresult.content_length, step)
+ got = b''
+ async for item in await gresult.body.iter_bytes():
+ got += item
+ rdata += got
+
+ self.assertEqual(data.encode(), rdata)
+ await gresult.body.close()
+
+ async def test_append_object(self):
+ data1 = b'hello'
+ data2 = b' world'
+
+ key = 'append_object'
+ result = await self.async_client.append_object(oss.AppendObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ position=0,
+ body=data1,
+ ))
+ self.assertIsNotNone(result)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(5, result.next_position)
+
+ result = await self.async_client.append_object(oss.AppendObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ position=result.next_position,
+ body=data2,
+ ))
+ self.assertIsNotNone(result)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(11, result.next_position)
+
+ gresult = await self.async_client.get_object(oss.GetObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+
+ self.assertEqual(b'hello world', gresult.body.content)
+ await gresult.body.close()
+
+ async def test_append_object_fail(self):
+ try:
+ await self.invalid_async_client.append_object(oss.AppendObjectRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ position=0,
+ body=b'hello world',
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ async def test_delete_object(self):
+ length = 1234
+ data = random_str(length)
+ key = f'test-key-delete-object-{random_str(16)}'
+ result = await self.async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data,
+ ))
+ self.assertIsNotNone(result)
+ self.assertEqual(200, result.status_code)
+
+ result = await self.async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(length, result.content_length)
+
+ result = await self.async_client.delete_object(oss.DeleteObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertEqual(204, result.status_code)
+ self.assertIsInstance(result, oss.DeleteObjectResult)
+
+ try:
+ result = await self.async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ except Exception as err:
+ self.assertIsNotNone(result)
+ self.assertIn('NoSuchKey', str(err))
+
+
+ key = f'test-key-delete-object-no-exist-{random_str(16)}'
+ result = await self.async_client.delete_object(oss.DeleteObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertEqual(204, result.status_code)
+ self.assertIsNone(result.version_id)
+ self.assertIsNone(result.delete_marker)
+ self.assertIsInstance(result, oss.DeleteObjectResult)
+
+
+ async def test_delete_object_fail(self):
+ try:
+ await self.invalid_async_client.delete_object(oss.DeleteObjectRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+
+ async def test_delete_multiple_objects(self):
+ length = 1234
+ data = random_str(length)
+ key = OBJECTNAME_PREFIX + random_str(16)
+ result = await self.async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data,
+ ))
+ self.assertIsNotNone(result)
+ self.assertEqual(200, result.status_code)
+
+ result = await self.async_client.delete_multiple_objects(oss.DeleteMultipleObjectsRequest(
+ bucket=self.bucket_name,
+ objects=[oss.DeleteObject(key=key)],
+ ))
+ self.assertIsInstance(result, oss.DeleteMultipleObjectsResult)
+ self.assertEqual(200, result.status_code)
+ self.assertIsNotNone(result.headers.get('x-oss-request-id'))
+ self.assertEqual(1, len(result.deleted_objects))
+ self.assertEqual(key, result.deleted_objects[0].key)
+
+ str1 = b'\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f'
+ key = OBJECTNAME_PREFIX + random_str(16) + str1.decode()
+ result = await self.async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data,
+ ))
+ self.assertIsNotNone(result)
+ self.assertEqual(200, result.status_code)
+
+ result = await self.async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(length, result.content_length)
+
+ result = await self.async_client.delete_multiple_objects(oss.DeleteMultipleObjectsRequest(
+ bucket=self.bucket_name,
+ encoding_type='url',
+ objects=[oss.DeleteObject(key=key)],
+ ))
+ self.assertIsInstance(result, oss.DeleteMultipleObjectsResult)
+ self.assertEqual(200, result.status_code)
+ self.assertIsNotNone(result.headers.get('x-oss-request-id'))
+ self.assertEqual(1, len(result.deleted_objects))
+ self.assertEqual(key, result.deleted_objects[0].key)
+
+ try:
+ result = await self.async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ except Exception as err:
+ self.assertIsInstance(err, oss.exceptions.OperationError)
+ err = cast(oss.exceptions.OperationError, err)
+ serr = err.unwrap()
+ self.assertIsInstance(serr, oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, serr)
+ self.assertIn('NoSuchKey', serr.code)
+
+ async def test_restore_object(self):
+ length = 123
+ data = random_str(length)
+ key = OBJECTNAME_PREFIX + random_str(16)
+ result = await self.async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ storage_class=oss.StorageClassType.ARCHIVE,
+ body=data,
+ ))
+ self.assertIsNotNone(result)
+ self.assertEqual(200, result.status_code)
+
+ result = await self.async_client.restore_object(oss.RestoreObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.RestoreObjectResult)
+ self.assertEqual(202, result.status_code)
+
+ try:
+ result = await self.async_client.restore_object(oss.RestoreObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.fail("should not here")
+ except Exception as err:
+ self.assertIsInstance(err, oss.exceptions.OperationError)
+ err = cast(oss.exceptions.OperationError, err)
+ serr = err.unwrap()
+ self.assertIsInstance(serr, oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, serr)
+ self.assertIn('RestoreAlreadyInProgress', serr.code)
+ self.assertIn('The restore operation is in progress.', serr.message)
+
+ async def test_object_acl(self):
+ length = 123
+ data = random_str(length)
+ key = OBJECTNAME_PREFIX + random_str(16)
+ result = await self.async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ acl=oss.ObjectACLType.PRIVATE,
+ body=data,
+ ))
+ self.assertIsNotNone(result)
+ self.assertEqual(200, result.status_code)
+
+ result = await self.async_client.get_object_acl(oss.GetObjectAclRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.GetObjectAclResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('private', result.acl)
+
+ result = await self.async_client.put_object_acl(oss.PutObjectAclRequest(
+ bucket=self.bucket_name,
+ key=key,
+ acl=oss.ObjectACLType.PUBLICREAD
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectAclResult)
+ self.assertEqual(200, result.status_code)
+
+ result = await self.async_client.get_object_acl(oss.GetObjectAclRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.GetObjectAclResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('public-read', result.acl)
+
+ async def test_get_object_acl_fail(self):
+ try:
+ await self.invalid_async_client.get_object_acl(oss.GetObjectAclRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+ self.assertIn('GetObjectAcl', str(e))
+ self.assertIn('Endpoint: GET', str(e))
+
+ async def test_put_object_acl_fail(self):
+ try:
+ await self.invalid_async_client.put_object_acl(oss.PutObjectAclRequest(
+ bucket=self.bucket_name,
+ key='invalid-key',
+ acl='private',
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertIn('PutObjectAcl', str(e))
+ self.assertIn('Endpoint: PUT', str(e))
+
+ async def test_put_object_with_defferent_body_type(self):
+ len = 300 * 1024 + 1234
+ data = random_str(len)
+
+ crc64 = osscrc.Crc64(0)
+ crc64.update(data.encode())
+ ccrc = str(crc64.sum64())
+
+ # str
+ key = 'test-key-defferent_body-str'
+ result = await self.async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = await self.async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ # bytes
+ key = 'test-key-defferent_body-bytes'
+ result = await self.async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data.encode(),
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = await self.async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ # IO[str]
+ key = 'test-key-defferent_body-io-str'
+ result = await self.async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=io.StringIO(data),
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = await self.async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ # IO[bytes]
+ key = 'test-key-defferent_body-io-bytes'
+ result = await self.async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=io.BytesIO(data.encode()),
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = await self.async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ async def test_put_object_with_defferent_body_type_disable_crc(self):
+ len = 350 * 1024 + 1234
+ data = random_str(len)
+
+ crc64 = osscrc.Crc64(0)
+ crc64.update(data.encode())
+ ccrc = str(crc64.sum64())
+
+ cfg = oss.config.load_default()
+ cfg.credentials_provider = oss.credentials.StaticCredentialsProvider(ACCESS_ID, ACCESS_KEY)
+ cfg.region = REGION
+ cfg.endpoint = ENDPOINT
+ cfg.disable_upload_crc64_check = True
+
+ async with AsyncClient(cfg) as async_client:
+ # str
+ key = 'test-key-defferent_body-no-crc-str'
+ result = await async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = await async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ # bytes
+ key = 'test-key-defferent_body-no-crc-bytes'
+ result = await async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=data.encode(),
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = await async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ # IO[str]
+ key = 'test-key-defferent_body-io-no-crc-str'
+ result = await async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=io.StringIO(data),
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = await async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
+ # IO[bytes]
+ key = 'test-key-defferent_body-io-no-crc-bytes'
+ result = await async_client.put_object(oss.PutObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ body=io.BytesIO(data.encode()),
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.PutObjectResult)
+ self.assertEqual(200, result.status_code)
+
+ result = await async_client.head_object(oss.HeadObjectRequest(
+ bucket=self.bucket_name,
+ key=key,
+ ))
+ self.assertIsNotNone(result)
+ self.assertIsInstance(result, oss.HeadObjectResult)
+ self.assertEqual(200, result.status_code)
+ self.assertEqual(len, result.content_length)
+ self.assertEqual(ccrc, result.hash_crc64)
+
diff --git a/tests/integration/test_region_async.py b/tests/integration/test_region_async.py
new file mode 100644
index 0000000..4c83357
--- /dev/null
+++ b/tests/integration/test_region_async.py
@@ -0,0 +1,75 @@
+# pylint: skip-file
+from typing import cast
+import unittest
+import alibabacloud_oss_v2 as oss
+from . import (
+ TestIntegration,
+ random_bucket_name,
+ random_str,
+ REGION,
+ ENDPOINT,
+ OBJECTNAME_PREFIX,
+ get_async_client,
+)
+
+class TestRegionAsync(TestIntegration, unittest.IsolatedAsyncioTestCase):
+
+ async def asyncSetUp(self):
+ self.async_client = get_async_client(REGION, ENDPOINT)
+ self.invalid_async_client = get_async_client(
+ REGION,
+ ENDPOINT,
+ oss.credentials.StaticCredentialsProvider('invalid-ak', 'invalid')
+ )
+
+ async def asyncTearDown(self):
+ await self.async_client.close()
+ await self.invalid_async_client.close()
+
+ async def test_describe_regions(self):
+ result = await self.async_client.describe_regions(oss.DescribeRegionsRequest(
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+ self.assertTrue(result.region_info.__len__()>1)
+
+
+ result = await self.async_client.describe_regions(oss.DescribeRegionsRequest(
+ regions='oss-cn-hangzhou',
+ ))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual(24, len(result.request_id))
+ self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+ self.assertTrue(result.region_info.__len__()==1)
+ self.assertEqual('oss-accelerate.aliyuncs.com', result.region_info[0].accelerate_endpoint)
+ self.assertEqual('oss-cn-hangzhou-internal.aliyuncs.com', result.region_info[0].internal_endpoint)
+ self.assertEqual('oss-cn-hangzhou.aliyuncs.com', result.region_info[0].internet_endpoint)
+ self.assertEqual('oss-cn-hangzhou', result.region_info[0].region)
+
+ async def test_describe_regions_fail(self):
+ try:
+ await self.invalid_async_client.describe_regions(oss.DescribeRegionsRequest())
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ try:
+ await self.invalid_async_client.describe_regions(oss.DescribeRegionsRequest(
+ regions='oss-cn-hangzhou',
+ ))
+ self.fail("should not here")
+ except Exception as e:
+ ope = cast(oss.exceptions.OperationError, e)
+ self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+ serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual(24, len(serr.request_id))
+ self.assertEqual('InvalidAccessKeyId', serr.code)
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
index 41605b8..4f5845c 100644
--- a/tests/unit/__init__.py
+++ b/tests/unit/__init__.py
@@ -1,8 +1,10 @@
# pylint: skip-file
from typing import Any
from alibabacloud_oss_v2 import _client
+from alibabacloud_oss_v2.aio._aioclient import _AsyncClientImpl
from alibabacloud_oss_v2 import config, credentials
from alibabacloud_oss_v2.types import HttpRequest, HttpResponse, HttpClient
+from alibabacloud_oss_v2.types import HttpRequest, AsyncHttpResponse, AsyncHttpClient
class MockHttpResponse(HttpResponse):
def __init__(self, **kwargs) -> None:
@@ -111,4 +113,131 @@ def mock_client(request_fn, response_fn, **kwargs):
response_fn=response_fn,
kwargs=kwargs
)
- return _client._SyncClientImpl(cfg)
\ No newline at end of file
+ return _client._SyncClientImpl(cfg)
+
+
+
+class MockAsyncHttpResponse(AsyncHttpResponse):
+ def __init__(self, **kwargs) -> None:
+ super(MockAsyncHttpResponse, self).__init__()
+ self._status_code = kwargs.pop("status_code", None)
+ self._reason = kwargs.pop("reason", None)
+ self._headers = kwargs.pop("headers", None)
+ self._body = kwargs.pop("body", None)
+ self._is_closed = False
+ self._is_stream_consumed = False
+ self._request: HttpRequest = None
+
+ @property
+ def request(self) -> HttpRequest:
+ return self._request
+
+ @property
+ def is_closed(self) -> bool:
+ return self._is_closed
+
+ @property
+ def is_stream_consumed(self) -> bool:
+ return self._is_stream_consumed
+
+ @property
+ def status_code(self) -> int:
+ return self._status_code or 0
+
+ @property
+ def headers(self):
+ return self._headers or {}
+
+ @property
+ def reason(self) -> str:
+ return self._reason or ''
+
+ @property
+ def content(self) -> bytes:
+ if self._body is not None:
+ if not isinstance(self._body, (bytes, str)):
+ raise TypeError(f"not support type {type(self._body)}")
+ if isinstance(self._body, str):
+ return self._body.encode()
+ return self._body
+
+ def __repr__(self) -> str:
+ return 'MockAsyncHttpResponse'
+
+ async def __aenter__(self) -> "MockAsyncHttpResponse":
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type = None,
+ exc_value = None,
+ traceback = None,
+ ) -> None:
+ await self.close()
+
+ async def close(self) -> None:
+ if not self.is_closed:
+ self._is_closed = True
+
+ async def read(self) -> bytes:
+ return self.content
+
+ async def iter_bytes(self, **kwargs):
+ data = b''
+ block_size = kwargs.get('block_size', 8*1024)
+ if self._body is not None:
+ data = self._body
+ if not isinstance(self._body, (bytes, str)):
+ raise TypeError(f"not support type {type(self._body)}")
+ if isinstance(self._body, str):
+ data = self._body.encode()
+
+ for i in range(0, len(data), block_size):
+ yield data[i : i + block_size]
+
+class MockAsyncHttpClient(AsyncHttpClient):
+
+ def __init__(self, request_fn, response_fn, **kwargs) -> None:
+ super(MockAsyncHttpClient, self).__init__()
+ self._request_fn = request_fn
+ self._response_fn = response_fn
+
+ async def send(self, request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse:
+ if self._request_fn is not None:
+ self._request_fn(request)
+
+ if self._response_fn is not None:
+ response = self._response_fn()
+ response._request = request
+ return response
+
+ raise NotImplementedError()
+
+ async def open(self) -> None:
+ return
+
+ async def close(self) -> None:
+ return
+
+
+def mock_client(request_fn, response_fn, **kwargs):
+ cfg = config.load_default()
+ cfg.region = 'cn-hangzhou'
+ cfg.credentials_provider = credentials.AnonymousCredentialsProvider()
+ cfg.http_client = MockHttpClient(
+ request_fn=request_fn,
+ response_fn=response_fn,
+ kwargs=kwargs
+ )
+ return _client._SyncClientImpl(cfg)
+
+def mock_async_client(request_fn, response_fn, **kwargs):
+ cfg = config.load_default()
+ cfg.region = 'cn-hangzhou'
+ cfg.credentials_provider = credentials.AnonymousCredentialsProvider()
+ cfg.http_client = MockAsyncHttpClient(
+ request_fn=request_fn,
+ response_fn=response_fn,
+ kwargs=kwargs
+ )
+ return _AsyncClientImpl(cfg)
\ No newline at end of file
diff --git a/tests/unit/aio/__init__.py b/tests/unit/aio/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/unit/aio/operations/__init__.py b/tests/unit/aio/operations/__init__.py
new file mode 100644
index 0000000..106f164
--- /dev/null
+++ b/tests/unit/aio/operations/__init__.py
@@ -0,0 +1,77 @@
+# pylint: skip-file
+import unittest
+from alibabacloud_oss_v2.types import HttpRequest, HttpResponse, HttpClient
+from ... import MockAsyncHttpResponse, mock_async_client
+
+class TestOperations(unittest.IsolatedAsyncioTestCase):
+ def setUp(self):
+ self.set_requestFunc(None)
+ self.set_responseFunc(None)
+
+ def tearDown(self):
+ pass
+
+ @classmethod
+ def setUpClass(cls):
+ cls.request_dump: HttpRequest = None
+ cls.client = mock_async_client(cls.requestFunc, cls.responseFunc)
+ cls.invoke_request = None
+ cls.invoke_response = None
+
+ @classmethod
+ def tearDownClass(cls):
+ pass
+
+ @classmethod
+ def requestFunc(cls, request: HttpRequest):
+ cls.request_dump = request
+ if cls.invoke_request is not None:
+ cls.invoke_request(request)
+
+ @classmethod
+ def responseFunc(cls) -> MockAsyncHttpResponse:
+ if cls.invoke_response is not None:
+ return cls.invoke_response()
+
+ return MockAsyncHttpResponse(
+ status_code=200,
+ reason='OK',
+ headers={'x-oss-request-id': 'id-1234'},
+ body=''
+ )
+
+ @classmethod
+ def set_requestFunc(cls, fn):
+ cls.invoke_request = fn
+
+ @classmethod
+ def set_responseFunc(cls, fn):
+ cls.invoke_response = fn
+
+ @classmethod
+ def response_403_InvalidAccessKeyId(cls) -> MockAsyncHttpResponse:
+ err_xml = r'''
+
+ InvalidAccessKeyId
+ The OSS Access Key Id you provided does not exist in our records.
+ id-1234
+ oss-cn-hangzhou.aliyuncs.com
+ ak
+ 0002-00000902
+ https://api.aliyun.com/troubleshoot?q=0002-00000902
+
+ '''
+ return MockAsyncHttpResponse(
+ status_code=403,
+ reason='Forbidden',
+ headers={
+ 'Server': 'AliyunOSS',
+ 'Date': 'Tue, 23 Jul 2024 13:01:06 GMT',
+ 'Content-Type': 'application/xml',
+ 'x-oss-ec': '0002-00000902',
+ 'x-oss-request-id': 'id-1234',
+ },
+ body=err_xml.encode()
+ )
+
+
diff --git a/tests/unit/aio/operations/test_bucket_basic.py b/tests/unit/aio/operations/test_bucket_basic.py
new file mode 100644
index 0000000..d52201c
--- /dev/null
+++ b/tests/unit/aio/operations/test_bucket_basic.py
@@ -0,0 +1,423 @@
+# pylint: skip-file
+from typing import cast
+import xml.etree.ElementTree as ET
+from alibabacloud_oss_v2 import exceptions
+from alibabacloud_oss_v2.models import bucket_basic as model
+from alibabacloud_oss_v2.aio.operations import bucket_basic as operations
+from . import TestOperations
+
+class TestBucketBasic(TestOperations):
+
+ async def test_put_bucket(self):
+ request = model.PutBucketRequest(
+ bucket='bucket',
+ acl='private',
+ resource_group_id='rg-id',
+ create_bucket_configuration=model.CreateBucketConfiguration(
+ storage_class='Standard'
+ ),
+ )
+
+ result = await operations.put_bucket(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+ self.assertIn('private', self.request_dump.headers.get('x-oss-acl'))
+ self.assertIn('rg-id', self.request_dump.headers.get('x-oss-resource-group-id'))
+
+ root = ET.fromstring(self.request_dump.body)
+ self.assertEqual('CreateBucketConfiguration', root.tag)
+ self.assertEqual('Standard', root.findtext('StorageClass'))
+ self.assertEqual(None, root.findtext('DataRedundancyType'))
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual('id-1234', result.request_id)
+
+ async def test_put_bucket_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.PutBucketRequest(
+ bucket='bucket',
+ acl='private',
+ resource_group_id='rg-id',
+ )
+
+ try:
+ result = await operations.put_bucket(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+ self.assertIn('private', self.request_dump.headers.get('x-oss-acl'))
+ self.assertIn('rg-id', self.request_dump.headers.get('x-oss-resource-group-id'))
+
+ async def test_put_bucket_acl(self):
+ request = model.PutBucketAclRequest(
+ bucket='bucket',
+ acl='private',
+ )
+
+ result = await operations.put_bucket_acl(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?acl=', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+ self.assertIn('private', self.request_dump.headers.get('x-oss-acl'))
+
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ async def test_put_bucket_acl_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.PutBucketAclRequest(
+ bucket='bucket',
+ acl='private',
+ )
+
+ try:
+ result = await operations.put_bucket_acl(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?acl=', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+ self.assertIn('private', self.request_dump.headers.get('x-oss-acl'))
+
+ async def test_get_bucket_acl(self):
+ request = model.GetBucketAclRequest(
+ bucket='bucket',
+ )
+
+ result = await operations.get_bucket_acl(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?acl=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ async def test_get_bucket_acl_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.GetBucketAclRequest(
+ bucket='bucket',
+ )
+
+ try:
+ result = await operations.get_bucket_acl(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?acl=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ async def test_list_objects_v2(self):
+ request = model.ListObjectsV2Request(
+ bucket='example-bucket',
+ delimiter='/',
+ start_after='b',
+ encoding_type='url',
+ continuation_token='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA',
+ max_keys=10,
+ prefix='aaa',
+ fetch_owner=True,
+ request_payer='requester',
+ )
+
+ result = await operations.list_objects_v2(self.client, request)
+ self.assertEqual('https://example-bucket.oss-cn-hangzhou.aliyuncs.com/?encoding-type=url&list-type=2&delimiter=%2F&start-after=b&continuation-token=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&max-keys=10&prefix=aaa&fetch-owner=true', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ async def test_list_objects_v2_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.ListObjectsV2Request(
+ bucket='example-bucket',
+ delimiter='/',
+ start_after='b',
+ encoding_type='url',
+ continuation_token='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA',
+ max_keys=10,
+ prefix='aaa',
+ fetch_owner=True,
+ request_payer='requester',
+ )
+
+ try:
+ result = await operations.list_objects_v2(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://example-bucket.oss-cn-hangzhou.aliyuncs.com/?encoding-type=url&list-type=2&delimiter=%2F&start-after=b&continuation-token=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&max-keys=10&prefix=aaa&fetch-owner=true', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ async def test_get_bucket_stat(self):
+ request = model.GetBucketStatRequest(
+ bucket='bucket',
+ )
+
+ result = await operations.get_bucket_stat(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?stat=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ async def test_get_bucket_stat_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.GetBucketStatRequest(
+ bucket='bucket',
+ )
+
+ try:
+ result = await operations.get_bucket_stat(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?stat=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ async def test_list_objects(self):
+ request = model.ListObjectsRequest(
+ bucket='example-bucket',
+ delimiter='/',
+ encoding_type='url',
+ marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA',
+ max_keys=10,
+ prefix='aaa',
+ request_payer='requester',
+ )
+
+ result = await operations.list_objects(self.client, request)
+ self.assertEqual('https://example-bucket.oss-cn-hangzhou.aliyuncs.com/?encoding-type=url&delimiter=%2F&marker=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&max-keys=10&prefix=aaa', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+ self.assertIn('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ async def test_list_objects_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.ListObjectsRequest(
+ bucket='example-bucket',
+ delimiter='/',
+ encoding_type='url',
+ marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA',
+ max_keys=10,
+ prefix='aaa',
+ request_payer='requester',
+ )
+
+ try:
+ result = await operations.list_objects(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://example-bucket.oss-cn-hangzhou.aliyuncs.com/?encoding-type=url&delimiter=%2F&marker=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&max-keys=10&prefix=aaa', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+ self.assertIn('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+ async def test_get_bucket_info(self):
+ request = model.GetBucketInfoRequest(
+ bucket='bucket',
+ )
+
+ result = await operations.get_bucket_info(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?bucketInfo=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ async def test_get_bucket_info_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.GetBucketInfoRequest(
+ bucket='bucket',
+ )
+
+ try:
+ result = await operations.get_bucket_info(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?bucketInfo=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ async def test_get_bucket_location(self):
+ request = model.GetBucketInfoRequest(
+ bucket='bucket',
+ )
+
+ result = await operations.get_bucket_location(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?location=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ async def test_get_bucket_location_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.GetBucketInfoRequest(
+ bucket='bucket',
+ )
+
+ try:
+ result = await operations.get_bucket_location(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?location=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ async def test_put_bucket_versioning(self):
+ request = model.PutBucketVersioningRequest(
+ bucket='bucket',
+ versioning_configuration=model.VersioningConfiguration(
+ status='Enabled'
+ )
+ )
+
+        xml_data = r'''<VersioningConfiguration><Status>Enabled</Status></VersioningConfiguration>'''
+
+ result = await operations.put_bucket_versioning(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?versioning=', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+ self.assertEqual(xml_data.encode(), self.request_dump.body)
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ async def test_put_bucket_versioning_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.PutBucketVersioningRequest(
+ bucket='bucket',
+ )
+
+ try:
+ result = await operations.put_bucket_versioning(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?versioning=', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+
+ async def test_get_bucket_versioning(self):
+ request = model.GetBucketVersioningRequest(
+ bucket='bucket',
+ )
+
+ result = await operations.get_bucket_versioning(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?versioning=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ async def test_get_bucket_versioning_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.GetBucketVersioningRequest(
+ bucket='bucket',
+ )
+
+ try:
+ result = await operations.get_bucket_versioning(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?versioning=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+
+ async def test_list_object_versions(self):
+ request = model.ListObjectVersionsRequest(
+ bucket='example-bucket',
+ encoding_type='url',
+ delimiter='/',
+ key_marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA',
+ max_keys=10,
+ prefix='aaa',
+ version_id_marker='CAEQMxiBgICbof2D0BYiIGRhZjgwMzJiMjA3MjQ0ODE5MWYxZDYwMzJlZjU1',
+ request_payer='requester',
+ )
+
+ result = await operations.list_object_versions(self.client, request)
+ self.assertEqual('https://example-bucket.oss-cn-hangzhou.aliyuncs.com/?versions=&delimiter=%2F&key-marker=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&version-id-marker=CAEQMxiBgICbof2D0BYiIGRhZjgwMzJiMjA3MjQ0ODE5MWYxZDYwMzJlZjU1&max-keys=10&prefix=aaa&encoding-type=url', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ async def test_list_object_versions_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.ListObjectVersionsRequest(
+ bucket='example-bucket',
+ encoding_type='url',
+ delimiter='/',
+ key_marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA',
+ max_keys=10,
+ prefix='aaa',
+ version_id_marker='CAEQMxiBgICbof2D0BYiIGRhZjgwMzJiMjA3MjQ0ODE5MWYxZDYwMzJlZjU1',
+ request_payer='requester',
+ )
+
+ try:
+ result = await operations.list_object_versions(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://example-bucket.oss-cn-hangzhou.aliyuncs.com/?versions=&delimiter=%2F&key-marker=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&version-id-marker=CAEQMxiBgICbof2D0BYiIGRhZjgwMzJiMjA3MjQ0ODE5MWYxZDYwMzJlZjU1&max-keys=10&prefix=aaa&encoding-type=url', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
diff --git a/tests/unit/aio/operations/test_object_basic.py b/tests/unit/aio/operations/test_object_basic.py
new file mode 100644
index 0000000..57cc234
--- /dev/null
+++ b/tests/unit/aio/operations/test_object_basic.py
@@ -0,0 +1,954 @@
+# pylint: skip-file
+from typing import cast
+import xml.etree.ElementTree as ET
+from alibabacloud_oss_v2 import exceptions
+from alibabacloud_oss_v2.models import object_basic as model
+from alibabacloud_oss_v2.aio.operations import object_basic as operations
+from . import TestOperations
+
+class TestObjectBasic(TestOperations):
+
+ async def test_put_object(self):
+ request = model.PutObjectRequest(
+ bucket='bucket',
+ key='key-test',
+ acl='private',
+ storage_class='ColdArchive',
+ metadata={
+ "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=",
+ "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=",
+ "client-side-encryption-cek-alg": "AES/CTR/NoPadding",
+ "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding",
+ },
+ cache_control='no-cache',
+ content_disposition='attachment',
+ content_encoding='utf-8',
+ content_length=101,
+ content_md5='B5eJF1ptWaXm4bijSPyxw==',
+ content_type='application/octet-stream',
+ expires='2022-10-12T00:00:00.000Z',
+ server_side_encryption='SM4',
+ server_side_data_encryption='KMS',
+ server_side_encryption_key_id='9468da86-3509-4f8d-a61e-6eab1eac****',
+ tagging='tagging-test',
+ callback='{\"callbackUrl\":\"www.abc.com/callback\",\"callbackBody\":\"${etag}\"}',
+ callback_var='{\"x:var1\":\"value1\",\"x:var2\":\"value2\"}',
+ forbid_overwrite=True,
+ traffic_limit=100 * 1024 * 8,
+ request_payer='request_payer-test',
+ body='body-test',
+ progress_fn='progress_fn-test',
+ )
+
+ result = await operations.put_object(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+ self.assertEqual('private', self.request_dump.headers.get('x-oss-object-acl'))
+ self.assertEqual('ColdArchive', self.request_dump.headers.get('x-oss-storage-class'))
+ self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-key'))
+ self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-start'))
+ self.assertEqual('AES/CTR/NoPadding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-cek-alg'))
+ self.assertEqual('RSA/NONE/PKCS1Padding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-wrap-alg'))
+ self.assertEqual('no-cache', self.request_dump.headers.get('Cache-Control'))
+ self.assertEqual('attachment', self.request_dump.headers.get('Content-Disposition'))
+ self.assertEqual('utf-8', self.request_dump.headers.get('Content-Encoding'))
+ self.assertEqual(101, int(self.request_dump.headers.get('Content-Length')))
+ self.assertEqual('B5eJF1ptWaXm4bijSPyxw==', self.request_dump.headers.get('Content-MD5'))
+ self.assertEqual('application/octet-stream', self.request_dump.headers.get('Content-Type'))
+ self.assertEqual('2022-10-12T00:00:00.000Z', self.request_dump.headers.get('Expires'))
+ self.assertEqual('SM4', self.request_dump.headers.get('x-oss-server-side-encryption'))
+ self.assertEqual('KMS', self.request_dump.headers.get('x-oss-server-side-data-encryption'))
+ self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', self.request_dump.headers.get('x-oss-server-side-encryption-key-id'))
+ self.assertEqual('tagging-test', self.request_dump.headers.get('x-oss-tagging'))
+ self.assertEqual('{\"callbackUrl\":\"www.abc.com/callback\",\"callbackBody\":\"${etag}\"}', self.request_dump.headers.get('x-oss-callback'))
+ self.assertEqual('{\"x:var1\":\"value1\",\"x:var2\":\"value2\"}', self.request_dump.headers.get('x-oss-callback-var'))
+ self.assertEqual(True, bool(self.request_dump.headers.get('x-oss-forbid-overwrite')))
+ self.assertEqual(100 * 1024 * 8, int(self.request_dump.headers.get('x-oss-traffic-limit')))
+ self.assertEqual('request_payer-test', self.request_dump.headers.get('x-oss-request-payer'))
+
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual('id-1234', result.request_id)
+
+ async def test_put_object_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.PutObjectRequest(
+ bucket='bucket',
+ key='key-test',
+ acl='private',
+ storage_class='ColdArchive',
+ )
+
+ try:
+ result = await operations.put_object(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+ self.assertEqual('private', self.request_dump.headers.get('x-oss-object-acl'))
+
+
+ async def test_head_object(self):
+ request = model.HeadObjectRequest(
+ bucket='bucket',
+ key='key-test',
+ version_id='fba9dede5f27731c9771645a3986',
+ if_match='D41D8CD98F00B204E9800998ECF8****',
+ if_none_match='D41D8CD98F00B204E9800998ECF9****',
+ if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT',
+ if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT',
+ request_payer='request_payer-test',
+ )
+
+ result = await operations.head_object(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test?versionId=fba9dede5f27731c9771645a3986', self.request_dump.url)
+ self.assertEqual('HEAD', self.request_dump.method)
+ self.assertEqual('D41D8CD98F00B204E9800998ECF8****', self.request_dump.headers.get('If-Match'))
+ self.assertEqual('D41D8CD98F00B204E9800998ECF9****', self.request_dump.headers.get('If-None-Match'))
+ self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', self.request_dump.headers.get('If-Modified-Since'))
+ self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', self.request_dump.headers.get('If-Unmodified-Since'))
+ self.assertEqual('request_payer-test', self.request_dump.headers.get('x-oss-request-payer'))
+
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ async def test_head_object_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.HeadObjectRequest(
+ bucket='bucket',
+ key='key-test',
+ )
+
+ try:
+ result = await operations.head_object(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test', self.request_dump.url)
+ self.assertEqual('HEAD', self.request_dump.method)
+
+
+ async def test_get_object(self):
+ request = model.GetObjectRequest(
+ bucket='bucket',
+ key='key-test',
+ if_match='D41D8CD98F00B204E9800998ECF8****',
+ if_none_match='D41D8CD98F00B204E9800998ECF9****',
+ if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT',
+ if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT',
+ range_header='bytes 0~9/44',
+ range_behavior='standard',
+ response_cache_control='no-cache',
+ response_content_disposition='attachment; filename=testing.txt',
+ response_content_encoding='utf-8',
+ response_content_language='中文',
+ response_content_type='text',
+ response_expires='Fri, 24 Feb 2012 17:00:00 GMT',
+ version_id='CAEQNhiBgM0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY*****',
+ traffic_limit=1022,
+ process='process-test',
+ request_payer='request_payer-test',
+ progress_fn='progress_fn-test',
+ )
+
+ result = await operations.get_object(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test?response-cache-control=no-cache&response-content-disposition=attachment%3B%20filename%3Dtesting.txt&response-content-encoding=utf-8&response-content-language=%E4%B8%AD%E6%96%87&response-content-type=text&response-expires=Fri%2C%2024%20Feb%202012%2017%3A00%3A00%20GMT&versionId=CAEQNhiBgM0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY%2A%2A%2A%2A%2A&x-oss-process=process-test', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+ self.assertEqual('D41D8CD98F00B204E9800998ECF8****', self.request_dump.headers.get('If-Match'))
+ self.assertEqual('D41D8CD98F00B204E9800998ECF9****', self.request_dump.headers.get('If-None-Match'))
+ self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', self.request_dump.headers.get('If-Modified-Since'))
+ self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', self.request_dump.headers.get('If-Unmodified-Since'))
+ self.assertEqual('bytes 0~9/44', self.request_dump.headers.get('Range'))
+ self.assertEqual('standard', self.request_dump.headers.get('x-oss-range-behavior'))
+ self.assertEqual(1022, int(self.request_dump.headers.get('x-oss-traffic-limit')))
+ self.assertEqual('request_payer-test', self.request_dump.headers.get('x-oss-request-payer'))
+
+ async def test_get_object_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.GetObjectRequest(
+ bucket='bucket',
+ key='key-test',
+ )
+
+ try:
+ result = await operations.get_object(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ async def test_append_object(self):
+ request = model.AppendObjectRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ position=10,
+ acl='private',
+ storage_class='ColdArchive',
+ metadata={
+ "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=",
+ "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=",
+ "client-side-encryption-cek-alg": "AES/CTR/NoPadding",
+ "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding",
+ },
+ cache_control='no-cache',
+ content_disposition='attachment',
+ content_encoding='utf-8',
+ content_length=101,
+ content_md5='B5eJF1ptWaXm4bijSPyx',
+ content_type='application/octet-stream',
+ expires='2022-10-12T00:00:00.000Z',
+ server_side_encryption='SM4',
+ server_side_data_encryption='KMS',
+ server_side_encryption_key_id='9468da86-3509-4f8d-a61e-6eab1eac****',
+ tagging='tagging-test',
+ forbid_overwrite=True,
+ traffic_limit=100*1024*8,
+ request_payer='requester',
+ body='xml_data',
+ progress_fn='progress_fn-test',
+ )
+
+ result = await operations.append_object(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?append=&position=10', self.request_dump.url)
+ self.assertEqual('POST', self.request_dump.method)
+ self.assertEqual('private', self.request_dump.headers.get('x-oss-object-acl'))
+ self.assertEqual('ColdArchive', self.request_dump.headers.get('x-oss-storage-class'))
+ self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-key'))
+ self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-start'))
+ self.assertEqual('AES/CTR/NoPadding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-cek-alg'))
+ self.assertEqual('RSA/NONE/PKCS1Padding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-wrap-alg'))
+ self.assertEqual('no-cache', self.request_dump.headers.get('Cache-Control'))
+ self.assertEqual('attachment', self.request_dump.headers.get('Content-Disposition'))
+ self.assertEqual('utf-8', self.request_dump.headers.get('Content-Encoding'))
+ self.assertEqual('101', self.request_dump.headers.get('Content-Length'))
+ self.assertEqual('B5eJF1ptWaXm4bijSPyx', self.request_dump.headers.get('Content-MD5'))
+ self.assertEqual('application/octet-stream', self.request_dump.headers.get('Content-Type'))
+ self.assertEqual('2022-10-12T00:00:00.000Z', self.request_dump.headers.get('Expires'))
+ self.assertEqual('SM4', self.request_dump.headers.get('x-oss-server-side-encryption'))
+ self.assertEqual('KMS', self.request_dump.headers.get('x-oss-server-side-data-encryption'))
+ self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', self.request_dump.headers.get('x-oss-server-side-encryption-key-id'))
+ self.assertEqual('tagging-test', self.request_dump.headers.get('x-oss-tagging'))
+ self.assertEqual(True, bool(self.request_dump.headers.get('x-oss-forbid-overwrite')))
+ self.assertEqual(100*1024*8, int(self.request_dump.headers.get('x-oss-traffic-limit')))
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
+
+ async def test_append_object_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.AppendObjectRequest(
+ bucket='bucket',
+ key='key-test',
+ position=10,
+ )
+
+ try:
+ result = await operations.append_object(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test?append=&position=10', self.request_dump.url)
+ self.assertEqual('POST', self.request_dump.method)
+
+ async def test_copy_object(self):
+ request = model.CopyObjectRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ source_key='source-invalid-key',
+ source_bucket='source_bucket-test',
+ source_version_id='source_version_id-test',
+ if_match='D41D8CD98F00B204E9800998ECF8****',
+ if_none_match='D41D8CD98F00B204E9800998ECF9****',
+ if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT',
+ if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT',
+ acl='private',
+ storage_class='ColdArchive',
+ metadata={
+ "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=",
+ "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=",
+ "client-side-encryption-cek-alg": "AES/CTR/NoPadding",
+ "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding",
+ },
+ cache_control='no-cache',
+ content_disposition='attachment',
+ content_encoding='utf-8',
+ content_length=101,
+ content_md5='B5eJF1ptWaXm4bijSPyx',
+ content_type='application/octet-stream',
+ expires='2022-10-12T00:00:00.000Z',
+ metadata_directive='metadata_directive-test',
+ server_side_encryption='SM4',
+ server_side_data_encryption='KMS',
+ server_side_encryption_key_id='9468da86-3509-4f8d-a61e-6eab1eac****',
+ tagging='tagging-test',
+ tagging_directive='tagging_directive-test',
+ forbid_overwrite=True,
+ traffic_limit=100*1024*8,
+ request_payer='requester',
+ progress_fn='progress_fn-test',
+ )
+
+ result = await operations.copy_object(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+ self.assertEqual('D41D8CD98F00B204E9800998ECF8****', self.request_dump.headers.get('x-oss-copy-source-if-match'))
+ self.assertEqual('D41D8CD98F00B204E9800998ECF9****', self.request_dump.headers.get('x-oss-copy-source-if-none-match'))
+ self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', self.request_dump.headers.get('x-oss-copy-source-if-modified-since'))
+ self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', self.request_dump.headers.get('x-oss-copy-source-if-unmodified-since'))
+ self.assertEqual('private', self.request_dump.headers.get('x-oss-object-acl'))
+ self.assertEqual('ColdArchive', self.request_dump.headers.get('x-oss-storage-class'))
+ self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-key'))
+ self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-start'))
+ self.assertEqual('AES/CTR/NoPadding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-cek-alg'))
+ self.assertEqual('RSA/NONE/PKCS1Padding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-wrap-alg'))
+ self.assertEqual('no-cache', self.request_dump.headers.get('Cache-Control'))
+ self.assertEqual('attachment', self.request_dump.headers.get('Content-Disposition'))
+ self.assertEqual('utf-8', self.request_dump.headers.get('Content-Encoding'))
+ self.assertEqual(101, int(self.request_dump.headers.get('Content-Length')))
+ self.assertEqual('B5eJF1ptWaXm4bijSPyx', self.request_dump.headers.get('Content-MD5'))
+ self.assertEqual('application/octet-stream', self.request_dump.headers.get('Content-Type'))
+ self.assertEqual('2022-10-12T00:00:00.000Z', self.request_dump.headers.get('Expires'))
+ self.assertEqual('metadata_directive-test', self.request_dump.headers.get('x-oss-metadata-directive'))
+ self.assertEqual('SM4', self.request_dump.headers.get('x-oss-server-side-encryption'))
+ self.assertEqual('KMS', self.request_dump.headers.get('x-oss-server-side-data-encryption'))
+ self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', self.request_dump.headers.get('x-oss-server-side-encryption-key-id'))
+ self.assertEqual('tagging-test', self.request_dump.headers.get('x-oss-tagging'))
+ self.assertEqual('tagging_directive-test', self.request_dump.headers.get('x-oss-tagging-directive'))
+ self.assertEqual(True, bool(self.request_dump.headers.get('x-oss-forbid-overwrite')))
+ self.assertEqual(100*1024*8, int(self.request_dump.headers.get('x-oss-traffic-limit')))
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+ async def test_copy_object_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.CopyObjectRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ source_key='source-invalid-key',
+ )
+
+ try:
+ result = await operations.copy_object(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg',self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+
+ async def test_delete_object(self):
+ request = model.DeleteObjectRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****',
+ request_payer='requester',
+ )
+
+ result = await operations.delete_object(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?versionId=CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0%2A%2A%2A%2A', self.request_dump.url)
+ self.assertEqual('DELETE', self.request_dump.method)
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+ async def test_delete_object_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.DeleteObjectRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ )
+
+ try:
+ result = await operations.delete_object(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg', self.request_dump.url)
+ self.assertEqual('DELETE', self.request_dump.method)
+
+ async def test_delete_multiple_objects(self):
+ request = model.DeleteMultipleObjectsRequest(
+ bucket='bucket',
+ objects=[model.DeleteObject(
+ key='key1',
+ version_id='ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****',
+ ), model.DeleteObject(
+ key='key2',
+ version_id='CAEQNhiBgMDJgZCA0BYiIDZjhjYmY0****',
+ )],
+ encoding_type='url',
+ content_length=101,
+ quiet=True,
+ request_payer='requester',
+ )
+
+ result = await operations.delete_multiple_objects(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?delete=&encoding-type=url', self.request_dump.url)
+ self.assertEqual('POST', self.request_dump.method)
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+ async def test_delete_multiple_objects_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.DeleteMultipleObjectsRequest(
+ bucket='bucket',
+ objects=[model.DeleteObject(
+ key='key1',
+ version_id='ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****',
+ ), model.DeleteObject(
+ key='key2',
+ version_id='CAEQNhiBgMDJgZCA0BYiIDZjhjYmY0****',
+ )],
+ )
+
+ try:
+ result = await operations.delete_multiple_objects(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?delete=&encoding-type=url', self.request_dump.url)
+ self.assertEqual('POST', self.request_dump.method)
+
+ async def test_get_object_meta(self):
+ request = model.GetObjectMetaRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****',
+ request_payer='requester',
+ )
+
+ result = await operations.get_object_meta(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?objectMeta=&versionId=CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0%2A%2A%2A%2A', self.request_dump.url)
+ self.assertEqual('HEAD', self.request_dump.method)
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+ async def test_get_object_meta_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.GetObjectMetaRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ )
+
+ try:
+ result = await operations.get_object_meta(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?objectMeta=', self.request_dump.url)
+ self.assertEqual('HEAD', self.request_dump.method)
+
+ async def test_restore_object(self):
+ request = model.RestoreObjectRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****',
+ restore_request=model.RestoreRequest(
+ days=7,
+ tier='Expedited',
+ ),
+ request_payer='requester',
+ )
+ result = await operations.restore_object(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?restore=&versionId=CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0%2A%2A%2A%2A', self.request_dump.url)
+ self.assertEqual('POST', self.request_dump.method)
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+ root = ET.fromstring(self.request_dump.body)
+ self.assertEqual('RestoreRequest', root.tag)
+ self.assertEqual(7, int(root.findtext('Days')))
+ self.assertEqual('Expedited', root.findtext('JobParameters.Tier'))
+
+ async def test_restore_object_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.RestoreObjectRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ )
+
+ try:
+ result = await operations.restore_object(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?restore=', self.request_dump.url)
+ self.assertEqual('POST', self.request_dump.method)
+
+ async def test_put_object_acl(self):
+ request = model.PutObjectAclRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ acl='private',
+ version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****',
+ request_payer='requester',
+ )
+
+ result = await operations.put_object_acl(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?acl=&versionId=CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0%2A%2A%2A%2A', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+ self.assertEqual('private', self.request_dump.headers.get('x-oss-object-acl'))
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+
+ async def test_put_object_acl_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.PutObjectAclRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ acl='private',
+ )
+
+ try:
+ result = await operations.put_object_acl(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?acl=', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+
+
+ async def test_get_object_acl(self):
+ request = model.GetObjectAclRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****',
+ request_payer='requester',
+ )
+
+ result = await operations.get_object_acl(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?acl=&versionId=CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0%2A%2A%2A%2A', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+ async def test_get_object_acl_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.GetObjectAclRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ )
+
+ try:
+ result = await operations.get_object_acl(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?acl=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+
+ async def test_initiate_multipart_upload(self):
+ request = model.InitiateMultipartUploadRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ encoding_type='url',
+ storage_class='ColdArchive',
+ metadata={
+ "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=",
+ "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=",
+ "client-side-encryption-cek-alg": "AES/CTR/NoPadding",
+ "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding",
+ },
+ cache_control='no-cache',
+ content_disposition='attachment',
+ content_encoding='utf-8',
+ content_length=101,
+ content_md5='B5eJF1ptWaXm4bijSPyx',
+ content_type='application/octet-stream',
+ expires='2022-10-12T00:00:00.000Z',
+ server_side_encryption='SM4',
+ server_side_data_encryption='KMS',
+ server_side_encryption_key_id='9468da86-3509-4f8d-a61e-6eab1eac****',
+ tagging='tagging-test',
+ forbid_overwrite=True,
+ request_payer='requester',
+ cse_data_size=26446,
+ cse_part_size=6298,
+ )
+
+ result = await operations.initiate_multipart_upload(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?uploads=&encoding-type=url', self.request_dump.url)
+ self.assertEqual('POST', self.request_dump.method)
+ self.assertEqual('ColdArchive', self.request_dump.headers.get('x-oss-storage-class'))
+ self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-key'))
+ self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-start'))
+ self.assertEqual('AES/CTR/NoPadding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-cek-alg'))
+ self.assertEqual('RSA/NONE/PKCS1Padding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-wrap-alg'))
+ self.assertEqual('no-cache', self.request_dump.headers.get('Cache-Control'))
+ self.assertEqual('attachment', self.request_dump.headers.get('Content-Disposition'))
+ self.assertEqual('utf-8', self.request_dump.headers.get('Content-Encoding'))
+ self.assertEqual('101', self.request_dump.headers.get('Content-Length'))
+ self.assertEqual('B5eJF1ptWaXm4bijSPyx', self.request_dump.headers.get('Content-MD5'))
+ self.assertEqual('application/octet-stream', self.request_dump.headers.get('Content-Type'))
+ self.assertEqual('2022-10-12T00:00:00.000Z', self.request_dump.headers.get('Expires'))
+ self.assertEqual('SM4', self.request_dump.headers.get('x-oss-server-side-encryption'))
+ self.assertEqual('KMS', self.request_dump.headers.get('x-oss-server-side-data-encryption'))
+ self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', self.request_dump.headers.get('x-oss-server-side-encryption-key-id'))
+ self.assertEqual('tagging-test', self.request_dump.headers.get('x-oss-tagging'))
+ self.assertEqual(True, bool(self.request_dump.headers.get('x-oss-forbid-overwrite')))
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+
+ async def test_initiate_multipart_upload_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.InitiateMultipartUploadRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ )
+
+ try:
+ result = await operations.initiate_multipart_upload(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?uploads=&encoding-type=url', self.request_dump.url)
+ self.assertEqual('POST', self.request_dump.method)
+
+
+ async def test_upload_part(self):
+ request = model.UploadPartRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ part_number=1,
+ upload_id='0004B9895DBBB6EC9****',
+ content_md5='B5eJF1ptWaXm4bijSPyx',
+ content_length=101,
+ traffic_limit=100*1024*8,
+ body='xml_data',
+ request_payer='requester',
+ progress_fn='progress_fn-test',
+ cse_multipart_context='cse_multipart_context-test',
+ )
+
+ result = await operations.upload_part(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?partNumber=1&uploadId=0004B9895DBBB6EC9%2A%2A%2A%2A', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+ self.assertEqual('B5eJF1ptWaXm4bijSPyx', self.request_dump.headers.get('Content-MD5'))
+ self.assertEqual(101, int(self.request_dump.headers.get('Content-Length')))
+ self.assertEqual(100*1024*8, int(self.request_dump.headers.get('x-oss-traffic-limit')))
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+
+ async def test_upload_part_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.UploadPartRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ part_number=1,
+ upload_id='0004B9895DBBB6EC9****',
+ )
+
+ try:
+ result = await operations.upload_part(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?partNumber=1&uploadId=0004B9895DBBB6EC9%2A%2A%2A%2A', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+
+
+ async def test_upload_part_copy(self):
+ request = model.UploadPartCopyRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ part_number=1,
+ upload_id='0004B9895DBBB6EC9',
+ source_key='source-invalid-key',
+ source_bucket='source_bucket-test',
+ source_version_id='source_version_id-test',
+ source_range='source_range-test',
+ if_match='D41D8CD98F00B204E9800998ECF8****',
+ if_none_match='D41D8CD98F00B204E9800998ECF9****',
+ if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT',
+ if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT',
+ traffic_limit=100*1024*8,
+ request_payer='requester',
+ )
+
+ result = await operations.upload_part_copy(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?partNumber=1&uploadId=0004B9895DBBB6EC9', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+ self.assertEqual('source_range-test', self.request_dump.headers.get('x-oss-copy-source-range'))
+ self.assertEqual('D41D8CD98F00B204E9800998ECF8****', self.request_dump.headers.get('x-oss-copy-source-if-match'))
+ self.assertEqual('D41D8CD98F00B204E9800998ECF9****', self.request_dump.headers.get('x-oss-copy-source-if-none-match'))
+ self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', self.request_dump.headers.get('x-oss-copy-source-if-modified-since'))
+ self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', self.request_dump.headers.get('x-oss-copy-source-if-unmodified-since'))
+ self.assertEqual(100 * 1024 * 8, int(self.request_dump.headers.get('x-oss-traffic-limit')))
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+
+ async def test_upload_part_copy_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.UploadPartCopyRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ part_number=1,
+ upload_id='0004B9895DBBB6EC9',
+ source_key='source-invalid-key',
+ )
+
+ try:
+ result = await operations.upload_part_copy(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?partNumber=1&uploadId=0004B9895DBBB6EC9', self.request_dump.url)
+ self.assertEqual('PUT', self.request_dump.method)
+
+
+ async def test_complete_multipart_upload(self):
+ request = model.CompleteMultipartUploadRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ upload_id='0004B9895DBBB6EC9',
+ acl='private',
+ complete_multipart_upload=model.CompleteMultipartUpload(
+ parts=[model.UploadPart(
+ part_number=1,
+ etag='ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****',
+ ), model.UploadPart(
+ part_number=2,
+ etag='jOTRmNTE5NmU5NmFhZjhjYmY0****',
+ )],
+ ),
+ complete_all='complete_all-test',
+ callback='{\"callbackUrl\":\"www.abc.com/callback\",\"callbackBody\":\"${etag}\"}',
+ callback_var='{\"x:var1\":\"value1\",\"x:var2\":\"value2\"}',
+ forbid_overwrite=True,
+ encoding_type='url',
+ request_payer='requester',
+ )
+
+ result = await operations.complete_multipart_upload(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?encoding-type=url&uploadId=0004B9895DBBB6EC9', self.request_dump.url)
+ self.assertEqual('POST', self.request_dump.method)
+ self.assertEqual('private', self.request_dump.headers.get('x-oss-object-acl'))
+ self.assertEqual('jOTRmNTE5NmU5NmFhZjhjYmY0****', request.complete_multipart_upload.parts[1].etag)
+ self.assertEqual('complete_all-test', self.request_dump.headers.get('x-oss-complete-all'))
+ self.assertEqual('{\"callbackUrl\":\"www.abc.com/callback\",\"callbackBody\":\"${etag}\"}', self.request_dump.headers.get('x-oss-callback'))
+ self.assertEqual('{\"x:var1\":\"value1\",\"x:var2\":\"value2\"}', self.request_dump.headers.get('x-oss-callback-var'))
+ self.assertEqual(True, bool(self.request_dump.headers.get('x-oss-forbid-overwrite')))
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+ root = ET.fromstring(self.request_dump.body)
+ self.assertEqual('CompleteMultipartUpload', root.tag)
+ self.assertEqual(1, int(root.findall('Part')[0].findtext('PartNumber')))
+ self.assertEqual('ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', root.findall('Part')[0].findtext('ETag'))
+ self.assertEqual(2, int(root.findall('Part')[1].findtext('PartNumber')))
+ self.assertEqual('jOTRmNTE5NmU5NmFhZjhjYmY0****', root.findall('Part')[1].findtext('ETag'))
+
+ async def test_complete_multipart_upload_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.CompleteMultipartUploadRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ upload_id='0004B9895DBBB6EC9',
+ )
+
+ try:
+ result = await operations.complete_multipart_upload(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?encoding-type=url&uploadId=0004B9895DBBB6EC9', self.request_dump.url)
+ self.assertEqual('POST', self.request_dump.method)
+
+
+ async def test_abort_multipart_upload(self):
+ request = model.AbortMultipartUploadRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ upload_id='0004B9895DBBB6EC9',
+ request_payer='requester',
+ )
+
+ result = await operations.abort_multipart_upload(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?uploadId=0004B9895DBBB6EC9', self.request_dump.url)
+ self.assertEqual('DELETE', self.request_dump.method)
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+
+ async def test_abort_multipart_upload_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.AbortMultipartUploadRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ upload_id='0004B9895DBBB6EC9',
+ )
+
+ try:
+ result = await operations.abort_multipart_upload(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?uploadId=0004B9895DBBB6EC9', self.request_dump.url)
+ self.assertEqual('DELETE', self.request_dump.method)
+
+
+
+ async def test_list_multipart_uploads(self):
+ request = model.ListMultipartUploadsRequest(
+ bucket='bucket',
+ delimiter='/',
+ encoding_type='url',
+ key_marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA',
+ max_uploads=90009,
+ prefix='aaa',
+ upload_id_marker='upload_id_marker-test',
+ request_payer='requester',
+ key='example-object-2.jpg',
+ upload_id='0004B9895DBBB6EC9',
+ initiated='initiated-test',
+ )
+
+ result = await operations.list_multipart_uploads(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?encoding-type=url&uploads=&delimiter=%2F&key-marker=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&max-uploads=90009&prefix=aaa&upload-id-marker=upload_id_marker-test', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+
+
+ async def test_list_multipart_uploads_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.ListMultipartUploadsRequest(
+ bucket='bucket',
+ )
+
+ try:
+ result = await operations.list_multipart_uploads(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?encoding-type=url&uploads=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+
+ async def test_list_parts(self):
+ request = model.ListPartsRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ upload_id='0004B9895DBBB6EC9',
+ encoding_type='url',
+ max_parts=12,
+ part_number_marker='part_number_marker-test',
+ request_payer='requester',
+ part_number='1',
+ etag='"D41D8CD98F00B204E9800998ECF8****"',
+ last_modified='datetime.datetime.fromtimestamp(1702743657)',
+ size='344606',
+ hash_crc64='316181249502703****',
+ )
+
+ result = await operations.list_parts(self.client, request)
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?encoding-type=url&uploadId=0004B9895DBBB6EC9&max-parts=12&part-number-marker=part_number_marker-test', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+ self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer'))
+
+
+ async def test_list_parts_fail(self):
+ self.set_responseFunc(self.response_403_InvalidAccessKeyId)
+ request = model.ListPartsRequest(
+ bucket='bucket',
+ key='example-object-2.jpg',
+ upload_id='0004B9895DBBB6EC9',
+ )
+
+ try:
+ result = await operations.list_parts(self.client, request)
+ self.fail('should not here')
+ except exceptions.OperationError as ope:
+ self.assertIsInstance(ope.unwrap(), exceptions.ServiceError)
+ serr = cast(exceptions.ServiceError, ope.unwrap())
+ self.assertEqual(403, serr.status_code)
+ self.assertEqual('id-1234', serr.request_id)
+ self.assertEqual('InvalidAccessKeyId', serr.code)
+
+ self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?encoding-type=url&uploadId=0004B9895DBBB6EC9', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+
diff --git a/tests/unit/aio/operations/test_region.py b/tests/unit/aio/operations/test_region.py
new file mode 100644
index 0000000..bf316bd
--- /dev/null
+++ b/tests/unit/aio/operations/test_region.py
@@ -0,0 +1,15 @@
+# pylint: skip-file
+from typing import cast
+from alibabacloud_oss_v2.models import region as model
+from alibabacloud_oss_v2.aio.operations import region as operations
+from . import TestOperations
+
+class TestRegion(TestOperations):
+ async def test_describe_regions(self):
+ request = model.DescribeRegionsRequest()
+ result = await operations.describe_regions(self.client, request)
+ self.assertEqual('https://oss-cn-hangzhou.aliyuncs.com/?regions=', self.request_dump.url)
+ self.assertEqual('GET', self.request_dump.method)
+
+ self.assertEqual(200, result.status_code)
+ self.assertEqual('OK', result.status)
diff --git a/tests/unit/aio/operations/test_service.py b/tests/unit/aio/operations/test_service.py
new file mode 100644
index 0000000..724e648
--- /dev/null
+++ b/tests/unit/aio/operations/test_service.py
@@ -0,0 +1,8 @@
+# pylint: skip-file
+from typing import cast
+from alibabacloud_oss_v2.models import service as model
+from alibabacloud_oss_v2.aio.operations import service as operations
+from . import TestOperations
+
+class TestService(TestOperations):
+ """"""