From 63c6578ed0db4b4a40d73964874de229d62d9c83 Mon Sep 17 00:00:00 2001 From: zhuxiaolong37 Date: Mon, 10 Feb 2025 17:22:46 +0800 Subject: [PATCH 1/6] add clean restored and fix some bug --- alibabacloud_oss_v2/client.py | 81 ++++---- alibabacloud_oss_v2/models/bucket_basic.py | 8 + .../models/bucket_inventory.py | 1 + alibabacloud_oss_v2/models/object_basic.py | 45 ++++- .../operations/object_basic.py | 44 ++++- .../test_clean_restored_object_client.py | 174 ++++++++++++++++++ tests/unit/models/test_bucket_basic.py | 14 +- tests/unit/models/test_bucket_inventory.py | 2 + .../unit/models/test_clean_restored_object.py | 72 ++++++++ tests/unit/models/test_object_basic.py | 36 +++- 10 files changed, 435 insertions(+), 42 deletions(-) create mode 100644 tests/integration/test_clean_restored_object_client.py create mode 100644 tests/unit/models/test_clean_restored_object.py diff --git a/alibabacloud_oss_v2/client.py b/alibabacloud_oss_v2/client.py index 375fd82..e0da51e 100644 --- a/alibabacloud_oss_v2/client.py +++ b/alibabacloud_oss_v2/client.py @@ -65,7 +65,7 @@ def list_buckets(self, request: models.ListBucketsRequest, **kwargs request (ListBucketsRequest): Request parameters for ListBuckets operation. Returns: - ListBucketsResult: Reponse result for ListBuckets operation. + ListBucketsResult: Response result for ListBuckets operation. """ return operations.list_buckets(self._client, request, **kwargs) @@ -97,7 +97,7 @@ def put_bucket(self, request: models.PutBucketRequest, **kwargs request (PutBucketRequest): Request parameters for PutBucket operation. Returns: - PutBucketResult: Reponse result for PutBucket operation. + PutBucketResult: Response result for PutBucket operation. """ return operations.put_bucket(self._client, request, **kwargs) @@ -111,7 +111,7 @@ def delete_bucket(self, request: models.DeleteBucketRequest, **kwargs request (DeleteBucketRequest): Request parameters for DeleteBucket operation. Returns: - DeleteBucketResult: Reponse result for DeleteBucket operation. + DeleteBucketResult: Response result for DeleteBucket operation. """ return operations.delete_bucket(self._client, request, **kwargs) @@ -125,7 +125,7 @@ def list_objects(self, request: models.ListObjectsRequest, **kwargs request (ListObjectsRequest): Request parameters for ListObjects operation. Returns: - ListObjectsResult: Reponse result for ListObjects operation. + ListObjectsResult: Response result for ListObjects operation. """ return operations.list_objects(self._client, request, **kwargs) @@ -168,7 +168,7 @@ def list_objects_v2(self, request: models.ListObjectsV2Request, **kwargs request (ListObjectsV2Request): Request parameters for ListObjectsV2 operation. Returns: - ListObjectsV2Result: Reponse result for ListObjectsV2 operation. + ListObjectsV2Result: Response result for ListObjectsV2 operation. """ return operations.list_objects_v2(self._client, request, **kwargs) @@ -253,7 +253,7 @@ def list_object_versions(self, request: models.ListObjectVersionsRequest, **kwar request (ListObjectVersionsRequest): Request parameters for ListObjectVersions operation. Returns: - ListObjectVersionsResult: Reponse result for ListObjectVersions operation. + ListObjectVersionsResult: Response result for ListObjectVersions operation. """ return operations.list_object_versions(self._client, request, **kwargs) @@ -268,7 +268,7 @@ def put_object(self, request: models.PutObjectRequest, **kwargs request (PutObjectRequest): Request parameters for PutObject operation. 
Returns: - PutObjectResult: Reponse result for PutObject operation. + PutObjectResult: Response result for PutObject operation. """ return operations.put_object(self._client, request, **kwargs) @@ -282,7 +282,7 @@ def get_object(self, request: models.GetObjectRequest, **kwargs request (GetObjectRequest): Request parameters for GetObject operation. Returns: - GetObjectResult: Reponse result for GetObject operation. + GetObjectResult: Response result for GetObject operation. """ return operations.get_object(self._client, request, **kwargs) @@ -296,7 +296,7 @@ def copy_object(self, request: models.CopyObjectRequest, **kwargs request (CopyObjectRequest): Request parameters for CopyObject operation. Returns: - CopyObjectResult: Reponse result for CopyObject operation. + CopyObjectResult: Response result for CopyObject operation. """ return operations.copy_object(self._client, request, **kwargs) @@ -311,7 +311,7 @@ def append_object(self, request: models.AppendObjectRequest, **kwargs request (AppendObjectRequest): Request parameters for AppendObject operation. Returns: - AppendObjectResult: Reponse result for AppendObject operation. + AppendObjectResult: Response result for AppendObject operation. """ return operations.append_object(self._client, request, **kwargs) @@ -325,21 +325,21 @@ def delete_object(self, request: models.DeleteObjectRequest, **kwargs request (DeleteObjectRequest): Request parameters for DeleteObject operation. Returns: - DeleteObjectResult: Reponse result for DeleteObject operation. + DeleteObjectResult: Response result for DeleteObject operation. """ return operations.delete_object(self._client, request, **kwargs) - def delete_multiple_objects(self, request: models.DeleteMultipleObjectsResult, **kwargs + def delete_multiple_objects(self, request: models.DeleteMultipleObjectsRequest, **kwargs ) -> models.DeleteMultipleObjectsResult: """ Deletes multiple objects from a bucket. Args: - request (DeleteMultipleObjectsResult): Request parameters for DeleteMultipleObjects operation. + request (DeleteMultipleObjectsRequest): Request parameters for DeleteMultipleObjects operation. Returns: - DeleteMultipleObjectsResult: Reponse result for DeleteMultipleObjects operation. + DeleteMultipleObjectsResult: Response result for DeleteMultipleObjects operation. """ return operations.delete_multiple_objects(self._client, request, **kwargs) @@ -353,7 +353,7 @@ def head_object(self, request: models.HeadObjectRequest, **kwargs request (HeadObjectRequest): Request parameters for HeadObject operation. Returns: - HeadObjectResult: Reponse result for HeadObject operation. + HeadObjectResult: Response result for HeadObject operation. """ return operations.head_object(self._client, request, **kwargs) @@ -367,7 +367,7 @@ def get_object_meta(self, request: models.GetObjectMetaRequest, **kwargs request (GetObjectMetaRequest): Request parameters for GetObjectMeta operation. Returns: - GetObjectMetaResult: Reponse result for GetObjectMeta operation. + GetObjectMetaResult: Response result for GetObjectMeta operation. """ return operations.get_object_meta(self._client, request, **kwargs) @@ -381,7 +381,7 @@ def restore_object(self, request: models.RestoreObjectRequest, **kwargs request (RestoreObjectRequest): Request parameters for RestoreObject operation. Returns: - RestoreObjectResult: Reponse result for RestoreObject operation. + RestoreObjectResult: Response result for RestoreObject operation. 
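For reference, the corrected delete_multiple_objects signature above is exercised like this — a minimal sketch, assuming the package is imported as oss, a configured client, and example bucket and object names:

    result = client.delete_multiple_objects(oss.DeleteMultipleObjectsRequest(
        bucket='examplebucket',                  # assumed bucket name
        objects=[
            oss.DeleteObject(key='obj-1.txt'),   # assumed object keys
            oss.DeleteObject(key='obj-2.txt'),
        ],
        quiet=True,
    ))
    print(result.status_code)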
""" return operations.restore_object(self._client, request, **kwargs) @@ -395,7 +395,7 @@ def put_object_acl(self, request: models.PutObjectAclRequest, **kwargs request (PutObjectAclRequest): Request parameters for PutObjectAcl operation. Returns: - PutObjectAclResult: Reponse result for PutObjectAcl operation. + PutObjectAclResult: Response result for PutObjectAcl operation. """ return operations.put_object_acl(self._client, request, **kwargs) @@ -409,7 +409,7 @@ def get_object_acl(self, request: models.GetObjectAclRequest, **kwargs request (GetObjectAclRequest): Request parameters for GetObjectAcl operation. Returns: - GetObjectAclResult: Reponse result for GetObjectAcl operation. + GetObjectAclResult: Response result for GetObjectAcl operation. """ return operations.get_object_acl(self._client, request, **kwargs) @@ -423,7 +423,7 @@ def initiate_multipart_upload(self, request: models.InitiateMultipartUploadReque request (InitiateMultipartUploadRequest): Request parameters for InitiateMultipartUpload operation. Returns: - InitiateMultipartUploadResult: Reponse result for InitiateMultipartUpload operation. + InitiateMultipartUploadResult: Response result for InitiateMultipartUpload operation. """ return operations.initiate_multipart_upload(self._client, request, **kwargs) @@ -437,7 +437,7 @@ def upload_part(self, request: models.UploadPartRequest, **kwargs request (UploadPartRequest): Request parameters for UploadPart operation. Returns: - UploadPartResult: Reponse result for UploadPart operation. + UploadPartResult: Response result for UploadPart operation. """ return operations.upload_part(self._client, request, **kwargs) @@ -452,7 +452,7 @@ def upload_part_copy(self, request: models.UploadPartCopyRequest, **kwargs request (UploadPartCopyRequest): Request parameters for UploadPartCopy operation. Returns: - UploadPartCopyResult: Reponse result for UploadPartCopy operation. + UploadPartCopyResult: Response result for UploadPartCopy operation. """ return operations.upload_part_copy(self._client, request, **kwargs) @@ -466,7 +466,7 @@ def complete_multipart_upload(self, request: models.CompleteMultipartUploadReque request (CompleteMultipartUploadRequest): Request parameters for CompleteMultipartUpload operation. Returns: - CompleteMultipartUploadResult: Reponse result for CompleteMultipartUpload operation. + CompleteMultipartUploadResult: Response result for CompleteMultipartUpload operation. """ return operations.complete_multipart_upload(self._client, request, **kwargs) @@ -480,7 +480,7 @@ def abort_multipart_upload(self, request: models.AbortMultipartUploadRequest, ** request (AbortMultipartUploadRequest): Request parameters for AbortMultipartUpload operation. Returns: - AbortMultipartUploadResult: Reponse result for AbortMultipartUpload operation. + AbortMultipartUploadResult: Response result for AbortMultipartUpload operation. """ return operations.abort_multipart_upload(self._client, request, **kwargs) @@ -494,7 +494,7 @@ def list_multipart_uploads(self, request: models.ListMultipartUploadsRequest, ** request (ListMultipartUploadsRequest): Request parameters for ListMultipartUploads operation. Returns: - ListMultipartUploadsResult: Reponse result for ListMultipartUploads operation. + ListMultipartUploadsResult: Response result for ListMultipartUploads operation. 
""" return operations.list_multipart_uploads(self._client, request, **kwargs) @@ -508,7 +508,7 @@ def list_parts(self, request: models.ListPartsRequest, **kwargs request (ListPartsRequest): Request parameters for ListParts operation. Returns: - ListPartsResult: Reponse result for ListParts operation. + ListPartsResult: Response result for ListParts operation. """ return operations.list_parts(self._client, request, **kwargs) @@ -523,7 +523,7 @@ def put_symlink(self, request: models.PutSymlinkRequest, **kwargs request (PutSymlinkRequest): Request parameters for PutSymlink operation. Returns: - PutSymlinkResult: Reponse result for PutSymlink operation. + PutSymlinkResult: Response result for PutSymlink operation. """ return operations.put_symlink(self._client, request, **kwargs) @@ -537,7 +537,7 @@ def get_symlink(self, request: models.GetSymlinkRequest, **kwargs request (GetSymlinkRequest): Request parameters for GetSymlink operation. Returns: - GetSymlinkResult: Reponse result for GetSymlink operation. + GetSymlinkResult: Response result for GetSymlink operation. """ return operations.get_symlink(self._client, request, **kwargs) @@ -551,7 +551,7 @@ def put_object_tagging(self, request: models.PutObjectTaggingRequest, **kwargs request (PutObjectTaggingRequest): Request parameters for PutObjectTagging operation. Returns: - PutObjectTaggingResult: Reponse result for PutObjectTagging operation. + PutObjectTaggingResult: Response result for PutObjectTagging operation. """ return operations.put_object_tagging(self._client, request, **kwargs) @@ -565,7 +565,7 @@ def get_object_tagging(self, request: models.GetObjectTaggingRequest, **kwargs request (GetObjectTaggingRequest): Request parameters for GetObjectTagging operation. Returns: - GetObjectTaggingResult: Reponse result for GetObjectTagging operation. + GetObjectTaggingResult: Response result for GetObjectTagging operation. """ return operations.get_object_tagging(self._client, request, **kwargs) @@ -579,7 +579,7 @@ def delete_object_tagging(self, request: models.DeleteObjectTaggingRequest, **kw request (DeleteObjectTaggingRequest): Request parameters for DeleteObjectTagging operation. Returns: - DeleteObjectTaggingResult: Reponse result for DeleteObjectTagging operation. + DeleteObjectTaggingResult: Response result for DeleteObjectTagging operation. """ return operations.delete_object_tagging(self._client, request, **kwargs) @@ -593,7 +593,7 @@ def process_object(self, request: models.ProcessObjectRequest, **kwargs request (ProcessObjectRequest): Request parameters for ProcessObject operation. Returns: - ProcessObjectResult: Reponse result for ProcessObject operation. + ProcessObjectResult: Response result for ProcessObject operation. """ return operations.process_object(self._client, request, **kwargs) @@ -607,7 +607,7 @@ def async_process_object(self, request: models.AsyncProcessObjectRequest, **kwar request (AsyncProcessObjectRequest): Request parameters for AsyncProcessObject operation. Returns: - AsyncProcessObjectResult: Reponse result for AsyncProcessObject operation. + AsyncProcessObjectResult: Response result for AsyncProcessObject operation. 
""" return operations.async_process_object(self._client, request, **kwargs) @@ -635,7 +635,7 @@ def list_objects_paginator(self, **kwargs) -> ListObjectsPaginator: """ return ListObjectsPaginator(self, **kwargs) - def list_objects_v2_paginator(self, **kwargs) -> ListObjectsPaginator: + def list_objects_v2_paginator(self, **kwargs) -> ListObjectsV2Paginator: """Creates a paginator for ListObjectsV2 Returns: @@ -2148,3 +2148,16 @@ def put_bucket_https_config(self, request: models.PutBucketHttpsConfigRequest, * """ return operations.put_bucket_https_config(self._client, request, **kwargs) + # clean restored + def clean_restored_object(self, request: models.CleanRestoredObjectRequest, **kwargs + ) -> models.CleanRestoredObjectResult: + """ + You can call this operation to clean an object restored from Archive or Cold Archive state. After that, the restored object returns to the frozen state. + + Args: + request (CleanRestoredObjectRequest): Request parameters for CleanRestoredObject operation. + + Returns: + CleanRestoredObjectResult: Response result for CleanRestoredObject operation. + """ + return operations.clean_restored_object(self._client, request, **kwargs) \ No newline at end of file diff --git a/alibabacloud_oss_v2/models/bucket_basic.py b/alibabacloud_oss_v2/models/bucket_basic.py index 0da3833..50da493 100644 --- a/alibabacloud_oss_v2/models/bucket_basic.py +++ b/alibabacloud_oss_v2/models/bucket_basic.py @@ -131,6 +131,7 @@ class ObjectProperties(serde.Model): "storage_class": {"tag": "xml", "rename": "StorageClass"}, "owner": {"tag": "xml", "rename": "Owner", "type": "Owner"}, "restore_info": {"tag": "xml", "rename": "RestoreInfo"}, + "transition_time": {"tag": "xml", "rename": "TransitionTime"}, } _dependency_map = { @@ -151,6 +152,7 @@ def __init__( storage_class: Optional[str] = None, owner: Optional[Owner] = None, restore_info: Optional[str] = None, + transition_time: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -164,6 +166,7 @@ def __init__( storage_class (str, optional): The storage class of the object. owner (str, optional): The container that stores information about the bucket owner. restore_info (Owner, optional): The restoration status of the object. + transition_time (str): The time when the storage class of the object is converted to Cold Archive or Deep Cold Archive based on lifecycle rules. """ super().__init__(**kwargs) self.key = key @@ -174,6 +177,7 @@ def __init__( self.storage_class = storage_class self.owner = owner self.restore_info = restore_info + self.transition_time = transition_time class CommonPrefix(serde.Model): @@ -1082,6 +1086,7 @@ class ObjectVersionProperties(serde.Model): "storage_class": {"tag": "xml", "rename": "StorageClass"}, "owner": {"tag": "xml", "rename": "Owner", "type": "Owner"}, "restore_info": {"tag": "xml", "rename": "RestoreInfo"}, + "transition_time": {"tag": "xml", "rename": "TransitionTime"}, } _dependency_map = { @@ -1104,6 +1109,7 @@ def __init__( storage_class: Optional[str] = None, owner: Optional[Owner] = None, restore_info: Optional[str] = None, + transition_time: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -1119,6 +1125,7 @@ def __init__( storage_class (str, optional): The storage class of the object. owner (str, optional): The container that stores information about the bucket owner. restore_info (Owner, optional): The restoration status of the object. + transition_time (str): The time when the storage class of the object is converted to Cold Archive or Deep Cold Archive based on lifecycle rules. 
""" super().__init__(**kwargs) self.key = key @@ -1131,6 +1138,7 @@ def __init__( self.storage_class = storage_class self.owner = owner self.restore_info = restore_info + self.transition_time = transition_time class DeleteMarkerProperties(serde.Model): diff --git a/alibabacloud_oss_v2/models/bucket_inventory.py b/alibabacloud_oss_v2/models/bucket_inventory.py index f62c4da..9dfd093 100644 --- a/alibabacloud_oss_v2/models/bucket_inventory.py +++ b/alibabacloud_oss_v2/models/bucket_inventory.py @@ -31,6 +31,7 @@ class InventoryOptionalFieldType(str, Enum): STORAGE_CLASS = 'StorageClass' IS_MULTIPART_UPLOADED = 'IsMultipartUploaded' ENCRYPTION_STATUS = 'EncryptionStatus' + TRANSITION_TIME = 'TransitionTime' class SSEKMS(serde.Model): diff --git a/alibabacloud_oss_v2/models/object_basic.py b/alibabacloud_oss_v2/models/object_basic.py index 29966ac..0b19b4d 100644 --- a/alibabacloud_oss_v2/models/object_basic.py +++ b/alibabacloud_oss_v2/models/object_basic.py @@ -247,6 +247,7 @@ class HeadObjectResult(serde.ResultModel): "allow_age": {"tag": "output", "position": "header", "rename": "Access-Control-Allow-Age"}, "allow_headers": {"tag": "output", "position": "header", "rename": "Access-Control-Allow-Headers"}, "expose_headers": {"tag": "output", "position": "header", "rename": "Access-Control-Expose-Headers"}, + 'transition_time': {'tag': 'output', 'position': 'header', 'rename': 'x-oss-transition-time'}, } def __init__( @@ -279,6 +280,7 @@ def __init__( allow_age: Optional[str] = None, allow_headers: Optional[str] = None, expose_headers: Optional[str] = None, + transition_time: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -324,6 +326,7 @@ def __init__( allow_age (str, optional): The maximum caching period for CORS. allow_headers (str, optional): The headers allowed for CORS. expose_headers (str, optional): The headers that can be accessed by JavaScript applications on the client. + transition_time (str, optional): The time when the storage class of the object is converted to Cold Archive or Deep Cold Archive based on lifecycle rules. 
""" super().__init__(**kwargs) self.content_length = content_length @@ -354,6 +357,7 @@ def __init__( self.allow_age = allow_age self.allow_headers = allow_headers self.expose_headers = expose_headers + self.transition_time = transition_time class GetObjectRequest(serde.RequestModel): """The request for the GetObject operation.""" @@ -974,7 +978,7 @@ def __init__( bucket: str = None, key: str = None, version_id: Optional[str] = None, - request_payer: Optional[str] = None, + request_payer: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -1057,7 +1061,7 @@ def __init__( encoding_type: Optional[str] = None, objects: Optional[List[DeleteObject]] = None, quiet: Optional[bool] = None, - request_payer: Optional[str] = None, + request_payer: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -1103,7 +1107,7 @@ def __init__( _attribute_map = { "key": {"tag": "xml", "rename": "Key"}, "version_id": {"tag": "xml", "rename": "VersionId"}, - "delete_marker": {"tag": "xml", "rename": "DeleteMarker"}, + "delete_marker": {"tag": "xml", "rename": "DeleteMarker", "type": "bool"}, "delete_marker_version_id": {"tag": "xml", "rename": "DeleteMarkerVersionId"}, } _xml_map = { @@ -1184,6 +1188,7 @@ class GetObjectMetaResult(serde.ResultModel): "last_access_time": {"tag": "output", "position": "header", "rename": "x-oss-last-access-time", "type": "datetime,httptime"}, "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, "hash_crc64": {"tag": "output", "position": "header", "rename": "x-oss-hash-crc64ecma"}, + 'transition_time': {'tag': 'output', 'position': 'header', 'rename': 'x-oss-transition-time'}, } def __init__( @@ -1194,6 +1199,7 @@ def __init__( last_access_time: Optional[datetime.datetime] = None, version_id: Optional[str] = None, hash_crc64: Optional[str] = None, + transition_time: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -1206,6 +1212,7 @@ def __init__( version_id (str, optional): Version of the object. hash_crc64 (str, optional): The 64-bit CRC value of the object. This value is calculated based on the ECMA-182 standard. + transition_time (str, optional): The time when the storage class of the object is converted to Cold Archive or Deep Cold Archive based on lifecycle rules. """ super().__init__(**kwargs) self.content_length = content_length @@ -1214,6 +1221,7 @@ def __init__( self.last_access_time = last_access_time self.version_id = version_id self.hash_crc64 = hash_crc64 + self.transition_time = transition_time class RestoreRequest(serde.Model): @@ -2804,3 +2812,34 @@ def __init__( "task_id": {"tag": "json", "rename": "TaskId"}, "process_request_id": {"tag": "json", "rename": "RequestId"}, } + + +class CleanRestoredObjectRequest(serde.RequestModel): + """ + The request for the CleanRestoredObject operation. + """ + + _attribute_map = { + 'bucket': {'tag': 'input', 'position': 'host', 'rename': 'bucket', 'type': 'str', 'required': True}, + 'key': {'tag': 'input', 'position': 'path', 'rename': 'key', 'type': 'str', 'required': True}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + **kwargs: Any + ) -> None: + """ + bucket (str, required): The name of the bucket + key (str, required): The name of the object. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + + +class CleanRestoredObjectResult(serde.ResultModel): + """ + The request for the CleanRestoredObject operation. 
+ """ \ No newline at end of file diff --git a/alibabacloud_oss_v2/operations/object_basic.py b/alibabacloud_oss_v2/operations/object_basic.py index 9677fcf..c5bc5eb 100644 --- a/alibabacloud_oss_v2/operations/object_basic.py +++ b/alibabacloud_oss_v2/operations/object_basic.py @@ -283,7 +283,7 @@ def delete_multiple_objects(client: _SyncClientImpl, request: models.DeleteMulti ) -def get_object_meta(client: _SyncClientImpl, request: models.HeadObjectRequest, **kwargs) -> models.GetObjectMetaResult: +def get_object_meta(client: _SyncClientImpl, request: models.GetObjectMetaRequest, **kwargs) -> models.GetObjectMetaResult: """ get object meta synchronously @@ -990,3 +990,45 @@ def async_process_object(client: _SyncClientImpl, request: models.AsyncProcessOb serde_utils.deserialize_process_body ], ) + +def clean_restored_object(client: _SyncClientImpl, request: models.CleanRestoredObjectRequest, **kwargs) -> models.CleanRestoredObjectResult: + """ + clean_restored_object synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (CleanRestoredObjectRequest): The request for the CleanRestoredObject operation. + + Returns: + CleanRestoredObjectResult: The result for the CleanRestoredObject operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='CleanRestoredObject', + method='POST', + headers=CaseInsensitiveDict({ + 'Content-Type': 'application/xml', + }), + parameters={ + 'cleanRestoredObject': '', + }, + bucket=request.bucket, + key=request.key, + op_metadata={'sub-resource': ['cleanRestoredObject']}, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.CleanRestoredObjectResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody + ], + ) diff --git a/tests/integration/test_clean_restored_object_client.py b/tests/integration/test_clean_restored_object_client.py new file mode 100644 index 0000000..99c593c --- /dev/null +++ b/tests/integration/test_clean_restored_object_client.py @@ -0,0 +1,174 @@ +# pylint: skip-file +import time +from typing import cast +import alibabacloud_oss_v2 as oss +from . 
import TestIntegration, random_bucket_name + + +class TestCleanRestoredObject(TestIntegration): + + def test_clean_restored_object(self): + key = 'demo.txt' + data = b'hello world' + bucket_name = random_bucket_name() + + # create bucket + result = self.client.put_bucket(oss.PutBucketRequest( + bucket=bucket_name, + acl='private', + create_bucket_configuration=oss.CreateBucketConfiguration( + storage_class='IA', + ) + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + result = self.client.put_object(oss.PutObjectRequest( + bucket=bucket_name, + key=key, + storage_class='ColdArchive', + body=data, + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + # clean restored object + try: + self.client.clean_restored_object(oss.CleanRestoredObjectRequest( + bucket=bucket_name, + key=key, + )) + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(409, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('ArchiveRestoreFileStale', serr.code) + + # restore object + self.client.restore_object(oss.RestoreObjectRequest( + bucket=bucket_name, + key=key, + restore_request=oss.RestoreRequest( + days=1, + tier="Expedited", + ) + )) + + time.sleep(1) + + # clean restored object + try: + self.client.clean_restored_object(oss.CleanRestoredObjectRequest( + bucket=bucket_name, + key=key, + )) + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(409, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('ArchiveRestoreNotFinished', serr.code) + + + def test_clean_restored_object_v1(self): + key = 'demo.txt' + data = b'hello world' + bucket_name = random_bucket_name() + + # create bucket + result = self.signv1_client.put_bucket(oss.PutBucketRequest( + bucket=bucket_name, + acl='private', + create_bucket_configuration=oss.CreateBucketConfiguration( + storage_class='IA', + ) + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + result = self.signv1_client.put_object(oss.PutObjectRequest( + bucket=bucket_name, + key=key, + storage_class='ColdArchive', + body=data, + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + # clean restored object + try: + self.signv1_client.clean_restored_object(oss.CleanRestoredObjectRequest( + bucket=bucket_name, + key=key, + )) + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(409, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('ArchiveRestoreFileStale', serr.code) + + # restore object + 
self.signv1_client.restore_object(oss.RestoreObjectRequest( + bucket=bucket_name, + key=key, + restore_request=oss.RestoreRequest( + days=1, + tier="Expedited", + ) + )) + + time.sleep(1) + + # clean restored object + try: + self.signv1_client.clean_restored_object(oss.CleanRestoredObjectRequest( + bucket=bucket_name, + key=key, + )) + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(409, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('ArchiveRestoreNotFinished', serr.code) + + + def test_clean_restored_object_fail(self): + # create bucket + bucket_name = random_bucket_name() + self.client.put_bucket(oss.PutBucketRequest( + bucket=bucket_name, + acl='private', + create_bucket_configuration=oss.CreateBucketConfiguration( + storage_class='IA' + ) + )) + + # clean restored object + try: + self.invalid_client.clean_restored_object(oss.CleanRestoredObjectRequest( + bucket=bucket_name, + key='demo.jpg', + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) diff --git a/tests/unit/models/test_bucket_basic.py b/tests/unit/models/test_bucket_basic.py index 22bd76e..ee0c9f8 100644 --- a/tests/unit/models/test_bucket_basic.py +++ b/tests/unit/models/test_bucket_basic.py @@ -445,6 +445,7 @@ def test_constructor_result(self): display_name='user_example', ), restore_info='ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT"', + transition_time='2023-12-17T00:20:57.000Z', )], common_prefixes=[model.CommonPrefix( prefix='fun/movie/', @@ -472,6 +473,7 @@ def test_constructor_result(self): self.assertEqual('0022012****', result.contents[0].owner.id) self.assertEqual('user_example', result.contents[0].owner.display_name) self.assertEqual('fun/movie/', result.common_prefixes[0].prefix) + self.assertEqual('2023-12-17T00:20:57.000Z', result.contents[0].transition_time) result = model.ListObjectsV2Result( name='example-bucket', @@ -518,6 +520,7 @@ def test_deserialize_result(self): user-example22 ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT" + 2023-12-08T08:12:20.000Z a/b/ @@ -556,6 +559,7 @@ def test_deserialize_result(self): self.assertEqual('ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT"', result.contents[1].restore_info) self.assertEqual('0022012****22', result.contents[1].owner.id) self.assertEqual('user-example22', result.contents[1].owner.display_name) + self.assertEqual('2023-12-08T08:12:20.000Z', result.contents[1].transition_time) class TestGetBucketStat(unittest.TestCase): @@ -817,6 +821,7 @@ def test_constructor_result(self): display_name='user_example', ), restore_info='ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT"', + transition_time='2023-12-17T00:20:57.000Z', )], common_prefixes=[model.CommonPrefix( prefix='fun/movie/', @@ -840,6 +845,7 @@ def test_constructor_result(self): self.assertEqual('0022012****', result.contents[0].owner.id) self.assertEqual('user_example', result.contents[0].owner.display_name) self.assertEqual('fun/movie/', result.common_prefixes[0].prefix) + 
self.assertEqual('2023-12-17T00:20:57.000Z', result.contents[0].transition_time) result = model.ListObjectsV2Result( name='example-bucket', @@ -885,6 +891,7 @@ def test_deserialize_result(self): user-example22 ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT" + 2023-12-08T08:12:20.000Z a/b/ @@ -920,6 +927,7 @@ def test_deserialize_result(self): self.assertEqual('ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT"', result.contents[1].restore_info) self.assertEqual('0022012****22', result.contents[1].owner.id) self.assertEqual('user-example22', result.contents[1].owner.display_name) + self.assertEqual('2023-12-08T08:12:20.000Z', result.contents[1].transition_time) class TestGetBucketInfo(unittest.TestCase): @@ -1436,6 +1444,7 @@ def test_constructor_result(self): display_name='1250000000', ), restore_info='ongoing-request="true"', + transition_time='2023-12-17T00:20:57.000Z', )], delete_marker=[model.DeleteMarkerProperties( key='demo%2FREADME-CN.md', @@ -1510,6 +1519,7 @@ def test_constructor_result(self): self.assertEqual('250692521021****', result.delete_marker[1].owner.display_name) self.assertEqual('demo%2F.git%2F', result.common_prefixes[0].prefix) self.assertEqual('demo%2F.idea%2F', result.common_prefixes[1].prefix) + self.assertEqual('2023-12-17T00:20:57.000Z', result.version[1].transition_time) result = model.ListObjectVersionsResult( version_id_marker='BgICDzK6NnBgiIGRlZWJhY', @@ -1560,6 +1570,7 @@ def test_deserialize_result(self): 1250000000 ongoing-request="true" + 2023-12-08T08:12:20.000Z demo%2FREADME-CN.md @@ -1636,4 +1647,5 @@ def test_deserialize_result(self): self.assertEqual('150692521021****', result.delete_marker[1].owner.id) self.assertEqual('250692521021****', result.delete_marker[1].owner.display_name) self.assertEqual('demo%2F.git%2F', result.common_prefixes[0].prefix) - self.assertEqual('demo%2F.idea%2F', result.common_prefixes[1].prefix) \ No newline at end of file + self.assertEqual('demo%2F.idea%2F', result.common_prefixes[1].prefix) + self.assertEqual('2023-12-08T08:12:20.000Z', result.version[1].transition_time) \ No newline at end of file diff --git a/tests/unit/models/test_bucket_inventory.py b/tests/unit/models/test_bucket_inventory.py index 6898f28..e11ab24 100644 --- a/tests/unit/models/test_bucket_inventory.py +++ b/tests/unit/models/test_bucket_inventory.py @@ -298,6 +298,7 @@ def test_deserialize_result(self): StorageClass IsMultipartUploaded EncryptionStatus + TransitionTime ''' @@ -331,6 +332,7 @@ def test_deserialize_result(self): self.assertEqual(InventoryOptionalFieldType.STORAGE_CLASS, result.inventory_configuration.optional_fields.fields[3]) self.assertEqual(InventoryOptionalFieldType.IS_MULTIPART_UPLOADED, result.inventory_configuration.optional_fields.fields[4]) self.assertEqual(InventoryOptionalFieldType.ENCRYPTION_STATUS, result.inventory_configuration.optional_fields.fields[5]) + self.assertEqual(InventoryOptionalFieldType.TRANSITION_TIME, result.inventory_configuration.optional_fields.fields[6]) class TestListBucketInventory(unittest.TestCase): diff --git a/tests/unit/models/test_clean_restored_object.py b/tests/unit/models/test_clean_restored_object.py new file mode 100644 index 0000000..2bc2eba --- /dev/null +++ b/tests/unit/models/test_clean_restored_object.py @@ -0,0 +1,72 @@ +# pylint: skip-file + +import unittest +from alibabacloud_oss_v2 import serde +from alibabacloud_oss_v2.models import object_basic as model +from alibabacloud_oss_v2.types import OperationInput, OperationOutput, 
CaseInsensitiveDict, HttpResponse +from .. import MockHttpResponse + +class TestCleanRestoredObject(unittest.TestCase): + def test_constructor_request(self): + request = model.CleanRestoredObjectRequest( + ) + self.assertIsNone(request.bucket) + self.assertIsNone(request.key) + self.assertFalse(hasattr(request, 'headers')) + self.assertFalse(hasattr(request, 'parameters')) + self.assertFalse(hasattr(request, 'payload')) + self.assertIsInstance(request, serde.RequestModel) + + request = model.CleanRestoredObjectRequest( + bucket='bucketexampletest', + key='multipart.data', + ) + self.assertEqual('bucketexampletest', request.bucket) + self.assertEqual('multipart.data', request.key) + + def test_serialize_request(self): + request = model.CleanRestoredObjectRequest( + bucket='bucketexampletest', + key='test.jpg', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='CleanRestoredObject', + method='POST', + bucket=request.bucket, + )) + self.assertEqual('CleanRestoredObject', op_input.op_name) + self.assertEqual('POST', op_input.method) + self.assertEqual('bucketexampletest', op_input.bucket) + + def test_constructor_result(self): + result = model.CleanRestoredObjectResult() + self.assertIsInstance(result, serde.ResultModel) + + def test_deserialize_result(self): + xml_data = None + result = model.CleanRestoredObjectResult() + serde.deserialize_output( + result, + OperationOutput( + status='OK', + status_code=200, + headers=CaseInsensitiveDict({ + 'x-oss-request-id': '123', + 'x-oss-hash-crc64ecma': '316181249502703****', + 'x-oss-version-id': 'CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + }), + http_response=MockHttpResponse( + status_code=200, + reason='OK', + headers={'x-oss-request-id': 'id-1234'}, + body=xml_data, + ) + ) + ) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('123', result.request_id) + self.assertEqual('316181249502703****', result.headers.get('x-oss-hash-crc64ecma')) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', result.headers.get('x-oss-version-id')) + diff --git a/tests/unit/models/test_object_basic.py b/tests/unit/models/test_object_basic.py index 93fae28..3d3375f 100644 --- a/tests/unit/models/test_object_basic.py +++ b/tests/unit/models/test_object_basic.py @@ -370,6 +370,7 @@ def test_constructor_result(self): self.assertIsNone(result.allow_age) self.assertIsNone(result.allow_headers) self.assertIsNone(result.expose_headers) + self.assertIsNone(result.transition_time) self.assertIsInstance(result, serde.Model) result = model.HeadObjectResult( @@ -406,6 +407,7 @@ def test_constructor_result(self): allow_age='1111', allow_headers='{a:a1, b:b2}', expose_headers='{a:a1, b:b2}', + transition_time='2024-10-12T00:00:00.000Z', ) self.assertEqual(1024, result.content_length) self.assertEqual('text/xml', result.content_type) @@ -435,6 +437,7 @@ def test_constructor_result(self): self.assertEqual('1111', result.allow_age) self.assertEqual('{a:a1, b:b2}', result.allow_headers) self.assertEqual('{a:a1, b:b2}', result.expose_headers) + self.assertEqual('2024-10-12T00:00:00.000Z', result.transition_time) result = model.HeadObjectResult( expose_headers='expose_headers-test', @@ -1497,11 +1500,11 @@ def test_deserialize_result(self): self.assertEqual("url", result.encoding_type) self.assertEqual("multipart.data", result.deleted_objects[0].key) self.assertEqual("CAEQNRiBgICEoPiC0BYiIGMxZWJmYmMzYjE0OTQ0ZmZhYjgzNzkzYjc2NjZk****", 
result.deleted_objects[0].version_id) - self.assertEqual("true", result.deleted_objects[0].delete_marker) + self.assertEqual(True, result.deleted_objects[0].delete_marker) self.assertEqual("CAEQMhiBgIDXiaaB0BYiIGQzYmRkZGUxMTM1ZDRjOTZhNjk4YjRjMTAyZjhl****", result.deleted_objects[0].delete_marker_version_id) self.assertEqual("test.jpg", result.deleted_objects[1].key) self.assertEqual("0BYiIGMxZWJmYmMzYjE0OTQ0ZmZhYjgzNzkzYjc2NjZk****", result.deleted_objects[1].version_id) - self.assertEqual("true", result.deleted_objects[1].delete_marker) + self.assertEqual(True, result.deleted_objects[1].delete_marker) self.assertEqual("CAEQMhiBgIDB3aWB0BYiIGUzYTA3YzliMzVmNzRkZGM5NjllYTVlMjYyYWEy****", result.deleted_objects[1].delete_marker_version_id) @@ -1572,10 +1575,37 @@ def test_serialize_request(self): self.assertEqual('GET', op_input.method) self.assertEqual('bucket_name', op_input.bucket) self.assertEqual('requester', op_input.headers.get('x-oss-request-payer')) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', op_input.parameters.get('versionId')) def test_constructor_result(self): result = model.GetObjectMetaResult() - self.assertIsInstance(result, serde.ResultModel) + self.assertIsNone(result.content_length) + self.assertIsNone(result.etag) + self.assertIsNone(result.last_modified) + self.assertIsNone(result.last_access_time) + self.assertIsNone(result.version_id) + self.assertIsNone(result.hash_crc64) + self.assertIsNone(result.transition_time) + self.assertIsInstance(result, serde.Model) + + result = model.GetObjectMetaResult( + content_length=101, + etag='"D41D8CD98F00B204E9800998ECF8****"', + last_modified=datetime.datetime.fromtimestamp(1702743657), + last_access_time=datetime.datetime.fromtimestamp(1702743657), + version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + hash_crc64='316181249502703****', + transition_time='2024-10-12T00:00:00.000Z', + ) + self.assertEqual(101, result.content_length) + self.assertEqual('"D41D8CD98F00B204E9800998ECF8****"', result.etag) + self.assertEqual(datetime.datetime.fromtimestamp(1702743657), result.last_modified) + self.assertEqual('2023-12-17T00:20:57.000Z', result.last_modified.strftime("%Y-%m-%dT%H:%M:%S.000Z")) + self.assertEqual(datetime.datetime.fromtimestamp(1702743657), result.last_access_time) + self.assertEqual('2023-12-17T00:20:57.000Z', result.last_access_time.strftime('%Y-%m-%dT%H:%M:%S.000Z')) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', result.version_id) + self.assertEqual('316181249502703****', result.hash_crc64) + self.assertEqual('2024-10-12T00:00:00.000Z', result.transition_time) def test_deserialize_result(self): xml_data = None From 28be827b795564c1b0ea8af9737b4ae68184b85e Mon Sep 17 00:00:00 2001 From: zhuxiaolong37 Date: Mon, 10 Feb 2025 18:09:16 +0800 Subject: [PATCH 2/6] Modify incorrect comments --- alibabacloud_oss_v2/models/object_basic.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/alibabacloud_oss_v2/models/object_basic.py b/alibabacloud_oss_v2/models/object_basic.py index 0b19b4d..fde7295 100644 --- a/alibabacloud_oss_v2/models/object_basic.py +++ b/alibabacloud_oss_v2/models/object_basic.py @@ -944,9 +944,7 @@ def __init__( source_version_id (str, optional): The version ID of the source object. server_side_encryption (str, optional): The encryption method on the server side when an object is created. 
Valid values: AES256 and KMS - server_side_data_encryption (str, optional): The ID of the customer master key (CMK) - that is managed by Key Management Service (KMS). This header is valid only when the x-oss-server-side-encryption header - is set to KMS. + server_side_data_encryption (str, optional): The server side data encryption algorithm. server_side_encryption_key_id (str, optional): The ID of the customer master key (CMK) that is managed by Key Management Service (KMS). last_modified (str, optional): The time when the returned objects were last modified. etag (str, optional): The entity tag (ETag). @@ -2060,7 +2058,7 @@ def __init__( class ListMultipartUploadsResult(serde.ResultModel): - """The result for the ListBuckets operation.""" + """The result for the ListMultipartUploads operation.""" _attribute_map = { "encoding_type": {"tag": "xml", "rename": "EncodingType"}, From 863476a5d6a36b05d6d44920915bf0f904a5c433 Mon Sep 17 00:00:00 2001 From: "guangjun.hgj" Date: Mon, 3 Mar 2025 15:35:01 +0800 Subject: [PATCH 3/6] add operation level read-write timeout --- alibabacloud_oss_v2/_client.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/alibabacloud_oss_v2/_client.py b/alibabacloud_oss_v2/_client.py index 394e3b7..1a90804 100644 --- a/alibabacloud_oss_v2/_client.py +++ b/alibabacloud_oss_v2/_client.py @@ -460,6 +460,8 @@ def _sent_http_request_once(self, context: SigningContext, options: _Options) -> send_kwargs = {} if options.response_stream is not None: send_kwargs['stream'] = options.response_stream + if options.readwrite_timeout is not None: + send_kwargs['readwrite_timeout'] = options.readwrite_timeout response = options.http_client.send(context.request, **send_kwargs) From e05b3946d018994577775437e208018443bfd3f0 Mon Sep 17 00:00:00 2001 From: "guangjun.hgj" Date: Mon, 3 Mar 2025 16:49:32 +0800 Subject: [PATCH 4/6] change the type of the body in put_access_point_policy and put_bucket_policy. --- alibabacloud_oss_v2/models/access_point.py | 8 ++++---- alibabacloud_oss_v2/models/bucket_policy.py | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/alibabacloud_oss_v2/models/access_point.py b/alibabacloud_oss_v2/models/access_point.py index e671aad..995c614 100644 --- a/alibabacloud_oss_v2/models/access_point.py +++ b/alibabacloud_oss_v2/models/access_point.py @@ -1,6 +1,6 @@ import io from typing import Optional, List, Any -from .. import serde +from .. import serde, BodyType class AccessPointVpcConfiguration(serde.Model): @@ -432,20 +432,20 @@ class PutAccessPointPolicyRequest(serde.RequestModel): _attribute_map = { 'bucket': {'tag': 'input', 'position': 'host', 'rename': 'bucket', 'type': 'str'}, 'access_point_name': {'tag': 'input', 'position': 'header', 'rename': 'x-oss-access-point-name', 'type': 'str'}, - 'body': {'tag': 'input', 'position': 'body', 'rename': 'nop', 'type': 'str'}, + 'body': {'tag': 'input', 'position': 'body', 'rename': 'nop'}, } def __init__( self, bucket: Optional[str] = None, access_point_name: Optional[str] = None, - body: Optional[str] = None, + body: Optional[BodyType] = None, **kwargs: Any ) -> None: """ bucket (str, optional): The name of the bucket. access_point_name (str, optional): The name of the access point. - body (str, optional): The configurations of the access point policy. + body (BodyType, optional): The configurations of the access point policy. 
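Two caller-facing changes land in these hunks. The read/write timeout becomes settable per operation; a hedged sketch, assuming per-operation kwargs are forwarded into the request options as elsewhere in this client, with the timeout in seconds:

    result = client.get_object(oss.GetObjectRequest(
        bucket='examplebucket',   # assumed names
        key='big-file.bin',
    ), readwrite_timeout=60)      # assumed per-operation override, in seconds

And with body widened from str to BodyType, a policy can be passed as a string or a pre-encoded payload; a sketch with an assumed minimal policy document:

    import json

    policy = {'Version': '1', 'Statement': []}   # assumed minimal policy document
    client.put_bucket_policy(oss.PutBucketPolicyRequest(
        bucket='examplebucket',
        body=json.dumps(policy),                 # str, as before
    ))
    # bytes now also type-check: body=json.dumps(policy).encode()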
""" super().__init__(**kwargs) self.bucket = bucket diff --git a/alibabacloud_oss_v2/models/bucket_policy.py b/alibabacloud_oss_v2/models/bucket_policy.py index 6e30de4..08e7bd8 100644 --- a/alibabacloud_oss_v2/models/bucket_policy.py +++ b/alibabacloud_oss_v2/models/bucket_policy.py @@ -1,5 +1,5 @@ from typing import Optional, List, Any, Union -from .. import serde +from .. import serde, BodyType class PolicyStatus(serde.Model): @@ -36,18 +36,18 @@ class PutBucketPolicyRequest(serde.RequestModel): _attribute_map = { 'bucket': {'tag': 'input', 'position': 'host', 'rename': 'bucket', 'type': 'str', 'required': True}, - 'body': {'tag': 'input', 'position': 'body', 'rename': 'nop', 'type': 'str', 'required': True}, + 'body': {'tag': 'input', 'position': 'body', 'rename': 'nop', 'required': True}, } def __init__( self, bucket: str = None, - body: Optional[str] = None, + body: Optional[BodyType] = None, **kwargs: Any ) -> None: """ bucket (str, required): The name of the bucket. - body (str, required): The request parameters. + body (BodyType, required): The request parameters. """ super().__init__(**kwargs) self.bucket = bucket From 8fc988ce3469a32ed3f5e08af473266640157e96 Mon Sep 17 00:00:00 2001 From: "guangjun.hgj" Date: Mon, 3 Mar 2025 17:07:48 +0800 Subject: [PATCH 5/6] fix the issue that callback result can not be obtained. --- .../operations/object_basic.py | 29 ++-- alibabacloud_oss_v2/serde.py | 12 ++ sample/multipart_upload_callback.py | 74 ++++++++++ sample/post_object_callback.py | 132 ++++++++++++++++++ sample/put_object_callback.py | 54 +++++++ 5 files changed, 292 insertions(+), 9 deletions(-) create mode 100644 sample/multipart_upload_callback.py create mode 100644 sample/post_object_callback.py create mode 100644 sample/put_object_callback.py diff --git a/alibabacloud_oss_v2/operations/object_basic.py b/alibabacloud_oss_v2/operations/object_basic.py index c5bc5eb..9313bbf 100644 --- a/alibabacloud_oss_v2/operations/object_basic.py +++ b/alibabacloud_oss_v2/operations/object_basic.py @@ -43,13 +43,19 @@ def put_object(client: _SyncClientImpl, request: models.PutObjectRequest, **kwar op_output = client.invoke_operation(op_input, **kwargs) + serdes = [ + serde.deserialize_output_headers + ] + + if request.callback is None: + serdes.append(serde.deserialize_output_discardbody) + else: + serdes.append(serde.deserialize_output_callbackbody) + return serde.deserialize_output( result=models.PutObjectResult(), op_output=op_output, - custom_deserializer=[ - serde.deserialize_output_discardbody, - serde.deserialize_output_headers - ], + custom_deserializer=serdes, ) @@ -583,14 +589,19 @@ def complete_multipart_upload(client: _SyncClientImpl, request: models.CompleteM op_output = client.invoke_operation(op_input, **kwargs) + serdes = [ + serde.deserialize_output_headers, + serde_utils.deserialize_encode_type + ] + if request.callback is None: + serdes.append(serde.deserialize_output_xmlbody) + else: + serdes.append(serde.deserialize_output_callbackbody) + return serde.deserialize_output( result=models.CompleteMultipartUploadResult(), op_output=op_output, - custom_deserializer=[ - serde.deserialize_output_xmlbody, - serde.deserialize_output_headers, - serde_utils.deserialize_encode_type - ], + custom_deserializer=serdes, ) diff --git a/alibabacloud_oss_v2/serde.py b/alibabacloud_oss_v2/serde.py index b762404..3f45735 100644 --- a/alibabacloud_oss_v2/serde.py +++ b/alibabacloud_oss_v2/serde.py @@ -713,3 +713,15 @@ def copy_request(dst: RequestModel, src: RequestModel): if src_value is None: 
continue setattr(dst, attr, src_value) + + +def deserialize_output_callbackbody(result: Model, op_output: OperationOutput): + """deserialize output callback body + + Args: + result (Model): The result model that receives the callback body. + op_output (OperationOutput): The operation output whose HTTP response carries the callback body. + """ + callback_body = op_output.http_response.content + callback_result = callback_body.decode() + setattr(result, 'callback_result', callback_result) \ No newline at end of file diff --git a/sample/multipart_upload_callback.py b/sample/multipart_upload_callback.py new file mode 100644 index 0000000..30a3f07 --- /dev/null +++ b/sample/multipart_upload_callback.py @@ -0,0 +1,74 @@ +import base64 +import os +import argparse +import alibabacloud_oss_v2 as oss + +parser = argparse.ArgumentParser(description="multipart upload callback sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) +parser.add_argument('--file_path', help='The path of the file to upload.', required=True) +parser.add_argument('--callback_url', help='Callback server address.', required=True) + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest( + bucket=args.bucket, + key=args.key, + )) + print(vars(result)) + + part_size = 100 * 1024 + data_size = os.path.getsize(args.file_path) + part_number = 1 + upload_parts = [] + with open(args.file_path, 'rb') as f: + for start in range(0, data_size, part_size): + n = part_size + if start + n > data_size: + n = data_size - start + reader = oss.io_utils.SectionReader(oss.io_utils.ReadAtReader(f), start, n) + up_result = client.upload_part(oss.UploadPartRequest( + bucket=args.bucket, + key=args.key, + upload_id=result.upload_id, + part_number=part_number, + body=reader + )) + print(vars(up_result)) + + upload_parts.append(oss.UploadPart(part_number=part_number, etag=up_result.etag)) + part_number += 1 + + parts = sorted(upload_parts, key=lambda p: p.part_number) + result = client.complete_multipart_upload(oss.CompleteMultipartUploadRequest( + bucket=args.bucket, + key=args.key, + upload_id=result.upload_id, + complete_multipart_upload=oss.CompleteMultipartUpload( + parts=parts + ), + callback=base64.b64encode(str('{\"callbackUrl\":\"'+args.callback_url+'\",\"callbackBody\":\"bucket=${bucket}&object=${object}&my_var_1=${x:var1}&my_var_2=${x:var2}\"}').encode()).decode(), + callback_var=base64.b64encode('{\"x:var1\":\"value1\",\"x:var2\":\"value2\"}'.encode()).decode(), + )) + + print(vars(result)) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/sample/post_object_callback.py b/sample/post_object_callback.py new file mode 100644 index 0000000..0d75040 --- /dev/null +++ b/sample/post_object_callback.py @@ -0,0 +1,132 @@ +import argparse +import base64 +import hashlib +import hmac +import json +import random +import requests +from datetime import datetime, timedelta
+import alibabacloud_oss_v2 as oss + +parser = argparse.ArgumentParser(description="post object sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) +parser.add_argument('--callback_url', help='Callback server address.', required=True) + + +def main(): + content = "hi oss" + product = "oss" + + args = parser.parse_args() + region = args.region + bucket_name = args.bucket + object_name = args.key + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + credential = credentials_provider.get_credentials() + access_key_id = credential.access_key_id + access_key_secret = credential.access_key_secret + + utc_time = datetime.utcnow() + date = utc_time.strftime("%Y%m%d") + expiration = utc_time + timedelta(hours=1) + policy_map = { + "expiration": expiration.strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "conditions": [ + {"bucket": bucket_name}, + {"x-oss-signature-version": "OSS4-HMAC-SHA256"}, + {"x-oss-credential": f"{access_key_id}/{date}/{region}/{product}/aliyun_v4_request"}, + {"x-oss-date": utc_time.strftime("%Y%m%dT%H%M%SZ")}, + ["content-length-range", 1, 1024] + ] + } + policy = json.dumps(policy_map) + string_to_sign = base64.b64encode(policy.encode()).decode() + + def build_post_body(field_dict, boundary): + post_body = '' + + # Encoding Form Fields + for k, v in field_dict.items(): + if k != 'content' and k != 'content-type': + post_body += '''--{0}\r\nContent-Disposition: form-data; name=\"{1}\"\r\n\r\n{2}\r\n'''.format(boundary, k, v) + + # The content of the uploaded file must be the last form field + post_body += '''--{0}\r\nContent-Disposition: form-data; name=\"file\"\r\n\r\n{1}'''.format( + boundary, field_dict['content']) + + # Add a form field terminator + post_body += '\r\n--{0}--\r\n'.format(boundary) + + return post_body.encode('utf-8') + + signing_key = "aliyun_v4" + access_key_secret + h1 = hmac.new(signing_key.encode(), date.encode(), hashlib.sha256) + h1_key = h1.digest() + h2 = hmac.new(h1_key, region.encode(), hashlib.sha256) + h2_key = h2.digest() + h3 = hmac.new(h2_key, product.encode(), hashlib.sha256) + h3_key = h3.digest() + h4 = hmac.new(h3_key, "aliyun_v4_request".encode(), hashlib.sha256) + h4_key = h4.digest() + + h = hmac.new(h4_key, string_to_sign.encode(), hashlib.sha256) + signature = h.hexdigest() + + field_dict = {} + field_dict['key'] = object_name + field_dict['policy'] = string_to_sign + field_dict['x-oss-signature-version'] = "OSS4-HMAC-SHA256" + field_dict['x-oss-credential'] = f"{access_key_id}/{date}/{region}/{product}/aliyun_v4_request" + field_dict['x-oss-date'] = f"{utc_time.strftime('%Y%m%dT%H%M%SZ')}" + field_dict['x-oss-signature'] = signature + field_dict['content'] = content + + def encode_callback(callback_params): + cb_str = json.dumps(callback_params).strip() + return base64.b64encode(cb_str.encode()).decode() + + # Set upload callback parameters. + callback_params = {} + # Set the server address for callback requests, for example http://oss-demo.aliyuncs.com:23450 . + callback_params['callbackUrl'] = args.callback_url + # (Optional) Set the value of Host in the callback request header, which is the value configured for Host on your server. 
+ # callback_params['callbackHost'] = 'yourCallbackHost' + # Set the value of the request body when initiating a callback. + callback_params['callbackBody'] = 'bucket=${bucket}&object=${object}&my_var_1=${x:my_var1}&my_var_2=${x:my_var2}' + # Set the Content Type for initiating callback requests. + callback_params['callbackBodyType'] = 'application/x-www-form-urlencoded' + encoded_callback = encode_callback(callback_params) + # Set custom parameters for initiating callback requests, consisting of Key and Value, with Key starting with x:. + callback_var_params = {'x:my_var1': 'my_val1', 'x:my_var2': 'my_val2'} + + + # Upload callback. + field_dict['callback'] = encoded_callback + field_dict['x:my_var1'] = 'value1' + field_dict['x:my_var2'] = 'value2' + + # The boundary string of the form field is usually a random string + boundary = ''.join(random.choice('0123456789') for _ in range(11)) + # Send POST request + body = build_post_body(field_dict, boundary) + + url = f"http://{bucket_name}.oss-{region}.aliyuncs.com" + headers = { + "Content-Type": f"multipart/form-data; boundary={boundary}", + } + response = requests.post(url, data=body, headers=headers) + + if response.status_code // 100 != 2: + print(f"Post Object failed, status code: {response.status_code}, reason: {response.reason}") + else: + print(f"post object done, status code: {response.status_code}, request id: {response.headers.get('X-Oss-Request-Id')}") + + print(f"response: {response.text}") + +if __name__ == "__main__": + main() diff --git a/sample/put_object_callback.py b/sample/put_object_callback.py new file mode 100644 index 0000000..c102cae --- /dev/null +++ b/sample/put_object_callback.py @@ -0,0 +1,54 @@ +import base64 +import argparse +import alibabacloud_oss_v2 as oss + +parser = argparse.ArgumentParser(description="put object callback sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) +parser.add_argument('--callback_url', help='Callback server address.', required=True) + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + data = "name,school,company,age\nLora Francis,School,Staples Inc,27\n#Lora Francis,School,Staples Inc,27\nEleanor Little,School,\"Conectiv, Inc\",43\nRosie Hughes,School,Western Gas Resources Inc,44\nLawrence Ross,School,MetLife Inc.,24\n" + + result = client.put_object(oss.PutObjectRequest( + bucket=args.bucket, + key=args.key, + body=data, + callback=base64.b64encode(str('{\"callbackUrl\":\"' + args.callback_url + '\",\"callbackBody\":\"bucket=${bucket}&object=${object}&my_var_1=${x:var1}&my_var_2=${x:var2}\"}').encode()).decode(), + callback_var=base64.b64encode('{\"x:var1\":\"value1\",\"x:var2\":\"value2\"}'.encode()).decode(), + headers={ + 'Content-Type': 'text/plain', + 'Cache-Control': 'no-cache', + 'content-language': 'zh-CN', + 'content-encoding': 'gzip', + 'Content-Disposition': 'attachment;filename=aaa.txt', +
diff --git a/sample/put_object_callback.py b/sample/put_object_callback.py
new file mode 100644
index 0000000..c102cae
--- /dev/null
+++ b/sample/put_object_callback.py
@@ -0,0 +1,54 @@
+import base64
+import json
+import argparse
+import alibabacloud_oss_v2 as oss
+
+parser = argparse.ArgumentParser(description="put object callback sample")
+parser.add_argument('--region', help='The region in which the bucket is located.', required=True)
+parser.add_argument('--bucket', help='The name of the bucket.', required=True)
+parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS')
+parser.add_argument('--key', help='The name of the object.', required=True)
+parser.add_argument('--callback_url', help='Callback server address.', required=True)
+
+
+def main():
+
+    args = parser.parse_args()
+
+    # Loading credentials values from the environment variables
+    credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider()
+
+    # Using the SDK's default configuration
+    cfg = oss.config.load_default()
+    cfg.credentials_provider = credentials_provider
+    cfg.region = args.region
+    if args.endpoint is not None:
+        cfg.endpoint = args.endpoint
+
+    client = oss.Client(cfg)
+
+    data = "name,school,company,age\nLora Francis,School,Staples Inc,27\n#Lora Francis,School,Staples Inc,27\nEleanor Little,School,\"Conectiv, Inc\",43\nRosie Hughes,School,Western Gas Resources Inc,44\nLawrence Ross,School,MetLife Inc.,24\n"
+
+    result = client.put_object(oss.PutObjectRequest(
+        bucket=args.bucket,
+        key=args.key,
+        body=data,
+        callback=base64.b64encode(json.dumps({
+            'callbackUrl': args.callback_url,
+            'callbackBody': 'bucket=${bucket}&object=${object}&my_var_1=${x:var1}&my_var_2=${x:var2}',
+        }).encode()).decode(),
+        callback_var=base64.b64encode(json.dumps({
+            'x:var1': 'value1',
+            'x:var2': 'value2',
+        }).encode()).decode(),
+        headers={
+            'Content-Type': 'text/plain',
+            'Cache-Control': 'no-cache',
+            'content-language': 'zh-CN',
+            'content-encoding': 'gzip',
+            'Content-Disposition': 'attachment;filename=aaa.txt',
+            'Expires': '2022-10-12T00:00:00.000Z',
+            'auth': 'owner',
+            'x-oss-auth': 'owner2',
+            'x-oss-meta-auth': 'owner3',
+        }
+    ))
+
+    print(vars(result))
+
+
+if __name__ == "__main__":
+    main()

From 11349f2702299b7b0352df8f930a0788ede7f0a1 Mon Sep 17 00:00:00 2001
From: zhuxiaolong37
Date: Mon, 3 Mar 2025 17:52:43 +0800
Subject: [PATCH 6/6] add copier

---
 alibabacloud_oss_v2/__init__.py | 6 +
 alibabacloud_oss_v2/client.py | 9 +
 alibabacloud_oss_v2/copier.py | 686 +++++++++++++++++++
 alibabacloud_oss_v2/exceptions.py | 6 +
 sample/copier.py | 61 ++
 tests/integration/test_client.py | 1024 +++++++++++++++++++++++++++++
 6 files changed, 1792 insertions(+)
 create mode 100644 alibabacloud_oss_v2/copier.py
 create mode 100644 sample/copier.py

diff --git a/alibabacloud_oss_v2/__init__.py b/alibabacloud_oss_v2/__init__.py
index 85013d5..ae7af19 100644
--- a/alibabacloud_oss_v2/__init__.py
+++ b/alibabacloud_oss_v2/__init__.py
@@ -58,6 +58,12 @@
     UploadError
 )
 
+from .copier import (
+    Copier,
+    CopyResult,
+    CopyError
+)
+
 from .paginator import (
     ListObjectsPaginator,
     ListObjectsV2Paginator,
diff --git a/alibabacloud_oss_v2/client.py b/alibabacloud_oss_v2/client.py
index e0da51e..bca867e 100644
--- a/alibabacloud_oss_v2/client.py
+++ b/alibabacloud_oss_v2/client.py
@@ -11,6 +11,7 @@
 from . import exceptions
 from .downloader import Downloader
 from .uploader import Uploader
+from .copier import Copier
 from .progress import Progress
 from .crc import Crc64
 from .paginator import (
@@ -696,6 +697,14 @@ def uploader(self, **kwargs) -> Uploader:
         """
         return Uploader(self, **kwargs)
 
+    def copier(self, **kwargs) -> Copier:
+        """Creates a Copier that copies objects, using multipart copy for large objects.
+
+        Returns:
+            Copier: The copier instance.
+        """
+        return Copier(self, **kwargs)
+
     # file like objects
     def append_file(self, bucket: str, key: str,
                     request_payer: Optional[str] = None,
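For reviewers, a quick usage sketch of the new API (bucket, key, region, and size values are placeholders; Copier, CopyError, and its unwrap() are introduced by this patch):

    # hedged sketch: copy with an explicit part size and surface the underlying error
    import alibabacloud_oss_v2 as oss

    cfg = oss.config.load_default()
    cfg.credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider()
    cfg.region = 'cn-hangzhou'
    client = oss.Client(cfg)

    copier = client.copier(part_size=1024 * 1024, parallel_num=4)
    try:
        result = copier.copy(oss.CopyObjectRequest(
            bucket='dst-bucket', key='dst-key',
            source_bucket='src-bucket', source_key='src-key',
        ))
        print(result.etag, result.hash_crc64)
    except oss.CopyError as err:
        print(err.upload_id, err.unwrap())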
diff --git a/alibabacloud_oss_v2/copier.py b/alibabacloud_oss_v2/copier.py
new file mode 100644
index 0000000..e3d0334
--- /dev/null
+++ b/alibabacloud_oss_v2/copier.py
@@ -0,0 +1,686 @@
+"""Copier for handling object copies."""
+# pylint: disable=line-too-long, broad-exception-caught
+import abc
+import copy
+import os
+import concurrent.futures
+import threading
+from typing import Any, Optional, MutableMapping
+from . import exceptions, HeadObjectResult, GetObjectTaggingResult
+from . import models
+from . import validation
+from . import utils
+from . import defaults
+from .serde import copy_request
+from .paginator import ListPartsPaginator
+
+metadata_copied = {
+    "content-type": None,
+    "content-language": None,
+    "content-encoding": None,
+    "content-disposition": None,
+    "cache-control": None,
+    "expires": None,
+}
+
+class CopyAPIClient(abc.ABC):
+    """Abstract base class for the copier's client."""
+
+    @abc.abstractmethod
+    def copy_object(self, request: models.CopyObjectRequest, **kwargs) -> models.CopyObjectResult:
+        """Copies objects."""
+
+    @abc.abstractmethod
+    def head_object(self, request: models.HeadObjectRequest, **kwargs) -> models.HeadObjectResult:
+        """Queries information about the object in a bucket."""
+
+    @abc.abstractmethod
+    def initiate_multipart_upload(self, request: models.InitiateMultipartUploadRequest, **kwargs
+                                  ) -> models.InitiateMultipartUploadResult:
+        """
+        Initiates a multipart upload task before you can upload data
+        in parts to Object Storage Service (OSS).
+        """
+
+    @abc.abstractmethod
+    def upload_part_copy(self, request: models.UploadPartCopyRequest, **kwargs) -> models.UploadPartCopyResult:
+        """
+        You can call this operation to copy data from an existing object to upload a part
+        by adding the x-oss-copy-source header to UploadPart.
+        """
+
+    @abc.abstractmethod
+    def complete_multipart_upload(self, request: models.CompleteMultipartUploadRequest, **kwargs
+                                  ) -> models.CompleteMultipartUploadResult:
+        """
+        Completes the multipart upload task of an object after all parts
+        of the object are uploaded.
+        """
+
+    @abc.abstractmethod
+    def abort_multipart_upload(self, request: models.AbortMultipartUploadRequest, **kwargs
+                               ) -> models.AbortMultipartUploadResult:
+        """
+        Cancels a multipart upload task and deletes the parts uploaded in the task.
+        """
+
+    @abc.abstractmethod
+    def list_parts(self, request: models.ListPartsRequest, **kwargs
+                   ) -> models.ListPartsResult:
+        """
+        Lists all parts that are uploaded by using a specified upload ID.
+        """
+
+    @abc.abstractmethod
+    def get_object_tagging(self, request: models.GetObjectTaggingRequest, **kwargs
+                           ) -> models.GetObjectTaggingResult:
+        """
+        You can call this operation to query the tags of an object.
+        """
+
+class CopierOptions:
+    """Options that control how a Copier splits and transfers an object.
+    """
+
+    def __init__(
+        self,
+        part_size: Optional[int] = None,
+        parallel_num: Optional[int] = None,
+        leave_parts_on_error: Optional[bool] = None,
+        disable_shallow_copy: Optional[bool] = None,
+        metadata_properties: Optional[HeadObjectResult] = None,
+        tag_properties: Optional[GetObjectTaggingResult] = None,
+    ) -> None:
+        self.part_size = part_size
+        self.parallel_num = parallel_num
+        self.leave_parts_on_error = leave_parts_on_error or False
+        self.disable_shallow_copy = disable_shallow_copy or False
+        self.metadata_properties = metadata_properties
+        self.tag_properties = tag_properties
+
+
+class CopyResult:
+    """The result returned by Copier.copy.
+    """
+
+    def __init__(
+        self,
+        upload_id: Optional[str] = None,
+        etag: Optional[str] = None,
+        version_id: Optional[str] = None,
+        hash_crc64: Optional[str] = None,
+    ) -> None:
+        self.upload_id = upload_id
+        self.etag = etag
+        self.version_id = version_id
+        self.hash_crc64 = hash_crc64
+
+        self.status = ''
+        self.status_code = 0
+        self.request_id = ''
+        self.headers: MutableMapping[str, str] = {}
+
+class CopyError(exceptions.BaseError):
+    """
+    Copy Error.
+    """
+    fmt = 'copy failed, {upload_id}, {path}, {error}.'
+
+    def __init__(self, **kwargs):
+        exceptions.BaseError.__init__(self, **kwargs)
+        self._error = kwargs.get("error", None)
+        self.upload_id = kwargs.get("upload_id", None)
+        self.path = kwargs.get("path", None)
+
+    def unwrap(self) -> Exception:
+        """returns the detail error"""
+        return self._error
+
+
+class Copier:
+    """Copier for handling object copies, single-shot or multipart."""
+
+    def __init__(
+        self,
+        client: CopyAPIClient,
+        **kwargs: Any
+    ) -> None:
+        """
+        Args:
+            client (CopyAPIClient): An agent that implements the CopyObject and multipart copy APIs.
+        """
+        part_size = kwargs.get('part_size', defaults.DEFAULT_UPLOAD_PART_SIZE)
+        parallel_num = kwargs.get('parallel_num', defaults.DEFAULT_UPLOAD_PARALLEL)
+        leave_parts_on_error = kwargs.get('leave_parts_on_error', False)
+        self._client = client
+        self._options = CopierOptions(
+            part_size=part_size,
+            parallel_num=parallel_num,
+            leave_parts_on_error=leave_parts_on_error,
+            disable_shallow_copy=kwargs.get('disable_shallow_copy', False),
+        )
+
+        # Inherit the feature flags of the concrete client; an encryption client
+        # wraps a plain client and is unwrapped first.
+        feature_flags = 0
+        is_eclient = False
+        cname = type(client).__name__
+        if cname == 'Client':
+            feature_flags = client._client._options.feature_flags
+        elif cname == 'EncryptionClient':
+            feature_flags = client.unwrap()._client._options.feature_flags
+            is_eclient = True
+        self._feature_flags = feature_flags
+        self._is_eclient = is_eclient
+
+
+    def copy(
+        self,
+        request: models.CopyObjectRequest,
+        **kwargs: Any
+    ) -> CopyResult:
+        """Copies the source object described by the request to the destination.
+
+        Args:
+            request (models.CopyObjectRequest): Request parameters for the copy.
+
+        Returns:
+            CopyResult: The result of the copy operation.
+        """
+        delegate = self._delegate(request, **kwargs)
+        delegate.check_source()
+        delegate.apply_source()
+        return delegate.copy()
+
+    def _delegate(
+        self,
+        request: models.CopyObjectRequest,
+        **kwargs: Any
+    ) -> "_CopierDelegate":
+
+        if request is None:
+            raise exceptions.ParamNullError(field='request')
+
+        if not validation.is_valid_bucket_name(utils.safety_str(request.bucket)):
+            raise exceptions.ParamInvalidError(field='request.bucket')
+
+        if not validation.is_valid_object_name(utils.safety_str(request.key)):
+            raise exceptions.ParamInvalidError(field='request.key')
+
+        options = copy.copy(self._options)
+        options.part_size = kwargs.get('part_size', self._options.part_size)
+        options.parallel_num = kwargs.get('parallel_num', self._options.parallel_num)
+        options.leave_parts_on_error = kwargs.get('leave_parts_on_error', self._options.leave_parts_on_error)
+        options.disable_shallow_copy = kwargs.get('disable_shallow_copy', self._options.disable_shallow_copy)
+
+        if options.part_size <= 0:
+            options.part_size = defaults.DEFAULT_UPLOAD_PART_SIZE
+
+        if options.parallel_num <= 0:
+            options.parallel_num = defaults.DEFAULT_UPLOAD_PARALLEL
+
+        delegate = _CopierDelegate(
+            base=self,
+            client=self._client,
+            request=request,
+            options=options
+        )
+
+        return delegate
+
+
+class _CopyContext:
+    def __init__(
+        self,
+        upload_id: str = None,
+        start_num: int = None,
+    ) -> None:
+        self.upload_id = upload_id
+        self.start_num = start_num
+
+
+class _CopierDelegate:
+    def __init__(
+        self,
+        base: Copier,
+        client: CopyAPIClient,
+        request: models.CopyObjectRequest,
+        options: CopierOptions,
+        metadata_prop: Optional[HeadObjectResult] = None,
+        tag_prop: Optional[GetObjectTaggingResult] = None,
+    ) -> None:
+        self._base = base
+        self._client = client
+        self._request = request
+        self._options = options
+        self._metadata_prop = metadata_prop
+        self._tag_prop = tag_prop
+
+        parallel = options.parallel_num > 1
+        self._progress_lock = threading.Lock() if parallel else None
+
+        self._reader_pos = 0
+        self._total_size = 0
+        self._transferred = 0
+        self._reader_seekable = False
+
+        # source object's info
+        self._filepath: str = None
+        self._file_stat: os.stat_result = None
+
+        # checkpoint
+        self._checkpoint = None
+
+        # CRC
+        self._check_crc = False
+        self._ccrc = 0
+
+        # multipart copy state
+        self._copy_part_lock = None
+        self._copy_errors = []
+        self._copy_parts = []
+
+        # resumable copy
+        self._upload_id = None
+        self._part_number = None
+
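+    # Copy pipeline overview (summary comment): check_source() HEADs the source
+    # object to learn its size and metadata; apply_source() grows part_size until
+    # the part count fits under defaults.MAX_UPLOAD_PARTS; copy() then dispatches
+    # to a single-shot, shallow, or multipart copy based on the resulting size.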
+    def check_source(self):
+        """Fetches the source object's metadata once, unless it was supplied."""
+        if self._metadata_prop is not None:
+            return
+
+        request = models.HeadObjectRequest()
+        copy_request(request, self._request)
+        if self._request.source_bucket is not None:
+            request.bucket = self._request.source_bucket
+        request.key = self._request.source_key
+        request.version_id = self._request.source_version_id
+        result = self._client.head_object(request)
+        self._metadata_prop = result
+
+
+    def apply_source(self):
+        """Derives the effective part size from the source object's length."""
+
+        total_size = self._metadata_prop.content_length
+        if total_size is None:
+            total_size = -1
+        part_size = self._options.part_size
+
+        if total_size > 0:
+            while total_size / part_size >= defaults.MAX_UPLOAD_PARTS:
+                part_size += self._options.part_size
+
+        self._options.part_size = part_size
+        self._total_size = total_size
+
+    def can_use_shallow_copy(self):
+        """Shallow copy only applies to a plain copy inside a single bucket."""
+        if self._options.disable_shallow_copy:
+            return False
+
+        if self._request.storage_class:
+            return False
+
+        if self._request.source_bucket and self._request.source_bucket != self._request.bucket:
+            return False
+
+        if self._metadata_prop.server_side_encryption:
+            return False
+
+        return True
+
+    def update_crc_flag(self):
+        """Enables the client-side CRC64 check when the feature flag is set."""
+        # FF_ENABLE_CRC64_CHECK_UPLOAD = 0x00000008
+        if (self._base._feature_flags & 0x00000008) > 0:
+            self._check_crc = True
+
+
+    def copy(self) -> CopyResult:
+        """Dispatches to a single-shot, shallow, or multipart copy."""
+        if 0 <= self._total_size <= self._options.part_size:
+            return self._single_copy()
+        elif self.can_use_shallow_copy():
+            return self._shallow_copy()
+        return self._multipart_copy()
+
+    def _single_copy(self) -> CopyResult:
+        request = models.CopyObjectRequest()
+        copy_request(request, self._request)
+
+        if request.content_type is None:
+            request.content_type = self._get_content_type()
+
+        try:
+            result = self._client.copy_object(request)
+        except Exception as err:
+            raise self._wrap_error('', err)
+
+        self._transferred = self._total_size
+
+        ret = CopyResult(
+            etag=result.etag,
+            version_id=result.version_id,
+            hash_crc64=result.hash_crc64,
+        )
+        ret.status = result.status
+        ret.status_code = result.status_code
+        ret.request_id = result.request_id
+        ret.headers = result.headers
+
+        return ret
+
+    def _shallow_copy(self) -> CopyResult:
+        request = models.CopyObjectRequest()
+        copy_request(request, self._request)
+
+        if request.content_type is None:
+            request.content_type = self._get_content_type()
+
+        # timer_thread = threading.Timer(60 * 30, self.timeout_handle, [self._upload_id])
+        # timer_thread.start()
+        try:
+            result = self._client.copy_object(request)
+        except exceptions.TimeoutError:
+            # fall back to a multipart copy when the shallow copy times out
+            return self._multipart_copy()
+        except Exception as err:
+            raise self._wrap_error(self._upload_id, err)
+        # finally:
+        #     timer_thread.cancel()
+
+        self._transferred = self._total_size
+
+        ret = CopyResult(
+            etag=result.etag,
+            version_id=result.version_id,
+            hash_crc64=result.hash_crc64,
+        )
+        ret.status = result.status
+        ret.status_code = result.status_code
+        ret.request_id = result.request_id
+        ret.headers = result.headers
+
+        return ret
+
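+    # Multipart copy flow: initiate_multipart_upload (metadata and tagging derived
+    # from the request's directives), then one upload_part_copy per source range,
+    # in parallel when parallel_num > 1, and finally complete_multipart_upload;
+    # on failure the upload is aborted unless leave_parts_on_error is set.
+    # Worked example (figures illustrative): a 100 GiB source with an 8 MiB
+    # part_size would need 12800 parts; under a 10,000-part cap, apply_source
+    # grows part_size to 16 MiB, i.e. 6400 parts.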
+    def _multipart_copy(self) -> CopyResult:
+        # initiate the multipart upload
+        try:
+            upload_ctx = self._get_upload_context()
+        except Exception as err:
+            raise self._wrap_error('', err)
+
+        # copy the parts
+        parallel = self._options.parallel_num > 1
+        if parallel:
+            self._copy_part_lock = threading.Lock()
+            with concurrent.futures.ThreadPoolExecutor(self._options.parallel_num) as executor:
+                for result in executor.map(self._copy_part, self._iter_part(upload_ctx)):
+                    self._update_upload_result(result)
+        else:
+            for part in self._iter_part(upload_ctx):
+                self._update_upload_result(self._copy_part(part))
+                if len(self._copy_errors) > 0:
+                    break
+
+        # complete the upload
+        cmresult: models.CompleteMultipartUploadResult = None
+        if len(self._copy_errors) == 0:
+            request = models.CompleteMultipartUploadRequest()
+            copy_request(request, self._request)
+            parts = sorted(self._copy_parts, key=lambda p: p.part_number)
+            request.upload_id = upload_ctx.upload_id
+            request.complete_multipart_upload = models.CompleteMultipartUpload(parts=parts)
+            try:
+                cmresult = self._client.complete_multipart_upload(request)
+            except Exception as err:
+                self._copy_errors.append(err)
+
+        # check the last error
+        if len(self._copy_errors) > 0:
+            if not self._options.leave_parts_on_error:
+                try:
+                    abort_request = models.AbortMultipartUploadRequest()
+                    copy_request(abort_request, self._request)
+                    abort_request.upload_id = upload_ctx.upload_id
+                    self._client.abort_multipart_upload(abort_request)
+                except Exception as _:
+                    pass
+            raise self._wrap_error(upload_ctx.upload_id, self._copy_errors[-1])
+
+        self._assert_crc_same(cmresult.headers)
+
+        ret = CopyResult(
+            upload_id=upload_ctx.upload_id,
+            etag=cmresult.etag,
+            version_id=cmresult.version_id,
+            hash_crc64=cmresult.hash_crc64,
+        )
+        ret.status = cmresult.status
+        ret.status_code = cmresult.status_code
+        ret.request_id = cmresult.request_id
+        ret.headers = cmresult.headers
+
+        return ret
+
+
+    def _get_upload_context(self) -> _CopyContext:
+        if self._upload_id:
+            return _CopyContext(
+                upload_id=self._upload_id,
+                start_num=self._part_number - 1,
+            )
+
+        # if no resumable upload id exists, create a new one
+        request = models.InitiateMultipartUploadRequest()
+        copy_request(request, self._request)
+        if request.content_type is None:
+            request.content_type = self._get_content_type()
+
+        self.overwrite_metadata_prop(request)
+
+        self.overwrite_tag_prop(request)
+
+        result = self._client.initiate_multipart_upload(request)
+
+        return _CopyContext(
+            upload_id=result.upload_id,
+            start_num=0,
+        )
+
+    def _iter_part(self, upload_ctx: _CopyContext):
+        start_part_num = upload_ctx.start_num
+
+        while len(self._copy_errors) == 0:
+            try:
+                n = self._options.part_size
+                bytes_left = self._total_size - self._reader_pos
+
+                if bytes_left <= 0:
+                    break
+                if bytes_left < n:
+                    n = bytes_left
+
+                range_end = self._reader_pos + n - 1
+                part_range = f'bytes={self._reader_pos}-{range_end}'
+                self._reader_pos += n
+            except Exception as err:
+                self._save_error(err)
+                break
+
+            start_part_num += 1
+            yield upload_ctx.upload_id, start_part_num, part_range
+
+
+    def _copy_part(self, part):
+        # when an error has already occurred, skip the remaining part requests
+        if len(self._copy_errors) > 0:
+            return None
+
+        upload_id = part[0]
+        part_number = part[1]
+        part_range = part[2]
+        error: Exception = None
+        etag = None
+
+        try:
+            result = self._client.upload_part_copy(models.UploadPartCopyRequest(
+                bucket=self._request.bucket,
+                key=self._request.key,
+                upload_id=upload_id,
+                part_number=part_number,
+                source_bucket=self._request.source_bucket,
+                source_key=self._request.source_key,
+                source_range=part_range,
+                request_payer=self._request.request_payer
+            ))
+            etag = result.etag
+
+        except Exception as err:
+            error = err
+
+        return part_number, etag, error
+
+
+    def _save_error(self, error) -> None:
+        if self._copy_part_lock:
+            with self._copy_part_lock:
+                self._copy_errors.append(error)
+        else:
+            self._copy_errors.append(error)
+
+    def _get_content_type(self) -> str:
+        if self._filepath is not None:
+            return utils.guess_content_type(self._filepath, 'application/octet-stream')
+        return None
+
+    def _iter_uploaded_part(self):
+        if self._upload_id is None:
+            return
+        try:
+            paginator = ListPartsPaginator(self._client)
+            iterator = paginator.iter_page(models.ListPartsRequest(
+                bucket=self._request.bucket,
+                key=self._request.key,
+                request_payer=self._request.request_payer,
+                upload_id=self._upload_id,
+            ))
+            check_part_number = 1
+            for page in iterator:
+                for part in page.parts:
+                    if (part.part_number != check_part_number or
+                        part.size != self._options.part_size):
+                        return
+                    yield part
+                    check_part_number += 1
+        except Exception:
+            self._upload_id = None
+
+    def _update_upload_result(self, result):
+        if result is None:
+            return
+
+        if result[2] is not None:
+            self._save_error(result[2])
+            return
+
+        part_number = result[0]
+        etag = result[1]
+
+        self._copy_parts.append(models.UploadPart(part_number=part_number, etag=etag))
+
+    def overwrite_metadata_prop(self, im_request: models.InitiateMultipartUploadRequest):
+        request = self._request
+        metadata_directive = str(request.metadata_directive).lower()
+
+        if metadata_directive in ["none", "", "copy"]:
+            if self._metadata_prop is None:
+                raise Exception("request.metadata_directive is COPY, but the source object's metadata is missing")
+
+            im_request.cache_control = None
+            im_request.content_type = None
+            im_request.content_disposition = None
+            im_request.content_encoding = None
+            im_request.expires = None
+            im_request.metadata = None
+            im_request.headers = {}
+
+            # copy the metadata from the source
+            for k, v in self._metadata_prop.headers.items():
+                low_k = k.lower()
+                if low_k.startswith("x-oss-meta"):
+                    im_request.headers[k] = v
+                elif low_k in metadata_copied:
+                    im_request.headers[k] = v
+
+        elif metadata_directive == "replace":
+            # the metadata has already been copied via the copy_request function
+            pass
+
+        else:
+            raise Exception(f"Unsupported MetadataDirective, {request.metadata_directive}")
+
+    def overwrite_tag_prop(self, im_request: models.InitiateMultipartUploadRequest):
+        tagging_directive = str(self._request.tagging_directive).lower()
+
+        if tagging_directive in ["none", "", "copy"]:
+
+            if self._metadata_prop.tagging_count and self._metadata_prop.tagging_count > 0 and self._tag_prop is None:
+                request = models.GetObjectTaggingRequest()
+                copy_request(request, self._request)
+                if self._request.source_bucket is not None:
+                    request.bucket = self._request.source_bucket
+
+                request.key = self._request.source_key
+                request.version_id = self._request.source_version_id
+
+                result = self._client.get_object_tagging(
+                    request=request
+                )
+                self._tag_prop = result
+
+            if self._tag_prop:
+                tags = []
+                for t in self._tag_prop.tag_set.tags:
+                    tags.append(f"{str(t.key)}={str(t.value)}")
+                if tags:
+                    im_request.tagging = '&'.join(tags)
+
+        elif tagging_directive == "replace":
+            # the tagging has already been copied via the copy_request function
+            pass
+
+        else:
+            raise Exception(f"Unsupported TaggingDirective, {tagging_directive}")
+
+    def _assert_crc_same(self, headers: MutableMapping):
+        if not self._check_crc:
+            return
+
+        scrc = headers.get('x-oss-hash-crc64ecma', None)
+        if scrc is None:
+            return
+
+        ccrc = str(self._ccrc)
+        if scrc != ccrc:
+            raise self._wrap_error(self._upload_id, exceptions.InconsistentError(client_crc=ccrc, server_crc=scrc))
+
+    def _wrap_error(self, upload_id: str, error: Exception) -> Exception:
+        return CopyError(
+            upload_id=upload_id,
+            path=f'oss://{self._request.bucket}/{self._request.key}',
+            error=error
+        )
+
+    def timeout_handle(self, upload_id: str):
+        raise 
exceptions.TimeoutError(upload_id=upload_id, error='single copy timeout') \ No newline at end of file diff --git a/alibabacloud_oss_v2/exceptions.py b/alibabacloud_oss_v2/exceptions.py index c0125e8..6efc119 100644 --- a/alibabacloud_oss_v2/exceptions.py +++ b/alibabacloud_oss_v2/exceptions.py @@ -223,3 +223,9 @@ class FileNotReadable(BaseError): File is not readable. """ fmt = 'File is not readable, {filepath}' + +class TimeoutError(BaseError): + """ + Timeout error. + """ + fmt = 'Timeout exception: upload id: {upload_id}, error: {error}' \ No newline at end of file diff --git a/sample/copier.py b/sample/copier.py new file mode 100644 index 0000000..108b756 --- /dev/null +++ b/sample/copier.py @@ -0,0 +1,61 @@ +import argparse +import alibabacloud_oss_v2 as oss + +parser = argparse.ArgumentParser(description="copier sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) +parser.add_argument('--source_key', help='The name of the source address for object.', required=True) +parser.add_argument('--source_bucket', help='The name of the source address for bucket.', required=True) + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + # copier = client.copier( + # part_size=1000 * 1024, + # parallel_num=5, + # leave_parts_on_error=True, + # disable_shallow_copy=True, + # ) + # + # result = copier.copy(oss.CopyObjectRequest( + # bucket=args.bucket, + # key=args.key, + # source_bucket=args.source_bucket, + # source_key=args.source_key + # )) + + copier = client.copier() + + result = copier.copy(oss.CopyObjectRequest( + bucket=args.bucket, + key=args.key, + source_bucket=args.source_bucket, + source_key=args.source_key, + ), + part_size=1000 * 1024, + parallel_num=5, + leave_parts_on_error=True, + disable_shallow_copy=True, + ) + + print(vars(result)) + +if __name__ == "__main__": + main() + diff --git a/tests/integration/test_client.py b/tests/integration/test_client.py index b22cce8..93750fc 100644 --- a/tests/integration/test_client.py +++ b/tests/integration/test_client.py @@ -6,6 +6,7 @@ import requests import alibabacloud_oss_v2 as oss from . 
import TestIntegration, random_bucket_name, random_str, REGION, OBJECTNAME_PREFIX, get_client +from urllib.parse import quote, unquote class TestBucketBasic(TestIntegration): @@ -927,6 +928,1029 @@ def test_put_object_acl_fail(self): self.assertIn('PutObjectAcl', str(e)) self.assertIn('Endpoint: PUT', str(e)) + def test_copier(self): + length = 12 + data = random_str(length) + length_2 = 1024 * 1024 + data_2 = random_str(length_2) + key = OBJECTNAME_PREFIX + random_str(16) + key_2 = OBJECTNAME_PREFIX + random_str(16) + bucket_name = random_bucket_name() + + # put bucket + result = self.client.put_bucket(oss.PutBucketRequest( + bucket=bucket_name, + acl='private', + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + # put object data + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data, + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + # put object data2 + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key_2, + body=data_2, + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + # case 1: Copy a single part from the same bucket + single_key = 'single_key' + result = self.client.copier().copy(oss.CopyObjectRequest( + bucket=self.bucket_name, + key=single_key, + source_bucket=self.bucket_name, + source_key=key + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + + result_single_same_bucket_1 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key, + )) + + result_single_same_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=single_key, + )) + self.assertEqual("Normal", result_single_same_bucket_2.object_type) + self.assertEqual(12, result_single_same_bucket_1.content_length) + self.assertEqual(12, result_single_same_bucket_2.content_length) + + # case 2: Copy a multipart from the same bucket + multi_key = 'multipart_key' + result = self.client.copier( + part_size=100*1024, + parallel_num=5, + leave_parts_on_error=True, + disable_shallow_copy=True, + ).copy(oss.CopyObjectRequest( + bucket=self.bucket_name, + key=multi_key, + source_bucket=self.bucket_name, + source_key=key_2 + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + result_multi_same_bucket_1 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key_2, + )) + + result_multi_same_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=multi_key, + )) + self.assertEqual("Multipart", result_multi_same_bucket_2.object_type) + self.assertEqual(1024 * 1024, result_multi_same_bucket_1.content_length) + self.assertEqual(1024 * 1024, result_multi_same_bucket_2.content_length) + + # case 3: Copy a single part from the different bucket + single_key_different_bucket = 'single_key_different_bucket' + result = self.client.copier( + parallel_num=5, + leave_parts_on_error=True, + ).copy(oss.CopyObjectRequest( + bucket=bucket_name, + key=single_key_different_bucket, + source_bucket=self.bucket_name, + source_key=key + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + result_single_key_different_bucket_1 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key, + )) + + 
result_single_key_different_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=bucket_name, + key=single_key_different_bucket, + )) + self.assertEqual("Normal", result_single_key_different_bucket_2.object_type) + self.assertEqual(12, result_single_key_different_bucket_1.content_length) + self.assertEqual(12, result_single_key_different_bucket_2.content_length) + + # case 4: Copy a multipart from the different bucket + multi_key_different_bucket = 'multipart_key_different_bucket' + result = self.client.copier( + part_size=100*1024, + parallel_num=5, + leave_parts_on_error=True, + ).copy(oss.CopyObjectRequest( + bucket=bucket_name, + key=multi_key_different_bucket, + source_bucket=self.bucket_name, + source_key=key_2 + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + result_multi_key_different_bucket_1 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key_2, + )) + + result_multi_key_different_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=bucket_name, + key=multi_key_different_bucket, + )) + self.assertEqual("Multipart", result_multi_key_different_bucket_2.object_type) + self.assertEqual(1024 * 1024, result_multi_key_different_bucket_1.content_length) + self.assertEqual(1024 * 1024, result_multi_key_different_bucket_2.content_length) + + def test_copier_metadata(self): + length = 12 + data = random_str(length) + length_2 = 1024 * 1024 + data_2 = random_str(length_2) + key = OBJECTNAME_PREFIX + random_str(16) + key_2 = OBJECTNAME_PREFIX + random_str(16) + bucket_name = random_bucket_name() + + # put bucket + result = self.client.put_bucket(oss.PutBucketRequest( + bucket=bucket_name, + acl='private', + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + # put object data + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + metadata={ + "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + "x-oss-auth": "owner", + "x-oss-meta-version": "1.01", + "flag": "false", + "content-type": "utf-8", + "Content-Disposition": "attachment;filename=aaa.txt", + "Content-Length": "344606", + }, + tagging='TagA=A&TagB=B', + body=data, + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + # put object data2 + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key_2, + metadata={ + "x-oss-auth": "owner", + "x-oss-meta-version": "1.01", + "flag": "false", + "content-type": "utf-8", + "Content-Disposition": "attachment;filename=aaa.txt", + "Content-Length": "344606", + }, + tagging='TagA=A&TagB=B', + body=data_2, + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + # case 1: Copy a single part from the same bucket + single_key = 'single_key' + result = self.client.copier().copy(oss.CopyObjectRequest( + bucket=self.bucket_name, + key=single_key, + source_bucket=self.bucket_name, + source_key=key + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + + result_single_same_bucket_1 = self.client.head_object(oss.HeadObjectRequest( + 
bucket=self.bucket_name, + key=key, + )) + + result_single_same_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=single_key, + )) + self.assertEqual("Normal", result_single_same_bucket_2.object_type) + self.assertEqual(12, result_single_same_bucket_1.content_length) + self.assertEqual(12, result_single_same_bucket_2.content_length) + self.assertEqual("nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", result_single_same_bucket_1.metadata.get('client-side-encryption-key')) + self.assertEqual("De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", result_single_same_bucket_1.metadata.get('client-side-encryption-start')) + self.assertEqual("AES/CTR/NoPadding", result_single_same_bucket_1.metadata.get('client-side-encryption-cek-alg')) + self.assertEqual("RSA/NONE/PKCS1Padding", result_single_same_bucket_1.metadata.get('client-side-encryption-wrap-alg')) + self.assertEqual("1.01", result_single_same_bucket_1.metadata.get('x-oss-meta-version')) + self.assertEqual("false", result_single_same_bucket_1.metadata.get('flag')) + self.assertEqual("utf-8", result_single_same_bucket_1.metadata.get('content-type')) + self.assertEqual("attachment;filename=aaa.txt", result_single_same_bucket_1.metadata.get('Content-Disposition')) + self.assertEqual("344606", result_single_same_bucket_1.metadata.get('Content-Length')) + self.assertEqual("nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", result_single_same_bucket_2.metadata.get('client-side-encryption-key')) + self.assertEqual("De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", result_single_same_bucket_2.metadata.get('client-side-encryption-start')) + self.assertEqual("AES/CTR/NoPadding", result_single_same_bucket_2.metadata.get('client-side-encryption-cek-alg')) + self.assertEqual("RSA/NONE/PKCS1Padding", result_single_same_bucket_2.metadata.get('client-side-encryption-wrap-alg')) + self.assertEqual("1.01", result_single_same_bucket_2.metadata.get('x-oss-meta-version')) + self.assertEqual("false", result_single_same_bucket_2.metadata.get('flag')) + self.assertEqual("utf-8", result_single_same_bucket_2.metadata.get('content-type')) + self.assertEqual("attachment;filename=aaa.txt", result_single_same_bucket_2.metadata.get('Content-Disposition')) + self.assertEqual("344606", result_single_same_bucket_2.metadata.get('Content-Length')) + + result_tag_single_same_bucket_1 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=self.bucket_name, + key=key, + )) + + result_tag_single_same_bucket_2 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=self.bucket_name, + key=single_key, + )) + self.assertEqual(result_tag_single_same_bucket_1.tag_set.tags, result_tag_single_same_bucket_2.tag_set.tags) + + # compare tags + tags = [] + tags_str = '' + for o in result_tag_single_same_bucket_1.tag_set.tags: + tags.append(f"{str(o.key)}={str(o.value)}") + if tags: + tags_str = '&'.join(tags) + self.assertEqual("TagA=A&TagB=B", tags_str) + + # case 2: Copy a multipart from the same bucket + multi_key = 'multipart_key' + result = self.client.copier( + part_size=100*1024, + parallel_num=5, + leave_parts_on_error=True, + disable_shallow_copy=True, + ).copy(oss.CopyObjectRequest( + bucket=self.bucket_name, + key=multi_key, + source_bucket=self.bucket_name, + source_key=key_2 + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + result_multi_same_bucket_1 = 
self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key_2, + )) + + result_multi_same_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=multi_key, + )) + self.assertEqual("Multipart", result_multi_same_bucket_2.object_type) + self.assertEqual(1024 * 1024, result_multi_same_bucket_1.content_length) + self.assertEqual(1024 * 1024, result_multi_same_bucket_2.content_length) + self.assertEqual("1.01", result_multi_same_bucket_1.metadata.get('x-oss-meta-version')) + self.assertEqual("false", result_multi_same_bucket_1.metadata.get('flag')) + self.assertEqual("utf-8", result_multi_same_bucket_1.metadata.get('content-type')) + self.assertEqual("attachment;filename=aaa.txt", result_multi_same_bucket_1.metadata.get('Content-Disposition')) + self.assertEqual("344606", result_multi_same_bucket_1.metadata.get('Content-Length')) + self.assertEqual("1.01", result_multi_same_bucket_2.metadata.get('x-oss-meta-version')) + self.assertEqual("false", result_multi_same_bucket_2.metadata.get('flag')) + self.assertEqual("utf-8", result_multi_same_bucket_2.metadata.get('content-type')) + self.assertEqual("attachment;filename=aaa.txt", result_multi_same_bucket_2.metadata.get('Content-Disposition')) + self.assertEqual("344606", result_multi_same_bucket_2.metadata.get('Content-Length')) + + result_tag_multi_same_bucket_1 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=self.bucket_name, + key=key_2, + )) + + result_tag_multi_same_bucket_2 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=self.bucket_name, + key=multi_key, + )) + self.assertEqual(result_tag_multi_same_bucket_1.tag_set.tags, result_tag_multi_same_bucket_2.tag_set.tags) + + # compare tags + tags = [] + tags_str = '' + for o in result_tag_multi_same_bucket_2.tag_set.tags: + tags.append(f"{str(o.key)}={str(o.value)}") + if tags: + tags_str = '&'.join(tags) + self.assertEqual("TagA=A&TagB=B", tags_str) + + # case 3: Copy a single part from the different bucket + single_key_different_bucket = 'single_key_different_bucket' + result = self.client.copier( + parallel_num=5, + leave_parts_on_error=True, + ).copy(oss.CopyObjectRequest( + bucket=bucket_name, + key=single_key_different_bucket, + source_bucket=self.bucket_name, + source_key=key + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + result_single_key_different_bucket_1 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key, + )) + + result_single_key_different_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=bucket_name, + key=single_key_different_bucket, + )) + self.assertEqual("Normal", result_single_key_different_bucket_2.object_type) + self.assertEqual(12, result_single_key_different_bucket_1.content_length) + self.assertEqual(12, result_single_key_different_bucket_2.content_length) + self.assertEqual("nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", result_single_key_different_bucket_1.metadata.get('client-side-encryption-key')) + self.assertEqual("De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", result_single_key_different_bucket_1.metadata.get('client-side-encryption-start')) + self.assertEqual("AES/CTR/NoPadding", result_single_key_different_bucket_1.metadata.get('client-side-encryption-cek-alg')) + self.assertEqual("RSA/NONE/PKCS1Padding", result_single_key_different_bucket_1.metadata.get('client-side-encryption-wrap-alg')) + self.assertEqual("1.01", 
result_single_key_different_bucket_1.metadata.get('x-oss-meta-version')) + self.assertEqual("false", result_single_key_different_bucket_1.metadata.get('flag')) + self.assertEqual("utf-8", result_single_key_different_bucket_1.metadata.get('content-type')) + self.assertEqual("attachment;filename=aaa.txt", result_single_key_different_bucket_1.metadata.get('Content-Disposition')) + self.assertEqual("344606", result_single_key_different_bucket_1.metadata.get('Content-Length')) + self.assertEqual("nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", result_single_key_different_bucket_2.metadata.get('client-side-encryption-key')) + self.assertEqual("De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", result_single_key_different_bucket_2.metadata.get('client-side-encryption-start')) + self.assertEqual("AES/CTR/NoPadding", result_single_key_different_bucket_2.metadata.get('client-side-encryption-cek-alg')) + self.assertEqual("RSA/NONE/PKCS1Padding", result_single_key_different_bucket_2.metadata.get('client-side-encryption-wrap-alg')) + self.assertEqual("1.01", result_single_key_different_bucket_2.metadata.get('x-oss-meta-version')) + self.assertEqual("false", result_single_key_different_bucket_2.metadata.get('flag')) + self.assertEqual("utf-8", result_single_key_different_bucket_2.metadata.get('content-type')) + self.assertEqual("attachment;filename=aaa.txt", result_single_key_different_bucket_2.metadata.get('Content-Disposition')) + self.assertEqual("344606", result_single_key_different_bucket_2.metadata.get('Content-Length')) + + result_tag_single_key_different_bucket_1 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=self.bucket_name, + key=key, + )) + + result_tag_single_key_different_bucket_2 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=bucket_name, + key=single_key_different_bucket, + )) + self.assertEqual(result_tag_single_key_different_bucket_1.tag_set.tags, result_tag_single_key_different_bucket_2.tag_set.tags) + + # compare tags + tags = [] + tags_str = '' + for o in result_tag_single_key_different_bucket_2.tag_set.tags: + tags.append(f"{str(o.key)}={str(o.value)}") + if tags: + tags_str = '&'.join(tags) + self.assertEqual("TagA=A&TagB=B", tags_str) + + # case 4: Copy a multipart from the different bucket + multi_key_different_bucket = 'multipart_key_different_bucket' + result = self.client.copier( + part_size=100*1024, + parallel_num=5, + leave_parts_on_error=True, + ).copy(oss.CopyObjectRequest( + bucket=bucket_name, + key=multi_key_different_bucket, + source_bucket=self.bucket_name, + source_key=key_2 + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + result_multi_key_different_bucket_1 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key_2, + )) + + result_multi_key_different_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=bucket_name, + key=multi_key_different_bucket, + )) + self.assertEqual("Multipart", result_multi_key_different_bucket_2.object_type) + self.assertEqual(1024 * 1024, result_multi_key_different_bucket_1.content_length) + self.assertEqual(1024 * 1024, result_multi_key_different_bucket_2.content_length) + self.assertEqual("1.01", result_multi_key_different_bucket_1.metadata.get('x-oss-meta-version')) + self.assertEqual("false", result_multi_key_different_bucket_1.metadata.get('flag')) + self.assertEqual("utf-8", result_multi_key_different_bucket_1.metadata.get('content-type')) + 
self.assertEqual("attachment;filename=aaa.txt", result_multi_key_different_bucket_1.metadata.get('Content-Disposition')) + self.assertEqual("344606", result_multi_key_different_bucket_1.metadata.get('Content-Length')) + self.assertEqual("1.01", result_multi_key_different_bucket_2.metadata.get('x-oss-meta-version')) + self.assertEqual("false", result_multi_key_different_bucket_2.metadata.get('flag')) + self.assertEqual("utf-8", result_multi_key_different_bucket_2.metadata.get('content-type')) + self.assertEqual("attachment;filename=aaa.txt", result_multi_key_different_bucket_2.metadata.get('Content-Disposition')) + self.assertEqual("344606", result_multi_key_different_bucket_2.metadata.get('Content-Length')) + + result_tag_multi_key_different_bucket_1 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=self.bucket_name, + key=key_2, + )) + + result_tag_multi_key_different_bucket_2 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=bucket_name, + key=multi_key_different_bucket, + )) + self.assertEqual(result_tag_multi_key_different_bucket_1.tag_set.tags, result_tag_multi_key_different_bucket_2.tag_set.tags) + + # compare tags + tags = [] + tags_str = '' + for o in result_tag_multi_key_different_bucket_2.tag_set.tags: + tags.append(f"{str(o.key)}={str(o.value)}") + if tags: + tags_str = '&'.join(tags) + self.assertEqual("TagA=A&TagB=B", tags_str) + + def test_copier_header(self): + length = 12 + data = random_str(length) + length_2 = 1000 * 1024 + data_2 = random_str(length_2) + key = OBJECTNAME_PREFIX + random_str(16) + key_2 = OBJECTNAME_PREFIX + random_str(16) + bucket_name = random_bucket_name() + + # put bucket + result = self.client.put_bucket(oss.PutBucketRequest( + bucket=bucket_name, + acl='private', + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + # put object data + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + headers={ + 'Content-Type': 'text/plain', + 'Cache-Control': 'no-cache', + 'content-language': 'zh-CN', + 'content-encoding': 'gzip', + 'Content-Disposition': 'attachment;filename='+quote('世界')+'.txt', + 'Expires': '2022-10-12T00:00:00.000Z', + 'auth': 'owner', + 'x-oss-auth': 'owner2', + 'x-oss-meta-auth': 'owner3', + }, + tagging='TagA=A&TagB=B', + body=data, + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + # put object data2 + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key_2, + headers={ + 'Content-Type': 'text/plain', + 'Cache-Control': 'no-cache', + 'content-language': 'zh-CN', + 'content-encoding': 'gzip', + 'Content-Disposition': 'attachment;filename='+quote('世界')+'.txt', + 'Expires': '2022-10-12T00:00:00.000Z', + 'auth': 'owner', + 'x-oss-auth': 'owner2', + 'x-oss-meta-auth': 'owner3', + }, + tagging='TagA=A&TagB=B', + body=data_2, + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + # case 1: Copy a single part from the same bucket + single_key = 'single_key' + result = self.client.copier().copy(oss.CopyObjectRequest( + bucket=self.bucket_name, + key=single_key, + source_bucket=self.bucket_name, + source_key=key + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + + result_single_same_bucket_1 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key, + )) + 
+ result_single_same_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=single_key, + )) + self.assertEqual("Normal", result_single_same_bucket_2.object_type) + self.assertEqual(12, result_single_same_bucket_1.content_length) + self.assertEqual(12, result_single_same_bucket_2.content_length) + self.assertEqual('text/plain', result_single_same_bucket_1.headers.get('Content-Type')) + self.assertEqual('no-cache', result_single_same_bucket_1.headers.get('Cache-Control')) + self.assertEqual('zh-CN', result_single_same_bucket_1.headers.get('Content-Language')) + self.assertEqual('gzip', result_single_same_bucket_1.headers.get('Content-Encoding')) + self.assertEqual('attachment;filename='+quote('世界')+'.txt', result_single_same_bucket_1.headers.get('Content-Disposition')) + self.assertEqual('2022-10-12T00:00:00.000Z', result_single_same_bucket_1.headers.get('Expires')) + self.assertEqual('owner3', result_single_same_bucket_1.headers.get('x-oss-meta-auth')) + self.assertEqual('owner3', result_single_same_bucket_1.metadata.get('auth')) + self.assertEqual('text/plain', result_single_same_bucket_2.headers.get('Content-Type')) + self.assertEqual('no-cache', result_single_same_bucket_2.headers.get('Cache-Control')) + self.assertEqual('zh-CN', result_single_same_bucket_2.headers.get('Content-Language')) + self.assertEqual('gzip', result_single_same_bucket_2.headers.get('Content-Encoding')) + self.assertEqual('attachment;filename='+quote('世界')+'.txt', result_single_same_bucket_2.headers.get('Content-Disposition')) + self.assertEqual('2022-10-12T00:00:00.000Z', result_single_same_bucket_2.headers.get('Expires')) + self.assertEqual('owner3', result_single_same_bucket_2.headers.get('x-oss-meta-auth')) + self.assertEqual('owner3', result_single_same_bucket_2.metadata.get('auth')) + self.assertEqual(None, result_single_same_bucket_1.metadata.get('x-oss-auth')) + self.assertEqual(None, result_single_same_bucket_1.metadata.get('x-oss-meta-auth')) + self.assertEqual(None, result_single_same_bucket_2.metadata.get('x-oss-auth')) + self.assertEqual(None, result_single_same_bucket_2.metadata.get('x-oss-meta-auth')) + + result_tag_single_same_bucket_1 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=self.bucket_name, + key=key, + )) + + result_tag_single_same_bucket_2 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=self.bucket_name, + key=single_key, + )) + self.assertEqual(result_tag_single_same_bucket_1.tag_set.tags, result_tag_single_same_bucket_2.tag_set.tags) + + # compare tags + tags = [] + tags_str = '' + for o in result_tag_single_same_bucket_2.tag_set.tags: + tags.append(f"{str(o.key)}={str(o.value)}") + if tags: + tags_str = '&'.join(tags) + self.assertEqual("TagA=A&TagB=B", tags_str) + + # case 2: Copy a multipart from the same bucket + multi_key = 'multipart_key' + result = self.client.copier( + part_size=100*1024, + parallel_num=5, + leave_parts_on_error=True, + disable_shallow_copy=True, + ).copy(oss.CopyObjectRequest( + bucket=self.bucket_name, + key=multi_key, + source_bucket=self.bucket_name, + source_key=key_2 + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + result_multi_same_bucket_1 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key_2, + )) + + result_multi_same_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=multi_key, + )) + self.assertEqual("Multipart", result_multi_same_bucket_2.object_type) + 
self.assertEqual(1000 * 1024, result_multi_same_bucket_1.content_length) + self.assertEqual(1000 * 1024, result_multi_same_bucket_2.content_length) + self.assertEqual("Multipart", result_multi_same_bucket_2.object_type) + self.assertEqual('text/plain', result_multi_same_bucket_1.headers.get('Content-Type')) + self.assertEqual('no-cache', result_multi_same_bucket_1.headers.get('Cache-Control')) + self.assertEqual('zh-CN', result_multi_same_bucket_1.headers.get('Content-Language')) + self.assertEqual('gzip', result_multi_same_bucket_1.headers.get('Content-Encoding')) + self.assertEqual('attachment;filename='+quote('世界')+'.txt', result_multi_same_bucket_1.headers.get('Content-Disposition')) + self.assertEqual('2022-10-12T00:00:00.000Z', result_multi_same_bucket_1.headers.get('Expires')) + self.assertEqual('owner3', result_multi_same_bucket_1.headers.get('x-oss-meta-auth')) + self.assertEqual('owner3', result_multi_same_bucket_1.metadata.get('auth')) + self.assertEqual('text/plain', result_multi_same_bucket_2.headers.get('Content-Type')) + self.assertEqual('no-cache', result_multi_same_bucket_2.headers.get('Cache-Control')) + self.assertEqual('zh-CN', result_multi_same_bucket_2.headers.get('Content-Language')) + self.assertEqual('gzip', result_multi_same_bucket_2.headers.get('Content-Encoding')) + self.assertEqual('attachment;filename='+quote('世界')+'.txt', result_multi_same_bucket_2.headers.get('Content-Disposition')) + self.assertEqual('2022-10-12T00:00:00.000Z', result_multi_same_bucket_2.headers.get('Expires')) + self.assertEqual('owner3', result_multi_same_bucket_2.headers.get('x-oss-meta-auth')) + self.assertEqual('owner3', result_multi_same_bucket_2.metadata.get('auth')) + self.assertEqual(None, result_multi_same_bucket_1.metadata.get('x-oss-auth')) + self.assertEqual(None, result_multi_same_bucket_1.metadata.get('x-oss-meta-auth')) + self.assertEqual(None, result_multi_same_bucket_2.metadata.get('x-oss-auth')) + self.assertEqual(None, result_multi_same_bucket_2.metadata.get('x-oss-meta-auth')) + + result_tag_multi_same_bucket_1 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=self.bucket_name, + key=key_2, + )) + + result_tag_multi_same_bucket_2 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=self.bucket_name, + key=multi_key, + )) + self.assertEqual(result_tag_multi_same_bucket_1.tag_set.tags, result_tag_multi_same_bucket_2.tag_set.tags) + + # compare tags + tags = [] + tags_str = '' + for o in result_tag_multi_same_bucket_2.tag_set.tags: + tags.append(f"{str(o.key)}={str(o.value)}") + if tags: + tags_str = '&'.join(tags) + self.assertEqual("TagA=A&TagB=B", tags_str) + + + # case 3: Copy a single part from the different bucket + single_key_different_bucket = 'single_key_different_bucket' + result = self.client.copier( + parallel_num=5, + leave_parts_on_error=True, + ).copy(oss.CopyObjectRequest( + bucket=bucket_name, + key=single_key_different_bucket, + source_bucket=self.bucket_name, + source_key=key + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + result_single_key_different_bucket_1 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key, + )) + + result_single_key_different_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=bucket_name, + key=single_key_different_bucket, + )) + self.assertEqual("Normal", result_single_key_different_bucket_2.object_type) + self.assertEqual(12, result_single_key_different_bucket_1.content_length) + self.assertEqual(12, 
result_single_key_different_bucket_2.content_length) + self.assertEqual('text/plain', result_single_key_different_bucket_1.headers.get('Content-Type')) + self.assertEqual('no-cache', result_single_key_different_bucket_1.headers.get('Cache-Control')) + self.assertEqual('zh-CN', result_single_key_different_bucket_1.headers.get('Content-Language')) + self.assertEqual('gzip', result_single_key_different_bucket_1.headers.get('Content-Encoding')) + self.assertEqual('attachment;filename='+quote('世界')+'.txt', result_single_key_different_bucket_1.headers.get('Content-Disposition')) + self.assertEqual('2022-10-12T00:00:00.000Z', result_single_key_different_bucket_1.headers.get('Expires')) + self.assertEqual('owner3', result_single_key_different_bucket_1.headers.get('x-oss-meta-auth')) + self.assertEqual('owner3', result_single_key_different_bucket_1.metadata.get('auth')) + self.assertEqual('text/plain', result_single_key_different_bucket_2.headers.get('Content-Type')) + self.assertEqual('no-cache', result_single_key_different_bucket_2.headers.get('Cache-Control')) + self.assertEqual('zh-CN', result_single_key_different_bucket_2.headers.get('Content-Language')) + self.assertEqual('gzip', result_single_key_different_bucket_2.headers.get('Content-Encoding')) + self.assertEqual('attachment;filename='+quote('世界')+'.txt', result_single_key_different_bucket_2.headers.get('Content-Disposition')) + self.assertEqual('2022-10-12T00:00:00.000Z', result_single_key_different_bucket_2.headers.get('Expires')) + self.assertEqual('owner3', result_single_key_different_bucket_2.headers.get('x-oss-meta-auth')) + self.assertEqual('owner3', result_single_key_different_bucket_2.metadata.get('auth')) + self.assertEqual(None, result_single_key_different_bucket_1.metadata.get('x-oss-auth')) + self.assertEqual(None, result_single_key_different_bucket_1.metadata.get('x-oss-meta-auth')) + self.assertEqual(None, result_single_key_different_bucket_2.metadata.get('x-oss-auth')) + self.assertEqual(None, result_single_key_different_bucket_2.metadata.get('x-oss-meta-auth')) + + result_tag_single_key_different_bucket_1 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=self.bucket_name, + key=key, + )) + + result_tag_single_key_different_bucket_2 = self.client.get_object_tagging(oss.GetObjectTaggingRequest( + bucket=bucket_name, + key=single_key_different_bucket, + )) + self.assertEqual(result_tag_single_key_different_bucket_1.tag_set.tags, result_tag_single_key_different_bucket_2.tag_set.tags) + + # compare tags + tags = [] + tags_str = '' + for o in result_tag_single_key_different_bucket_2.tag_set.tags: + tags.append(f"{str(o.key)}={str(o.value)}") + if tags: + tags_str = '&'.join(tags) + self.assertEqual("TagA=A&TagB=B", tags_str) + + # case 4: Copy a multipart from the different bucket + multi_key_different_bucket = 'multipart_key_different_bucket' + result = self.client.copier( + part_size=100*1024, + parallel_num=5, + leave_parts_on_error=True, + ).copy(oss.CopyObjectRequest( + bucket=bucket_name, + key=multi_key_different_bucket, + source_bucket=self.bucket_name, + source_key=key_2 + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + result_multi_key_different_bucket_1 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key_2, + )) + + result_multi_key_different_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=bucket_name, + key=multi_key_different_bucket, + )) + self.assertEqual("Multipart", 
result_multi_key_different_bucket_2.object_type)
+        self.assertEqual(1000 * 1024, result_multi_key_different_bucket_1.content_length)
+        self.assertEqual(1000 * 1024, result_multi_key_different_bucket_2.content_length)
+        self.assertEqual('text/plain', result_multi_key_different_bucket_1.headers.get('Content-Type'))
+        self.assertEqual('no-cache', result_multi_key_different_bucket_1.headers.get('Cache-Control'))
+        self.assertEqual('zh-CN', result_multi_key_different_bucket_1.headers.get('Content-Language'))
+        self.assertEqual('gzip', result_multi_key_different_bucket_1.headers.get('Content-Encoding'))
+        self.assertEqual('attachment;filename='+quote('世界')+'.txt', result_multi_key_different_bucket_1.headers.get('Content-Disposition'))
+        self.assertEqual('2022-10-12T00:00:00.000Z', result_multi_key_different_bucket_1.headers.get('Expires'))
+        self.assertEqual('owner3', result_multi_key_different_bucket_1.headers.get('x-oss-meta-auth'))
+        self.assertEqual('owner3', result_multi_key_different_bucket_1.metadata.get('auth'))
+        self.assertEqual('text/plain', result_multi_key_different_bucket_2.headers.get('Content-Type'))
+        self.assertEqual('no-cache', result_multi_key_different_bucket_2.headers.get('Cache-Control'))
+        self.assertEqual('zh-CN', result_multi_key_different_bucket_2.headers.get('Content-Language'))
+        self.assertEqual('gzip', result_multi_key_different_bucket_2.headers.get('Content-Encoding'))
+        self.assertEqual('attachment;filename='+quote('世界')+'.txt', result_multi_key_different_bucket_2.headers.get('Content-Disposition'))
+        self.assertEqual('2022-10-12T00:00:00.000Z', result_multi_key_different_bucket_2.headers.get('Expires'))
+        self.assertEqual('owner3', result_multi_key_different_bucket_2.headers.get('x-oss-meta-auth'))
+        self.assertEqual('owner3', result_multi_key_different_bucket_2.metadata.get('auth'))
+        self.assertEqual(None, result_multi_key_different_bucket_1.metadata.get('x-oss-auth'))
+        self.assertEqual(None, result_multi_key_different_bucket_1.metadata.get('x-oss-meta-auth'))
+        self.assertEqual(None, result_multi_key_different_bucket_2.metadata.get('x-oss-auth'))
+        self.assertEqual(None, result_multi_key_different_bucket_2.metadata.get('x-oss-meta-auth'))
+
+        result_tag_multi_key_different_bucket_1 = self.client.get_object_tagging(oss.GetObjectTaggingRequest(
+            bucket=self.bucket_name,
+            key=key_2,
+        ))
+
+        result_tag_multi_key_different_bucket_2 = self.client.get_object_tagging(oss.GetObjectTaggingRequest(
+            bucket=bucket_name,
+            key=multi_key_different_bucket,
+        ))
+        self.assertEqual(result_tag_multi_key_different_bucket_1.tag_set.tags, result_tag_multi_key_different_bucket_2.tag_set.tags)
+
+        # compare tags
+        tags = []
+        tags_str = ''
+        for o in result_tag_multi_key_different_bucket_2.tag_set.tags:
+            tags.append(f"{str(o.key)}={str(o.value)}")
+        if tags:
+            tags_str = '&'.join(tags)
+        self.assertEqual("TagA=A&TagB=B", tags_str)
+
+    def test_copier_different_bucket_single_replace_meta_and_tag(self):
+        length = 12
+        data = random_str(length)
+        key = OBJECTNAME_PREFIX + random_str(16)
+        bucket_name = random_bucket_name()
+
+        # put bucket
+        result = self.client.put_bucket(oss.PutBucketRequest(
+            bucket=bucket_name,
+            acl='private',
+        ))
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('OK', result.status)
+        self.assertEqual(24, len(result.request_id))
+        self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+        # put object data
+        result = self.client.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+            metadata={
"client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + "x-oss-auth": "owner", + "x-oss-meta-version": "1.01", + "flag": "false", + "content-type": "utf-8", + "Content-Disposition": "attachment;filename=aaa.txt", + "Content-Length": "344606", + }, + tagging='TagA=A&TagB=B', + body=data, + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + # case 3: Copy a single part from the different bucket + single_key_different_bucket = 'single_key_different_bucket' + result = self.client.copier( + leave_parts_on_error=True, + ).copy(oss.CopyObjectRequest( + bucket=bucket_name, + key=single_key_different_bucket, + source_bucket=self.bucket_name, + source_key=key, + metadata_directive='replace', + metadata={ + "x-oss-auth": "customer-owner", + "x-oss-meta-version": "1.23", + "flag": "true", + "content-type": "text/txt", + 'Content-Disposition': 'attachment;filename='+quote('世界')+'.txt', + "Content-Length": "116", + }, + tagging='TagA3=A3&TagB3=B3', + tagging_directive='replace' + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + result_single_key_different_bucket_1 = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key, + )) + + result_single_key_different_bucket_2 = self.client.head_object(oss.HeadObjectRequest( + bucket=bucket_name, + key=single_key_different_bucket, + )) + self.assertEqual("Normal", result_single_key_different_bucket_2.object_type) + self.assertEqual(12, result_single_key_different_bucket_1.content_length) + self.assertEqual(12, result_single_key_different_bucket_2.content_length) + self.assertEqual("nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", result_single_key_different_bucket_1.metadata.get('client-side-encryption-key')) + self.assertEqual("De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", result_single_key_different_bucket_1.metadata.get('client-side-encryption-start')) + self.assertEqual("AES/CTR/NoPadding", result_single_key_different_bucket_1.metadata.get('client-side-encryption-cek-alg')) + self.assertEqual("RSA/NONE/PKCS1Padding", result_single_key_different_bucket_1.metadata.get('client-side-encryption-wrap-alg')) + self.assertEqual("1.01", result_single_key_different_bucket_1.metadata.get('x-oss-meta-version')) + self.assertEqual("false", result_single_key_different_bucket_1.metadata.get('flag')) + self.assertEqual("utf-8", result_single_key_different_bucket_1.metadata.get('content-type')) + self.assertEqual("attachment;filename=aaa.txt", result_single_key_different_bucket_1.metadata.get('Content-Disposition')) + self.assertEqual("344606", result_single_key_different_bucket_1.metadata.get('Content-Length')) + self.assertEqual("nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", result_single_key_different_bucket_2.metadata.get('client-side-encryption-key')) + self.assertEqual("De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", result_single_key_different_bucket_2.metadata.get('client-side-encryption-start')) + self.assertEqual("AES/CTR/NoPadding", result_single_key_different_bucket_2.metadata.get('client-side-encryption-cek-alg')) + self.assertEqual("RSA/NONE/PKCS1Padding", 
+        self.assertEqual("1.23", result_single_key_different_bucket_2.metadata.get('x-oss-meta-version'))
+        self.assertEqual("true", result_single_key_different_bucket_2.metadata.get('flag'))
+        self.assertEqual("text/txt", result_single_key_different_bucket_2.metadata.get('content-type'))
+        self.assertEqual('attachment;filename='+quote('世界')+'.txt', result_single_key_different_bucket_2.metadata.get('Content-Disposition'))
+        self.assertEqual("116", result_single_key_different_bucket_2.metadata.get('Content-Length'))
+
+        result_tag_single_key_different_bucket_1 = self.client.get_object_tagging(oss.GetObjectTaggingRequest(
+            bucket=self.bucket_name,
+            key=key,
+        ))
+        tags = []
+        tags_str = ''
+        for o in result_tag_single_key_different_bucket_1.tag_set.tags:
+            tags.append(f"{str(o.key)}={str(o.value)}")
+        if tags:
+            tags_str = '&'.join(tags)
+        self.assertEqual("TagA=A&TagB=B", tags_str)
+
+        result_tag_single_key_different_bucket_2 = self.client.get_object_tagging(oss.GetObjectTaggingRequest(
+            bucket=bucket_name,
+            key=single_key_different_bucket,
+        ))
+
+        # compare tags
+        tags = []
+        tags_str = ''
+        for o in result_tag_single_key_different_bucket_2.tag_set.tags:
+            tags.append(f"{str(o.key)}={str(o.value)}")
+        if tags:
+            tags_str = '&'.join(tags)
+        self.assertEqual("TagA3=A3&TagB3=B3", tags_str)
+
+    def test_copier_different_bucket_multi_replace_meta_and_tag(self):
+        length_2 = 1000 * 1024
+        data_2 = random_str(length_2)
+        key_2 = OBJECTNAME_PREFIX + random_str(16)
+        bucket_name = random_bucket_name()
+
+        # put bucket
+        result = self.client.put_bucket(oss.PutBucketRequest(
+            bucket=bucket_name,
+            acl='private',
+        ))
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('OK', result.status)
+        self.assertEqual(24, len(result.request_id))
+        self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+        # put object data2
+        result = self.client.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key_2,
+            metadata={
+                "x-oss-auth": "owner",
+                "x-oss-meta-version": "1.01",
+                "flag": "false",
+                "content-type": "utf-8",
+                "Content-Disposition": "attachment;filename=aaa.txt",
+                "Content-Length": "344606",
+            },
+            tagging='TagA2=A2&TagB2=B2',
+            body=data_2,
+        ))
+        self.assertIsNotNone(result)
+        self.assertEqual(200, result.status_code)
+
+        # Copy a multipart object from the different bucket, replacing its meta and tag
+        multi_key_different_bucket = 'multipart_key_different_bucket'
+        result = self.client.copier(
+            part_size=100*1024,
+            parallel_num=5,
+            leave_parts_on_error=True,
+        ).copy(oss.CopyObjectRequest(
+            bucket=bucket_name,
+            key=multi_key_different_bucket,
+            source_bucket=self.bucket_name,
+            source_key=key_2,
+            metadata_directive='replace',
+            metadata={
+                "client-side-encryption-key": "THIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=",
+                "client-side-encryption-start": "oSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=",
+                "client-side-encryption-cek-alg": "CTR",
+                "client-side-encryption-wrap-alg": "PKCS1Padding",
+                "client-side-encryption-data-size": "1024000",
+                "client-side-encryption-part-size": "102400",
+                "x-oss-auth": "customer-owner",
+                "x-oss-meta-version": "1.23",
+                "flag": "true",
+                "content-type": "text/txt",
+                'Content-Disposition': 'attachment;filename=' + quote('世界') + '.txt',
+                "Content-Length": "116",
+            },
+            tagging='TagA3=A3&TagB3=B3',
+            tagging_directive='replace'
+        ))
+        self.assertIsNotNone(result)
+        self.assertEqual(200, result.status_code)
+
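+        # head both objects: on a multipart copy with metadata_directive='replace', the
+        # metadata supplied on the request, including the new client-side-encryption-*
+        # entries above, should replace the source's user metadata on the destination.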
+        result_multi_key_different_bucket_1 = self.client.head_object(oss.HeadObjectRequest(
+            bucket=self.bucket_name,
+            key=key_2,
+        ))
+
+        result_multi_key_different_bucket_2 = self.client.head_object(oss.HeadObjectRequest(
+            bucket=bucket_name,
+            key=multi_key_different_bucket,
+        ))
+
+        self.assertEqual("Multipart", result_multi_key_different_bucket_2.object_type)
+        self.assertEqual(1000 * 1024, result_multi_key_different_bucket_1.content_length)
+        self.assertEqual(1000 * 1024, result_multi_key_different_bucket_2.content_length)
+        self.assertEqual("1.01", result_multi_key_different_bucket_1.metadata.get('x-oss-meta-version'))
+        self.assertEqual("false", result_multi_key_different_bucket_1.metadata.get('flag'))
+        self.assertEqual("utf-8", result_multi_key_different_bucket_1.metadata.get('content-type'))
+        self.assertEqual("attachment;filename=aaa.txt", result_multi_key_different_bucket_1.metadata.get('Content-Disposition'))
+        self.assertEqual("344606", result_multi_key_different_bucket_1.metadata.get('Content-Length'))
+        self.assertEqual("THIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", result_multi_key_different_bucket_2.metadata.get('client-side-encryption-key'))
+        self.assertEqual("oSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", result_multi_key_different_bucket_2.metadata.get('client-side-encryption-start'))
+        self.assertEqual("CTR", result_multi_key_different_bucket_2.metadata.get('client-side-encryption-cek-alg'))
+        self.assertEqual("PKCS1Padding", result_multi_key_different_bucket_2.metadata.get('client-side-encryption-wrap-alg'))
+        self.assertEqual("1024000", result_multi_key_different_bucket_2.metadata.get('client-side-encryption-data-size'))
+        self.assertEqual("102400", result_multi_key_different_bucket_2.metadata.get('client-side-encryption-part-size'))
+        self.assertEqual("1.23", result_multi_key_different_bucket_2.metadata.get('x-oss-meta-version'))
+        self.assertEqual("true", result_multi_key_different_bucket_2.metadata.get('flag'))
+        self.assertEqual("text/txt", result_multi_key_different_bucket_2.metadata.get('content-type'))
+        self.assertEqual('attachment;filename='+quote('世界')+'.txt', result_multi_key_different_bucket_2.metadata.get('Content-Disposition'))
+        self.assertEqual("116", result_multi_key_different_bucket_2.metadata.get('Content-Length'))
+
+        result_tag_multi_key_different_bucket_1 = self.client.get_object_tagging(oss.GetObjectTaggingRequest(
+            bucket=self.bucket_name,
+            key=key_2,
+        ))
+        tags = []
+        tags_str = ''
+        for o in result_tag_multi_key_different_bucket_1.tag_set.tags:
+            tags.append(f"{str(o.key)}={str(o.value)}")
+        if tags:
+            tags_str = '&'.join(tags)
+        self.assertEqual("TagA2=A2&TagB2=B2", tags_str)
+
+        result_tag_multi_key_different_bucket_2 = self.client.get_object_tagging(oss.GetObjectTaggingRequest(
+            bucket=bucket_name,
+            key=multi_key_different_bucket,
+        ))
+
+        # compare tags
+        tags = []
+        tags_str = ''
+        for o in result_tag_multi_key_different_bucket_2.tag_set.tags:
+            tags.append(f"{str(o.key)}={str(o.value)}")
+        if tags:
+            tags_str = '&'.join(tags)
+        self.assertEqual("TagA3=A3&TagB3=B3", tags_str)
+
 class TestMultipartUpload(TestIntegration):
     def test_multipart_upload_object(self):
         length1 = 100*1024