diff --git a/alibabacloud_oss_v2/vectors/endpoints.py b/alibabacloud_oss_v2/vectors/endpoints.py
index 71ffbd8..9a2a2fe 100644
--- a/alibabacloud_oss_v2/vectors/endpoints.py
+++ b/alibabacloud_oss_v2/vectors/endpoints.py
@@ -6,9 +6,9 @@
 def from_region(region: str, etype: str) -> str:
     """Generate vectors endpoint from region"""
     if etype == "internal":
-        return f"oss-{region}-internal.oss-vectors.aliyuncs.com"
+        return f"{region}-internal.oss-vectors.aliyuncs.com"
     else:
-        return f"oss-{region}.oss-vectors.aliyuncs.com"
+        return f"{region}.oss-vectors.aliyuncs.com"
 
 
 class VectorsEndpointProvider(EndpointProvider):
diff --git a/alibabacloud_oss_v2/vectors/models/index_basic.py b/alibabacloud_oss_v2/vectors/models/index_basic.py
index 9833aae..150c9d5 100644
--- a/alibabacloud_oss_v2/vectors/models/index_basic.py
+++ b/alibabacloud_oss_v2/vectors/models/index_basic.py
@@ -89,24 +89,20 @@ class GetVectorIndexResult(serde.ResultModel):
 
     _attribute_map = {
         'index': {'tag': 'output', 'position': 'body', 'rename': 'index', 'type': 'dict'},
-        'vector_bucket_name': {'tag': 'output', 'position': 'body', 'rename': 'vectorBucketName', 'type': 'str'},
     }
 
     def __init__(
         self,
         index: Optional[Dict] = None,
-        vector_bucket_name: Optional[str] = None,
         **kwargs: Any
     ) -> None:
        """
         Args:
             index (Dict, optional): The vector index information.
-            vector_bucket_name (str, optional): The name of the vector bucket.
         """
         super().__init__(**kwargs)
         self.index = index
-        self.vector_bucket_name = vector_bucket_name
 
 
 # List
diff --git a/sample/vectors/get_vector_index.py b/sample/vectors/get_vector_index.py
index aaf732f..7c26e3a 100644
--- a/sample/vectors/get_vector_index.py
+++ b/sample/vectors/get_vector_index.py
@@ -36,7 +36,6 @@ def main():
 
     if result.index:
         print(f'index name: {result.index}')
-        print(f'vector bucket name: {result.vector_bucket_name}')
 
 
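For reference, a minimal sketch of the endpoint strings the reworked from_region helper now produces; the region value is only an example, and any etype other than "internal" falls through to the public form.

from alibabacloud_oss_v2.vectors import endpoints

# Vectors endpoints no longer carry the "oss-" prefix in front of the region.
print(endpoints.from_region("cn-hangzhou", "internal"))  # cn-hangzhou-internal.oss-vectors.aliyuncs.com
print(endpoints.from_region("cn-hangzhou", ""))          # cn-hangzhou.oss-vectors.aliyuncs.com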
diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py
index 5b0dbfa..97ed234 100644
--- a/tests/integration/__init__.py
+++ b/tests/integration/__init__.py
@@ -18,6 +18,7 @@
 ACCESS_ID = os.getenv("OSS_TEST_ACCESS_KEY_ID")
 ACCESS_KEY = os.getenv("OSS_TEST_ACCESS_KEY_SECRET")
 ENDPOINT = os.getenv("OSS_TEST_ENDPOINT")
+VECTOR_ENDPOINT = os.getenv("OSS_TEST_VECTOR_ENDPOINT")
 REGION = os.getenv("OSS_TEST_REGION", "cn-hangzhou")
 RAM_ROLE_ARN = os.getenv("OSS_TEST_RAM_ROLE_ARN")
 SIGNATURE_VERSION = os.getenv("OSS_TEST_SIGNATURE_VERSION")
@@ -102,7 +103,7 @@ def get_vectors_client() -> oss_vectors.Client:
     cfg = oss.config.load_default()
     cfg.credentials_provider = oss.credentials.StaticCredentialsProvider(ACCESS_ID, ACCESS_KEY)
     cfg.region = REGION
-    cfg.endpoint = ENDPOINT
+    cfg.endpoint = VECTOR_ENDPOINT
     cfg.account_id = USER_ID
     return oss_vectors.Client(cfg)
 
@@ -147,6 +148,9 @@ def random_str(n):
 def random_bucket_name():
     return BUCKETNAME_PREFIX + random_lowstr(4) + '-' + str(int(datetime.datetime.now(datetime.timezone.utc).timestamp()))
 
+def random_short_bucket_name():
+    return BUCKETNAME_PREFIX + random_lowstr(7)
+
 def clean_objects(client:oss.Client, bucket_name:str) -> None:
     marker = ''
     is_truncated = True
@@ -253,7 +257,80 @@ def sts_assume_role(access_key_id:str, access_key_secret:str, role_arn:str) -> dict:
     response = requests.get(assume_url)
     return json.loads(response.content)
 
-
+
+
+def clean_vector_buckets(prefix: str) -> None:
+    vector_client = get_vectors_client()
+
+    paginator = vector_client.list_vector_buckets_paginator()
+    for page in paginator.iter_page(oss_vectors.models.ListVectorBucketsRequest(prefix=prefix)):
+        for bucket in page.buckets:
+            actual_bucket_name = bucket.name.split(':')[-1]
+
+            # Clean all content in the bucket (indexes and vectors)
+            clean_vector_bucket_content(vector_client, actual_bucket_name)
+
+            # Delete the bucket itself
+            delete_vector_bucket(actual_bucket_name)
+
+
+def clean_vector_bucket_content(client: oss_vectors.Client, full_bucket_name: str) -> None:
+    """
+    Clean all content in the specified vector bucket (including indexes and vectors)
+    """
+    # Clean all vector indexes
+    clean_vector_indexes(client, full_bucket_name)
+
+
+def clean_vector_indexes(client: oss_vectors.Client, bucket_name: str) -> None:
+    """
+    Clean all vector indexes in the specified bucket and their contained vectors
+    """
+    paginator_index = client.list_vector_indexes_paginator()
+    for page_index in paginator_index.iter_page(
+        oss_vectors.models.ListVectorIndexesRequest(bucket=bucket_name)
+    ):
+        for index in page_index.indexes:
+            # Clean all vectors in the index
+            clean_vectors(client, bucket_name, index.get("indexName"))
+
+            # Delete the vector index
+            client.delete_vector_index(oss_vectors.models.DeleteVectorIndexRequest(
+                bucket=bucket_name,
+                index_name=index.get("indexName"),
+            ))
+
+
+def clean_vectors(client: oss_vectors.Client, bucket_name: str, index_name: str) -> None:
+    """
+    Clean all vectors in the specified index
+    """
+    paginator = client.list_vectors_paginator()
+    request = oss_vectors.models.ListVectorsRequest(
+        bucket=bucket_name,
+        index_name=index_name,
+    )
+
+    for page_vector in paginator.iter_page(request):
+        keys = []
+        for vec in page_vector.vectors:
+            keys.append(vec.get("key"))
+
+        # Delete all vectors on the current page
+        if keys:
+            client.delete_vectors(oss_vectors.models.DeleteVectorsRequest(
+                bucket=bucket_name,
+                index_name=index_name,
+                keys=keys
+            ))
+
+
+def delete_vector_bucket(bucket_name: str) -> None:
+    """
+    Delete vector bucket
+    """
+    client = get_vectors_client()
+    client.delete_vector_bucket(oss_vectors.models.DeleteVectorBucketRequest(bucket=bucket_name))
+
 
 class TestIntegrationVectors(TestIntegration):
 
@@ -262,8 +339,16 @@ def setUpClass(cls):
         TestIntegration.setUpClass()
         cls.vector_client = get_vectors_client()
 
+        vector_bucket_name = random_short_bucket_name()
+        result = cls.vector_client.put_vector_bucket(
+            oss_vectors.models.PutVectorBucketRequest(
+                bucket=vector_bucket_name,
+            )
+        )
+        cls.vector_bucket_name = vector_bucket_name
+
     @classmethod
     def tearDownClass(cls):
         TestIntegration.tearDownClass()
-
+        clean_vector_buckets(BUCKETNAME_PREFIX)
 
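As a rough illustration of how the new OSS_TEST_VECTOR_ENDPOINT variable is consumed, the sketch below mirrors get_vectors_client() above; the credential and region values come from the same environment variables the suite already reads, and cfg.account_id (set from USER_ID in the real helper) is omitted here.

import os

import alibabacloud_oss_v2 as oss
import alibabacloud_oss_v2.vectors as oss_vectors

cfg = oss.config.load_default()
cfg.credentials_provider = oss.credentials.StaticCredentialsProvider(
    os.getenv("OSS_TEST_ACCESS_KEY_ID"), os.getenv("OSS_TEST_ACCESS_KEY_SECRET"))
cfg.region = os.getenv("OSS_TEST_REGION", "cn-hangzhou")
# The vectors client points at the dedicated vectors endpoint, not OSS_TEST_ENDPOINT.
cfg.endpoint = os.getenv("OSS_TEST_VECTOR_ENDPOINT")
vectors_client = oss_vectors.Client(cfg)

With this client in place, tearDownClass calls clean_vector_buckets(BUCKETNAME_PREFIX) so that any vector buckets created under the test prefix are emptied and deleted after the run.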
diff --git a/tests/integration/vector/test_vector_basic_client.py b/tests/integration/vector/test_vector_basic_client.py
index 156127c..ac1e935 100644
--- a/tests/integration/vector/test_vector_basic_client.py
+++ b/tests/integration/vector/test_vector_basic_client.py
@@ -1,7 +1,8 @@
 # pylint: skip-file
+from ast import literal_eval
 
 import alibabacloud_oss_v2.vectors as oss_vectors
-from tests.integration import TestIntegrationVectors, random_bucket_name
+from tests.integration import TestIntegrationVectors, random_short_bucket_name
 
 
 class TestVectorBasic(TestIntegrationVectors):
@@ -10,7 +11,9 @@ class TestVectorBasic(TestIntegrationVectors):
     def test_vector_basic(self):
         """Test put, get, list, delete and query vector operations."""
         # 1. Create bucket
-        bucket_name = random_bucket_name()
+        bucket_name = random_short_bucket_name()
+        vector_key = "vector-key-1"
+
         result = self.vector_client.put_vector_bucket(oss_vectors.models.PutVectorBucketRequest(
             bucket=bucket_name,
         ))
@@ -20,33 +23,32 @@ def test_vector_basic(self):
         self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
 
         # 2. Create index (required for vector operations)
-        index_name = 'test-index'
-        dimension = 128
-        distance_metric = 'EUCLIDEAN'
-        data_type = 'vector'
+        index_name = 'testIndexForIntegrationVector'
+        dimension = 3
+        distance_metric = 'cosine'
+        data_type = 'float32'
         metadata = {"nonFilterableMetadataKeys": ["key1", "key2"]}
-        put_index_result = self.vector_client.put_vector_index(oss_vectors.models.PutVectorIndexRequest(
+
+        put_index_request = oss_vectors.models.PutVectorIndexRequest(
             bucket=bucket_name,
             data_type=data_type,
             dimension=dimension,
             distance_metric=distance_metric,
             index_name=index_name,
             metadata=metadata
-        ))
-        self.assertEqual(200, put_index_result.status_code)
-        self.assertEqual('OK', put_index_result.status)
-        self.assertEqual(24, len(put_index_result.request_id))
-        self.assertEqual(24, len(put_index_result.headers.get('x-oss-request-id')))
-        self.assertIsNotNone(put_index_result.index)
-        self.assertEqual(index_name, put_index_result.index.index_name)
-        self.assertEqual(dimension, put_index_result.index.dimension)
-        self.assertEqual(distance_metric, put_index_result.index.distance_metric)
+        )
+
+        put_result = self.vector_client.put_vector_index(put_index_request)
+        self.assertEqual(200, put_result.status_code)
+        self.assertEqual('OK', put_result.status)
+        self.assertEqual(24, len(put_result.request_id))
+        self.assertEqual(24, len(put_result.headers.get('x-oss-request-id')))
 
         # 3. Put vectors
         vectors_to_put = [
             {
                 "data": {"float32": [0.1, 0.2, 0.3]},
-                "key": "vector-key-1",
+                "key": vector_key,
                 "metadata": {"key1": "value1", "key2": "value2"}
             }
         ]
@@ -60,13 +62,12 @@ def test_vector_basic(self):
         self.assertEqual('OK', put_result.status)
         self.assertEqual(24, len(put_result.request_id))
         self.assertEqual(24, len(put_result.headers.get('x-oss-request-id')))
-        self.assertIsNotNone(put_result.result)
 
         # 4. Get vectors
         get_result = self.vector_client.get_vectors(oss_vectors.models.GetVectorsRequest(
             bucket=bucket_name,
             index_name=index_name,
-            keys=['vector-key-1'],
+            keys=[vector_key],
             return_data=True,
             return_metadata=True
         ))
@@ -74,61 +75,70 @@ def test_vector_basic(self):
         self.assertEqual('OK', get_result.status)
         self.assertEqual(24, len(get_result.request_id))
         self.assertEqual(24, len(get_result.headers.get('x-oss-request-id')))
-        self.assertIsNotNone(get_result.result)
-        self.assertIsNotNone(get_result.result.vectors)
-        self.assertEqual(1, len(get_result.result.vectors))
-        self.assertEqual("vector-key-1", get_result.result.vectors[0].key)
-        self.assertIsNotNone(get_result.result.vectors[0].data)
-        self.assertIsNotNone(get_result.result.vectors[0].metadata)
+        self.assertIsNotNone(get_result.vectors)
+        self.assertEqual(1, len(get_result.vectors))
+        self.assertEqual(vector_key, get_result.vectors[0].get('key'))
+        self.assertIsNotNone(get_result.vectors[0].get('data'))
+        self.assertIsNotNone(get_result.vectors[0].get('metadata'))
+        self.assertEqual(literal_eval('{"key1": "value1", "key2": "value2"}'), get_result.vectors[0].get('metadata'))
 
         # 5. List vectors
         list_result = self.vector_client.list_vectors(oss_vectors.models.ListVectorsRequest(
             bucket=bucket_name,
             index_name=index_name,
             max_results=100,
-            next_token='',
             return_data=True,
             return_metadata=True,
-            segment_count=5,
-            segment_index=2
+            segment_count=2,
+            segment_index=1
         ))
         self.assertEqual(200, list_result.status_code)
         self.assertEqual('OK', list_result.status)
         self.assertEqual(24, len(list_result.request_id))
         self.assertEqual(24, len(list_result.headers.get('x-oss-request-id')))
-        self.assertIsNotNone(list_result.result)
-        self.assertIsNotNone(list_result.result.vectors)
+        self.assertIsNotNone(list_result.vectors)
         # Check that we have at least one vector in the list
-        self.assertGreaterEqual(len(list_result.result.vectors), 1)
+        self.assertGreaterEqual(len(list_result.vectors), 0)
+        self.assertEqual(vector_key, list_result.vectors[0].get('key'))
+        self.assertIsNotNone(list_result.vectors[0].get('data'))
+        self.assertIsNotNone(list_result.vectors[0].get('metadata'))
+        self.assertEqual(literal_eval('{"key1": "value1", "key2": "value2"}'), list_result.vectors[0].get('metadata'))
+
 
         # 6. Query vectors
-        query_vector = [0.1, 0.2, 0.3]
+        query_filter = {
+            "$and": [{
+                "type": {
+                    "$nin": ["comedy", "documentary"]
+                }
+            }]
+        }
+        query_vector = {"float32": [0.1, 0.2, 0.3]}
         query_result = self.vector_client.query_vectors(oss_vectors.models.QueryVectorsRequest(
             bucket=bucket_name,
             index_name=index_name,
-            vector=query_vector,
-            top_k=1
+            query_vector=query_vector,
+            filter=query_filter,
+            return_distance=True,
+            return_metadata=True,
+            top_k=10
        ))
         self.assertEqual(200, query_result.status_code)
         self.assertEqual('OK', query_result.status)
         self.assertEqual(24, len(query_result.request_id))
         self.assertEqual(24, len(query_result.headers.get('x-oss-request-id')))
-        self.assertIsNotNone(query_result.result)
-        self.assertIsNotNone(query_result.result.matches)
-        # Check that we have matches returned
-        self.assertGreaterEqual(len(query_result.result.matches), 0)
+        self.assertIsNotNone(query_result)
+
 
         # 7. Delete vectors
         delete_result = self.vector_client.delete_vectors(oss_vectors.models.DeleteVectorsRequest(
             bucket=bucket_name,
             index_name=index_name,
-            keys=['vector-key-1']
+            keys=[vector_key]
         ))
-        self.assertEqual(200, delete_result.status_code)
-        self.assertEqual('OK', delete_result.status)
+        self.assertEqual(204, delete_result.status_code)
         self.assertEqual(24, len(delete_result.request_id))
         self.assertEqual(24, len(delete_result.headers.get('x-oss-request-id')))
-        self.assertIsNotNone(delete_result.result)
 
         # 8. Get index to verify it exists
         get_index_result = self.vector_client.get_vector_index(oss_vectors.models.GetVectorIndexRequest(
@@ -140,7 +150,7 @@ def test_vector_basic(self):
         self.assertEqual(24, len(get_index_result.request_id))
         self.assertEqual(24, len(get_index_result.headers.get('x-oss-request-id')))
         self.assertIsNotNone(get_index_result.index)
-        self.assertEqual(index_name, get_index_result.index.index_name)
+        self.assertEqual(index_name, get_index_result.index.get('indexName'))
 
         # 9. List indexes
         list_index_result = self.vector_client.list_vector_indexes(oss_vectors.models.ListVectorIndexesRequest(
@@ -152,17 +162,17 @@ def test_vector_basic(self):
         self.assertEqual(24, len(list_index_result.headers.get('x-oss-request-id')))
         self.assertIsNotNone(list_index_result.indexes)
         # Check that we have at least one index in the list
-        self.assertGreaterEqual(len(list_index_result.indexes), 1)
+        self.assertEqual(len(list_index_result.indexes), 1)
         # Verify our index is in the list
         found_index = None
         for index in list_index_result.indexes:
-            if index.index_name == index_name:
+            if index.get('indexName') == index_name:
                 found_index = index
                 break
 
         self.assertIsNotNone(found_index)
-        self.assertEqual(index_name, found_index.index_name)
-        self.assertEqual(dimension, found_index.dimension)
+        self.assertEqual(index_name, found_index.get('indexName'))
+        self.assertEqual(dimension, found_index.get('dimension'))
 
         # 10. Delete index (cleanup)
         delete_index_result = self.vector_client.delete_vector_index(oss_vectors.models.DeleteVectorIndexRequest(
diff --git a/tests/integration/vector/test_vector_bucket_basic_client.py b/tests/integration/vector/test_vector_bucket_basic_client.py
index 8121277..0dd0cf0 100644
--- a/tests/integration/vector/test_vector_bucket_basic_client.py
+++ b/tests/integration/vector/test_vector_bucket_basic_client.py
@@ -1,16 +1,14 @@
 # pylint: skip-file
 
-from typing import cast
-import alibabacloud_oss_v2 as oss
 import alibabacloud_oss_v2.vectors as oss_vectors
-from tests.integration import TestIntegrationVectors, random_bucket_name
+from tests.integration import TestIntegrationVectors, random_short_bucket_name, REGION, USER_ID
 
 
 class TestVectorBucketBasic(TestIntegrationVectors):
 
     def test_vector_bucket_basic(self):
         # create bucket
-        bucket_name = random_bucket_name()
+        bucket_name = random_short_bucket_name()
         result = self.vector_client.put_vector_bucket(oss_vectors.models.PutVectorBucketRequest(
             bucket=bucket_name,
             acl='private',
@@ -28,7 +26,7 @@ def test_vector_bucket_basic(self):
         self.assertEqual('OK', result.status)
         self.assertEqual(24, len(result.request_id))
         self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
-        self.assertEqual(bucket_name, result.bucket_info.name)
+        self.assertEqual(f'acs:ossvector:{REGION}:{USER_ID}:{bucket_name}', result.bucket_info.name)
         self.assertIsNotNone(result.bucket_info.location)
         self.assertIsNotNone(result.bucket_info.creation_date)
 
@@ -44,7 +42,7 @@ def test_vector_bucket_basic(self):
         self.assertGreater(len(result.buckets), 0)
         found = False
         for bucket in result.buckets:
-            if bucket.name == bucket_name:
+            if bucket.name.__contains__(bucket_name):
                 found = True
                 break
         self.assertTrue(found)
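The get_bucket_info assertion above reflects that vector bucket names come back in ARN form; a small sketch of recovering the short name (placeholder region and account values), which is the same split(':')[-1] used by clean_vector_buckets in tests/integration/__init__.py.

# Placeholder ARN-style name of the shape asserted in the test above.
full_name = "acs:ossvector:cn-hangzhou:1234567890123456:oss-test-abcdefg"
short_name = full_name.split(':')[-1]
print(short_name)  # oss-test-abcdefg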
diff --git a/tests/integration/vector/test_vector_bucket_logging_client.py b/tests/integration/vector/test_vector_bucket_logging_client.py
index 72dcdc5..ec48c0f 100644
--- a/tests/integration/vector/test_vector_bucket_logging_client.py
+++ b/tests/integration/vector/test_vector_bucket_logging_client.py
@@ -1,7 +1,8 @@
 # pylint: skip-file
+import alibabacloud_oss_v2 as oss
 
 import alibabacloud_oss_v2.vectors as oss_vectors
-from tests.integration import TestIntegrationVectors, random_bucket_name
+from tests.integration import TestIntegrationVectors, random_short_bucket_name, random_bucket_name
 
 
 class TestVectorBucketLogging(TestIntegrationVectors):
@@ -10,7 +11,7 @@ class TestVectorBucketLogging(TestIntegrationVectors):
     def test_vector_bucket_logging(self):
         """Test put, get, and delete bucket logging operations."""
         # 1. Create buckets: source bucket and target bucket
-        source_bucket_name = random_bucket_name()
+        source_bucket_name = random_short_bucket_name()
         target_bucket_name = random_bucket_name()
 
         # Create source bucket
@@ -24,21 +25,24 @@ def test_vector_bucket_logging(self):
         self.assertEqual(24, len(result.request_id))
         self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
 
-        # Create target bucket
-        result = self.vector_client.put_vector_bucket(
-            oss_vectors.models.PutVectorBucketRequest(
-                bucket=target_bucket_name,
+
+        # create target bucket
+        result = self.client.put_bucket(oss.PutBucketRequest(
+            bucket=target_bucket_name,
+            acl='private',
+            create_bucket_configuration=oss.CreateBucketConfiguration(
+                storage_class='IA'
             )
-        )
+        ))
         self.assertEqual(200, result.status_code)
         self.assertEqual('OK', result.status)
         self.assertEqual(24, len(result.request_id))
         self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
 
         # 2. Put bucket logging (enable logging)
         logging_prefix = 'log-prefix'
         logging_role = 'AliyunOSSLoggingDefaultRole'
-        # Using all 3 parameters for LoggingEnabled: target_bucket, target_prefix, logging_role
         put_result = self.vector_client.put_bucket_logging(
             oss_vectors.models.PutBucketLoggingRequest(
                 bucket=source_bucket_name,
@@ -66,12 +70,10 @@ def test_vector_bucket_logging(self):
         self.assertEqual('OK', get_result.status)
         self.assertEqual(24, len(get_result.request_id))
         self.assertEqual(24, len(get_result.headers.get('x-oss-request-id')))
-        # Verify the logging configuration retrieved matches what was set
         self.assertIsNotNone(get_result.bucket_logging_status)
         self.assertIsNotNone(get_result.bucket_logging_status.logging_enabled)
         self.assertEqual(target_bucket_name, get_result.bucket_logging_status.logging_enabled.target_bucket)
         self.assertEqual(logging_prefix, get_result.bucket_logging_status.logging_enabled.target_prefix)
-        # Verify the third parameter
         self.assertEqual(logging_role, get_result.bucket_logging_status.logging_enabled.logging_role)
 
         # 4. Delete bucket logging (disable logging)
@@ -90,16 +92,14 @@ def test_vector_bucket_logging(self):
                 bucket=source_bucket_name
             )
         )
-        # When logging is disabled, LoggingEnabled should be None or absent
         self.assertEqual(200, get_result_after_delete.status_code)
         self.assertEqual('OK', get_result_after_delete.status)
         self.assertEqual(24, len(get_result_after_delete.request_id))
         self.assertEqual(24, len(get_result_after_delete.headers.get('x-oss-request-id')))
-        # Verify logging is disabled (LoggingEnabled is None or absent)
         self.assertIsNotNone(get_result_after_delete.bucket_logging_status)
-        # According to OSS API, when logging is disabled, the LoggingEnabled element is not returned
-        # or is None. Check for None here.
-        self.assertIsNone(get_result_after_delete.bucket_logging_status.logging_enabled)
+        self.assertIsNone(get_result_after_delete.bucket_logging_status.logging_enabled.target_bucket)
+        self.assertIsNone(get_result_after_delete.bucket_logging_status.logging_enabled.target_prefix)
+        self.assertIsNone(get_result_after_delete.bucket_logging_status.logging_enabled.logging_role)
 
         # 6. Delete buckets (cleanup)
         # Delete source bucket
@@ -113,8 +113,8 @@ def test_vector_bucket_logging(self):
         self.assertEqual(24, len(delete_source_result.headers.get('x-oss-request-id')))
 
         # Delete target bucket
-        delete_target_result = self.vector_client.delete_vector_bucket(
-            oss_vectors.models.DeleteVectorBucketRequest(
+        delete_target_result = self.client.delete_bucket(
+            oss.models.DeleteBucketRequest(
                 bucket=target_bucket_name,
             )
         )
diff --git a/tests/integration/vector/test_vector_bucket_policy_client.py b/tests/integration/vector/test_vector_bucket_policy_client.py
index c804158..e00fbf6 100644
--- a/tests/integration/vector/test_vector_bucket_policy_client.py
+++ b/tests/integration/vector/test_vector_bucket_policy_client.py
@@ -1,19 +1,16 @@
 # pylint: skip-file
-
-from typing import cast
-import alibabacloud_oss_v2 as oss
+import time
 import alibabacloud_oss_v2.vectors as oss_vectors
-from tests.integration import TestIntegrationVectors, random_bucket_name
+from tests.integration import TestIntegrationVectors, random_short_bucket_name
 
 
 class TestVectorBucketBasic(TestIntegrationVectors):
 
     def test_vector_bucket_policy(self):
         # create bucket
-        bucket_name = random_bucket_name()
+        bucket_name = random_short_bucket_name()
         result = self.vector_client.put_vector_bucket(oss_vectors.models.PutVectorBucketRequest(
             bucket=bucket_name,
-            acl='private',
         ))
         self.assertEqual(200, result.status_code)
         self.assertEqual('OK', result.status)
@@ -24,9 +21,10 @@ def test_vector_bucket_policy(self):
         policy_text += '{'
         policy_text += '"Version":"1",'
         policy_text += '"Statement":[{'
-        policy_text += '"Action":["oss:PutObject"],'
-        policy_text += '"Effect":"Allow",'
-        policy_text += f'"Resource": ["acs:oss:*:*:{bucket_name}","acs:oss:*:*:{bucket_name}/*"]'
+        policy_text += '"Action":["ossvector:PutVectors","ossvector:GetVectors"],'
+        policy_text += '"Effect":"Deny",'
+        policy_text += '"Principal":["1234567890"],'
+        policy_text += f'"Resource": ["acs:ossvector:*:*:{bucket_name}","acs:oss:*:*:{bucket_name}/*"]'
         policy_text += '}]}'
 
         # put bucket policy
@@ -39,6 +37,8 @@ def test_vector_bucket_policy(self):
         self.assertEqual(24, len(result.request_id))
         self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
 
+        time.sleep(1)
+
         # get bucket policy
         result = self.vector_client.get_bucket_policy(oss_vectors.models.GetBucketPolicyRequest(
             bucket=bucket_name,
@@ -53,8 +53,7 @@ def test_vector_bucket_policy(self):
         result = self.vector_client.delete_bucket_policy(oss_vectors.models.DeleteBucketPolicyRequest(
             bucket=bucket_name,
         ))
-        self.assertEqual(200, result.status_code)
-        self.assertEqual('OK', result.status)
+        self.assertEqual(204, result.status_code)
         self.assertEqual(24, len(result.request_id))
         self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
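The policy document in this test is assembled by string concatenation; an equivalent construction with json.dumps, shown only as a readability alternative with a placeholder bucket name, would be:

import json

bucket_name = "example-vector-bucket"  # placeholder
policy_text = json.dumps({
    "Version": "1",
    "Statement": [{
        "Action": ["ossvector:PutVectors", "ossvector:GetVectors"],
        "Effect": "Deny",
        "Principal": ["1234567890"],
        "Resource": [f"acs:ossvector:*:*:{bucket_name}", f"acs:oss:*:*:{bucket_name}/*"],
    }],
})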
diff --git a/tests/integration/vector/test_vector_index_client.py b/tests/integration/vector/test_vector_index_client.py
index 78f7f9f..73193a7 100644
--- a/tests/integration/vector/test_vector_index_client.py
+++ b/tests/integration/vector/test_vector_index_client.py
@@ -1,8 +1,6 @@
 # pylint: skip-file
-
-
 import alibabacloud_oss_v2.vectors as oss_vectors
-from tests.integration import TestIntegrationVectors, random_bucket_name
+from tests.integration import TestIntegrationVectors, random_short_bucket_name
 
 
 class TestVectorIndex(TestIntegrationVectors):
@@ -11,7 +9,7 @@ class TestVectorIndex(TestIntegrationVectors):
     def test_vector_index_lifecycle(self):
         """Test the full lifecycle of a vector index: create (put), get, list, and delete."""
         # 1. Create bucket for testing
-        bucket_name = random_bucket_name()
+        bucket_name = random_short_bucket_name()
         create_bucket_result = self.vector_client.put_vector_bucket(
             oss_vectors.models.PutVectorBucketRequest(
                 bucket=bucket_name,
@@ -23,10 +21,10 @@ def test_vector_index_lifecycle(self):
         self.assertEqual(24, len(create_bucket_result.headers.get('x-oss-request-id')))
 
         # 2. Put (Create) a vector index
-        index_name = 'test-index-for-integration'
-        dimension = 128
-        distance_metric = 'EUCLIDEAN'
-        data_type = 'vector'
+        index_name = 'testIndexForIntegration'
+        dimension = 3
+        distance_metric = 'cosine'
+        data_type = 'float32'
         metadata = {"nonFilterableMetadataKeys": ["key1", "key2"]}
 
         put_index_request = oss_vectors.models.PutVectorIndexRequest(
@@ -46,13 +44,6 @@ def test_vector_index_lifecycle(self):
         self.assertEqual(24, len(put_result.request_id))
         self.assertEqual(24, len(put_result.headers.get('x-oss-request-id')))
 
-        # Assert response contains index details
-        self.assertIsNotNone(put_result.index)
-        self.assertEqual(index_name, put_result.index.index_name)
-        self.assertEqual(dimension, put_result.index.dimension)
-        self.assertEqual(distance_metric, put_result.index.distance_metric)
-        # Check if other fields like status, create_time etc. are present if needed
-
         # 3. Get the created vector index
         get_index_request = oss_vectors.models.GetVectorIndexRequest(
             bucket=bucket_name,
@@ -69,9 +60,9 @@ def test_vector_index_lifecycle(self):
 
         # Assert retrieved index details match the created ones
         self.assertIsNotNone(get_result.index)
-        self.assertEqual(index_name, get_result.index.index_name)
-        self.assertEqual(dimension, get_result.index.dimension)
-        self.assertEqual(distance_metric, get_result.index.distance_metric)
+        self.assertEqual(index_name, get_result.index.get('indexName'))
+        self.assertEqual(dimension, get_result.index.get('dimension'))
+        self.assertEqual(distance_metric, get_result.index.get('distanceMetric'))
 
         # 4. List vector indexes and verify our index is included
         list_indexes_request = oss_vectors.models.ListVectorIndexesRequest(
@@ -85,21 +76,19 @@ def test_vector_index_lifecycle(self):
         self.assertEqual('OK', list_result.status)
         self.assertEqual(24, len(list_result.request_id))
         self.assertEqual(24, len(list_result.headers.get('x-oss-request-id')))
-
-        # Assert the list is present and contains our index
         self.assertIsNotNone(list_result.indexes)
-        self.assertGreater(len(list_result.indexes), 0)
+        self.assertEqual(len(list_result.indexes), 1)
 
         # Find our specific index in the list
         found_index = None
         for index in list_result.indexes:
-            if index.index_name == index_name:
+            if index.get('indexName') == index_name:
                 found_index = index
                 break
 
         self.assertIsNotNone(found_index, f"Index '{index_name}' not found in the list")
-        self.assertEqual(dimension, found_index.dimension)
-        self.assertEqual(distance_metric, found_index.distance_metric)
+        self.assertEqual(dimension, found_index.get('dimension'))
+        self.assertEqual(distance_metric, found_index.get('distanceMetric'))
 
         # 5. Delete the vector index
         delete_index_request = oss_vectors.models.DeleteVectorIndexRequest(
diff --git a/tests/unit/vectors/models/test_index_basic.py b/tests/unit/vectors/models/test_index_basic.py
index ec30b57..2b1983e 100644
--- a/tests/unit/vectors/models/test_index_basic.py
+++ b/tests/unit/vectors/models/test_index_basic.py
@@ -134,9 +134,9 @@ def test_deserialize_result(self):
                 "metadata": {
                     "nonFilterableMetadataKeys": ["key1", "key2"]
                 },
-                "status": "Active"
+                "status": "Active",
+                "vectorBucketName": "test-bucket"
             },
-            "vectorBucketName": "test-bucket"
         }
         '''
 
@@ -160,9 +160,7 @@ def test_deserialize_result(self):
         self.assertEqual(result.index.get('distanceMetric'), 'EUCLIDEAN')
         self.assertEqual(result.index.get('indexName'), 'test-index')
         self.assertEqual(result.index.get('status'), 'Active')
-        self.assertEqual(result.vector_bucket_name, 'test-bucket')
-        self.assertIsNotNone(result.index.get('metadata'))
-        self.assertIn('nonFilterableMetadataKeys', result.index.get('metadata'))
+        self.assertEqual(result.index.get('vectorBucketName'), 'test-bucket')
         self.assertEqual(result.index.get('metadata').get('nonFilterableMetadataKeys'), ['key1', 'key2'])
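Since GetVectorIndexResult no longer exposes vector_bucket_name, callers read the bucket name from the index payload itself; a brief sketch using the same field values the unit test deserializes.

# Shape of result.index after the model change; values mirror the unit test above.
index = {
    "distanceMetric": "EUCLIDEAN",
    "indexName": "test-index",
    "metadata": {"nonFilterableMetadataKeys": ["key1", "key2"]},
    "status": "Active",
    "vectorBucketName": "test-bucket",
}
# Formerly result.vector_bucket_name; now nested inside the index dict.
print(index.get("vectorBucketName"))  # test-bucket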