From 1ec5a03a46e9f35b9c2181eaeb74c0bfd39736de Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 7 Mar 2025 11:40:55 -0500 Subject: [PATCH 01/48] Add sleep to test --- codegen/apis | 2 +- tests/integration/data/test_query.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/codegen/apis b/codegen/apis index eb79d8ea..483b3885 160000 --- a/codegen/apis +++ b/codegen/apis @@ -1 +1 @@ -Subproject commit eb79d8ea0c146aebe36c3769e19cbe9618db2d54 +Subproject commit 483b3885439a51ef831b820bfa621e2c9515834f diff --git a/tests/integration/data/test_query.py b/tests/integration/data/test_query.py index 99971f8d..2a40968e 100644 --- a/tests/integration/data/test_query.py +++ b/tests/integration/data/test_query.py @@ -2,6 +2,7 @@ from pinecone import QueryResponse, Vector from ..helpers import embedding_values, poll_fetch_for_ids_in_namespace, random_string import logging +import time logger = logging.getLogger(__name__) @@ -62,6 +63,7 @@ def seed(idx, namespace): def seed_for_query(idx, query_namespace): seed(idx, query_namespace) seed(idx, "") + time.sleep(30) yield From 7eb6192b6bc73f6e14eb2eef5bd35062d2ca3e43 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Mon, 3 Mar 2025 10:28:36 -0500 Subject: [PATCH 02/48] Regenerate code for 2025-04 --- codegen/build-oas.sh | 22 ++++------ pinecone/core/openapi/db_control/__init__.py | 4 +- .../db_control/api/manage_indexes_api.py | 42 +++++++++---------- .../db_control/model/collection_list.py | 2 +- .../db_control/model/collection_model.py | 2 +- .../model/configure_index_request.py | 2 +- .../model/configure_index_request_embed.py | 2 +- .../model/configure_index_request_spec.py | 2 +- .../model/configure_index_request_spec_pod.py | 2 +- .../model/create_collection_request.py | 2 +- .../model/create_index_for_model_request.py | 2 +- .../create_index_for_model_request_embed.py | 6 ++- .../db_control/model/create_index_request.py | 2 +- .../db_control/model/deletion_protection.py | 2 +- 
.../db_control/model/error_response.py | 2 +- .../db_control/model/error_response_error.py | 2 +- .../openapi/db_control/model/index_list.py | 2 +- .../openapi/db_control/model/index_model.py | 2 +- .../db_control/model/index_model_spec.py | 2 +- .../db_control/model/index_model_status.py | 3 +- .../openapi/db_control/model/index_spec.py | 2 +- .../openapi/db_control/model/index_tags.py | 2 +- .../db_control/model/model_index_embed.py | 2 +- .../core/openapi/db_control/model/pod_spec.py | 2 +- .../model/pod_spec_metadata_config.py | 2 +- .../db_control/model/serverless_spec.py | 2 +- pinecone/core/openapi/db_data/__init__.py | 4 +- .../db_data/api/bulk_operations_api.py | 6 +-- .../db_data/api/vector_operations_api.py | 42 +++++++++---------- .../openapi/db_data/model/delete_request.py | 2 +- .../model/describe_index_stats_request.py | 2 +- .../openapi/db_data/model/fetch_response.py | 2 +- pinecone/core/openapi/db_data/model/hit.py | 2 +- .../db_data/model/import_error_mode.py | 2 +- .../openapi/db_data/model/import_model.py | 2 +- .../db_data/model/index_description.py | 2 +- .../db_data/model/list_imports_response.py | 2 +- .../core/openapi/db_data/model/list_item.py | 2 +- .../openapi/db_data/model/list_response.py | 2 +- .../db_data/model/namespace_summary.py | 2 +- .../core/openapi/db_data/model/pagination.py | 2 +- .../openapi/db_data/model/protobuf_any.py | 2 +- .../db_data/model/protobuf_null_value.py | 2 +- .../openapi/db_data/model/query_request.py | 14 +++---- .../openapi/db_data/model/query_response.py | 2 +- .../openapi/db_data/model/query_vector.py | 2 +- .../core/openapi/db_data/model/rpc_status.py | 2 +- .../openapi/db_data/model/scored_vector.py | 2 +- .../db_data/model/search_records_request.py | 6 +-- .../model/search_records_request_query.py | 10 ++--- .../model/search_records_request_rerank.py | 2 +- .../db_data/model/search_records_response.py | 2 +- .../model/search_records_response_result.py | 2 +- .../db_data/model/search_records_vector.py 
| 2 +- .../openapi/db_data/model/search_usage.py | 2 +- .../openapi/db_data/model/search_vector.py | 2 +- .../db_data/model/single_query_results.py | 2 +- .../openapi/db_data/model/sparse_values.py | 2 +- .../db_data/model/start_import_request.py | 2 +- .../db_data/model/start_import_response.py | 2 +- .../openapi/db_data/model/update_request.py | 2 +- .../openapi/db_data/model/upsert_record.py | 2 +- .../openapi/db_data/model/upsert_request.py | 2 +- .../openapi/db_data/model/upsert_response.py | 2 +- pinecone/core/openapi/db_data/model/usage.py | 2 +- pinecone/core/openapi/db_data/model/vector.py | 2 +- .../openapi/db_data/model/vector_values.py | 2 +- pinecone/core/openapi/inference/__init__.py | 4 +- .../openapi/inference/api/inference_api.py | 10 ++--- .../inference/model/dense_embedding.py | 2 +- .../core/openapi/inference/model/document.py | 2 +- .../openapi/inference/model/embed_request.py | 2 +- .../inference/model/embed_request_inputs.py | 2 +- .../core/openapi/inference/model/embedding.py | 2 +- .../inference/model/embeddings_list.py | 2 +- .../inference/model/embeddings_list_usage.py | 2 +- .../openapi/inference/model/error_response.py | 2 +- .../inference/model/error_response_error.py | 2 +- .../inference/model/ranked_document.py | 2 +- .../openapi/inference/model/rerank_request.py | 2 +- .../openapi/inference/model/rerank_result.py | 2 +- .../inference/model/rerank_result_usage.py | 2 +- .../inference/model/sparse_embedding.py | 2 +- .../openapi/inference/model/vector_type.py | 2 +- pinecone/openapi_support/api_version.py | 4 +- 85 files changed, 158 insertions(+), 161 deletions(-) diff --git a/codegen/build-oas.sh b/codegen/build-oas.sh index 04bc83b7..d4f83784 100755 --- a/codegen/build-oas.sh +++ b/codegen/build-oas.sh @@ -2,21 +2,13 @@ set -eux -o pipefail -version=$1 # e.g. 2024-07 -is_early_access=$2 # e.g. 
true - -# if is_early_access is true, add the "ea" module -if [ "$is_early_access" = "true" ]; then - destination="pinecone/core_ea/openapi" - modules=("db_control" "db_data" "inference") - py_module_name="core_ea" - template_dir="codegen/python-oas-templates/templates5.2.0" -else - destination="pinecone/core/openapi" - modules=("db_control" "db_data" "inference") - py_module_name="core" - template_dir="codegen/python-oas-templates/templates5.2.0" -fi +version=$1 # e.g. 2025-01 + + +destination="pinecone/core/openapi" +modules=("db_control" "db_data" "inference") +py_module_name="core" +template_dir="codegen/python-oas-templates/templates5.2.0" build_dir="build" diff --git a/pinecone/core/openapi/db_control/__init__.py b/pinecone/core/openapi/db_control/__init__.py index e8106fac..1a6949bb 100644 --- a/pinecone/core/openapi/db_control/__init__.py +++ b/pinecone/core/openapi/db_control/__init__.py @@ -7,7 +7,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -27,4 +27,4 @@ from pinecone.openapi_support.exceptions import PineconeApiKeyError from pinecone.openapi_support.exceptions import PineconeApiException -API_VERSION = "2025-01" +API_VERSION = "2025-04" diff --git a/pinecone/core/openapi/db_control/api/manage_indexes_api.py b/pinecone/core/openapi/db_control/api/manage_indexes_api.py index 2d2f464d..b347be5d 100644 --- a/pinecone/core/openapi/db_control/api/manage_indexes_api.py +++ b/pinecone/core/openapi/db_control/api/manage_indexes_api.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -52,7 +52,7 @@ def __configure_index( ): """Configure an index # noqa: E501 - This operation configures an existing index. 
For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501 + Configure an existing index. For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -128,7 +128,7 @@ def __create_collection( ): """Create a collection # noqa: E501 - This operation creates a Pinecone collection. Serverless indexes do not support collections. # noqa: E501 + Create a Pinecone collection. Serverless indexes do not support collections. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True @@ -197,7 +197,7 @@ def __create_collection( def __create_index(self, create_index_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Create an index # noqa: E501 - This operation deploys a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#create-a-serverless-index). # noqa: E501 + Create a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#create-a-serverless-index). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -266,9 +266,9 @@ def __create_index(self, create_index_request, **kwargs: ExtraOpenApiKwargsTyped def __create_index_for_model( self, create_index_for_model_request, **kwargs: ExtraOpenApiKwargsTypedDict ): - """Create an index for an embedding model # noqa: E501 + """Create an index with integrated embedding # noqa: E501 - This operation creates a serverless integrated inference index for a specific embedding model. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) for available models and model details. # noqa: E501 + Create an index with integrated embedding. 
With this type of index, you provide source text, and Pinecone uses a [hosted embedding model](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) to convert the text automatically during [upsert](https://docs.pinecone.io/reference/api/2025-01/data-plane/upsert_records) and [search](https://docs.pinecone.io/reference/api/2025-01/data-plane/search_records). For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -337,7 +337,7 @@ def __create_index_for_model( def __delete_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTypedDict): """Delete a collection # noqa: E501 - This operation deletes an existing collection. Serverless indexes do not support collections. # noqa: E501 + Delete an existing collection. Serverless indexes do not support collections. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -406,7 +406,7 @@ def __delete_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTyped def __delete_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): """Delete an index # noqa: E501 - This operation deletes an existing index. # noqa: E501 + Delete an existing index. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -475,7 +475,7 @@ def __delete_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): def __describe_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTypedDict): """Describe a collection # noqa: E501 - This operation gets a description of a collection. Serverless indexes do not support collections. # noqa: E501 + Get a description of a collection. 
Serverless indexes do not support collections. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -613,7 +613,7 @@ def __describe_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): def __list_collections(self, **kwargs: ExtraOpenApiKwargsTypedDict): """List collections # noqa: E501 - This operation returns a list of all collections in a project. Serverless indexes do not support collections. # noqa: E501 + List all collections in a project. Serverless indexes do not support collections. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -673,7 +673,7 @@ def __list_collections(self, **kwargs: ExtraOpenApiKwargsTypedDict): def __list_indexes(self, **kwargs: ExtraOpenApiKwargsTypedDict): """List indexes # noqa: E501 - This operation returns a list of all indexes in a project. # noqa: E501 + List all indexes in a project. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -745,7 +745,7 @@ def __init__(self, api_client=None) -> None: async def __configure_index(self, index_name, configure_index_request, **kwargs): """Configure an index # noqa: E501 - This operation configures an existing index. For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. 
For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501 + Configure an existing index. For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501 Args: @@ -812,7 +812,7 @@ async def __configure_index(self, index_name, configure_index_request, **kwargs) async def __create_collection(self, create_collection_request, **kwargs): """Create a collection # noqa: E501 - This operation creates a Pinecone collection. Serverless indexes do not support collections. # noqa: E501 + Create a Pinecone collection. Serverless indexes do not support collections. # noqa: E501 Args: @@ -874,7 +874,7 @@ async def __create_collection(self, create_collection_request, **kwargs): async def __create_index(self, create_index_request, **kwargs): """Create an index # noqa: E501 - This operation deploys a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#create-a-serverless-index). # noqa: E501 + Create a Pinecone index. 
This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#create-a-serverless-index). # noqa: E501 Args: @@ -934,9 +934,9 @@ async def __create_index(self, create_index_request, **kwargs): ) async def __create_index_for_model(self, create_index_for_model_request, **kwargs): - """Create an index for an embedding model # noqa: E501 + """Create an index with integrated embedding # noqa: E501 - This operation creates a serverless integrated inference index for a specific embedding model. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) for available models and model details. # noqa: E501 + Create an index with integrated embedding. With this type of index, you provide source text, and Pinecone uses a [hosted embedding model](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) to convert the text automatically during [upsert](https://docs.pinecone.io/reference/api/2025-01/data-plane/upsert_records) and [search](https://docs.pinecone.io/reference/api/2025-01/data-plane/search_records). For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). # noqa: E501 Args: @@ -998,7 +998,7 @@ async def __create_index_for_model(self, create_index_for_model_request, **kwarg async def __delete_collection(self, collection_name, **kwargs): """Delete a collection # noqa: E501 - This operation deletes an existing collection. Serverless indexes do not support collections. # noqa: E501 + Delete an existing collection. Serverless indexes do not support collections. 
# noqa: E501 Args: @@ -1060,7 +1060,7 @@ async def __delete_collection(self, collection_name, **kwargs): async def __delete_index(self, index_name, **kwargs): """Delete an index # noqa: E501 - This operation deletes an existing index. # noqa: E501 + Delete an existing index. # noqa: E501 Args: @@ -1122,7 +1122,7 @@ async def __delete_index(self, index_name, **kwargs): async def __describe_collection(self, collection_name, **kwargs): """Describe a collection # noqa: E501 - This operation gets a description of a collection. Serverless indexes do not support collections. # noqa: E501 + Get a description of a collection. Serverless indexes do not support collections. # noqa: E501 Args: @@ -1246,7 +1246,7 @@ async def __describe_index(self, index_name, **kwargs): async def __list_collections(self, **kwargs): """List collections # noqa: E501 - This operation returns a list of all collections in a project. Serverless indexes do not support collections. # noqa: E501 + List all collections in a project. Serverless indexes do not support collections. # noqa: E501 @@ -1299,7 +1299,7 @@ async def __list_collections(self, **kwargs): async def __list_indexes(self, **kwargs): """List indexes # noqa: E501 - This operation returns a list of all indexes in a project. # noqa: E501 + List all indexes in a project. # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/collection_list.py b/pinecone/core/openapi/db_control/model/collection_list.py index e36d3d9d..96e1632a 100644 --- a/pinecone/core/openapi/db_control/model/collection_list.py +++ b/pinecone/core/openapi/db_control/model/collection_list.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/collection_model.py b/pinecone/core/openapi/db_control/model/collection_model.py index 88d2334d..bafe08ab 100644 --- a/pinecone/core/openapi/db_control/model/collection_model.py +++ b/pinecone/core/openapi/db_control/model/collection_model.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/configure_index_request.py b/pinecone/core/openapi/db_control/model/configure_index_request.py index 7f4a5cf3..aa2d7704 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_embed.py b/pinecone/core/openapi/db_control/model/configure_index_request_embed.py index 55f12ebf..ce980b88 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_embed.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_embed.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_spec.py b/pinecone/core/openapi/db_control/model/configure_index_request_spec.py index 484ad71d..ae9a76fa 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_spec.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_spec.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py b/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py index 3a3ea96e..67721ce4 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/create_collection_request.py b/pinecone/core/openapi/db_control/model/create_collection_request.py index 0e14dc22..fe2807e2 100644 --- a/pinecone/core/openapi/db_control/model/create_collection_request.py +++ b/pinecone/core/openapi/db_control/model/create_collection_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/create_index_for_model_request.py b/pinecone/core/openapi/db_control/model/create_index_for_model_request.py index c7d889a2..d4331ba4 100644 --- a/pinecone/core/openapi/db_control/model/create_index_for_model_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_for_model_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py b/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py index 88de090a..7d3f4f08 100644 --- a/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py +++ b/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -89,6 +89,7 @@ def openapi_types(cls): "model": (str,), # noqa: E501 "field_map": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 "metric": (str,), # noqa: E501 + "dimension": (int,), # noqa: E501 "read_parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 "write_parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 } @@ -101,6 +102,7 @@ def discriminator(cls): "model": "model", # noqa: E501 "field_map": "field_map", # noqa: E501 "metric": "metric", # noqa: E501 + "dimension": "dimension", # noqa: E501 "read_parameters": "read_parameters", # noqa: E501 "write_parameters": "write_parameters", # noqa: E501 } @@ -150,6 +152,7 @@ def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # through its discriminator because we passed in _visited_composed_classes = (Animal,) metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If not specified, the metric will be defaulted according to the model. Cannot be updated once set. [optional] # noqa: E501 + dimension (int): The dimension of embedding vectors produced for the index. [optional] # noqa: E501 read_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The read parameters for the embedding model. [optional] # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 """ @@ -242,6 +245,7 @@ def __init__(self, model, field_map, *args, **kwargs) -> None: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. 
If not specified, the metric will be defaulted according to the model. Cannot be updated once set. [optional] # noqa: E501 + dimension (int): The dimension of embedding vectors produced for the index. [optional] # noqa: E501 read_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The read parameters for the embedding model. [optional] # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 """ diff --git a/pinecone/core/openapi/db_control/model/create_index_request.py b/pinecone/core/openapi/db_control/model/create_index_request.py index ae61e95a..ff9548b7 100644 --- a/pinecone/core/openapi/db_control/model/create_index_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/deletion_protection.py b/pinecone/core/openapi/db_control/model/deletion_protection.py index 24fc26f3..77682626 100644 --- a/pinecone/core/openapi/db_control/model/deletion_protection.py +++ b/pinecone/core/openapi/db_control/model/deletion_protection.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/error_response.py b/pinecone/core/openapi/db_control/model/error_response.py index b37c2e97..a64ae9e2 100644 --- a/pinecone/core/openapi/db_control/model/error_response.py +++ b/pinecone/core/openapi/db_control/model/error_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/error_response_error.py b/pinecone/core/openapi/db_control/model/error_response_error.py index 613ccc4f..488c99b5 100644 --- a/pinecone/core/openapi/db_control/model/error_response_error.py +++ b/pinecone/core/openapi/db_control/model/error_response_error.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/index_list.py b/pinecone/core/openapi/db_control/model/index_list.py index ed6c08df..99a4d6df 100644 --- a/pinecone/core/openapi/db_control/model/index_list.py +++ b/pinecone/core/openapi/db_control/model/index_list.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/index_model.py b/pinecone/core/openapi/db_control/model/index_model.py index 3fe45763..2e5c6224 100644 --- a/pinecone/core/openapi/db_control/model/index_model.py +++ b/pinecone/core/openapi/db_control/model/index_model.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/index_model_spec.py b/pinecone/core/openapi/db_control/model/index_model_spec.py index b27ec0f8..b75b803f 100644 --- a/pinecone/core/openapi/db_control/model/index_model_spec.py +++ b/pinecone/core/openapi/db_control/model/index_model_spec.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/index_model_status.py b/pinecone/core/openapi/db_control/model/index_model_status.py index 0128f22e..2379c764 100644 --- a/pinecone/core/openapi/db_control/model/index_model_status.py +++ b/pinecone/core/openapi/db_control/model/index_model_status.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -69,6 +69,7 @@ class IndexModelStatus(ModelNormal): "SCALINGDOWNPODSIZE": "ScalingDownPodSize", "TERMINATING": "Terminating", "READY": "Ready", + "DISABLED": "Disabled", } } diff --git a/pinecone/core/openapi/db_control/model/index_spec.py b/pinecone/core/openapi/db_control/model/index_spec.py index 6a8c4db5..add2979e 100644 --- a/pinecone/core/openapi/db_control/model/index_spec.py +++ b/pinecone/core/openapi/db_control/model/index_spec.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/index_tags.py b/pinecone/core/openapi/db_control/model/index_tags.py index 6111f7b5..bee527a5 100644 --- a/pinecone/core/openapi/db_control/model/index_tags.py +++ b/pinecone/core/openapi/db_control/model/index_tags.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/model_index_embed.py b/pinecone/core/openapi/db_control/model/model_index_embed.py index 624462a8..1e358059 100644 --- a/pinecone/core/openapi/db_control/model/model_index_embed.py +++ b/pinecone/core/openapi/db_control/model/model_index_embed.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/pod_spec.py b/pinecone/core/openapi/db_control/model/pod_spec.py index c430c6a3..a0171c8c 100644 --- a/pinecone/core/openapi/db_control/model/pod_spec.py +++ b/pinecone/core/openapi/db_control/model/pod_spec.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py b/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py index 969471e6..d8df2bc9 100644 --- a/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py +++ b/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_control/model/serverless_spec.py b/pinecone/core/openapi/db_control/model/serverless_spec.py index 58e39e48..20ac901f 100644 --- a/pinecone/core/openapi/db_control/model/serverless_spec.py +++ b/pinecone/core/openapi/db_control/model/serverless_spec.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/__init__.py b/pinecone/core/openapi/db_data/__init__.py index c878a548..e8cbbfe1 100644 --- a/pinecone/core/openapi/db_data/__init__.py +++ b/pinecone/core/openapi/db_data/__init__.py @@ -7,7 +7,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -27,4 +27,4 @@ from pinecone.openapi_support.exceptions import PineconeApiKeyError from pinecone.openapi_support.exceptions import PineconeApiException -API_VERSION = "2025-01" +API_VERSION = "2025-04" diff --git a/pinecone/core/openapi/db_data/api/bulk_operations_api.py b/pinecone/core/openapi/db_data/api/bulk_operations_api.py index fcfbac8a..c0be9543 100644 --- a/pinecone/core/openapi/db_data/api/bulk_operations_api.py +++ b/pinecone/core/openapi/db_data/api/bulk_operations_api.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -182,7 +182,7 @@ def __describe_bulk_import(self, id, **kwargs: ExtraOpenApiKwargsTypedDict): def __list_bulk_imports(self, **kwargs: ExtraOpenApiKwargsTypedDict): """List imports # noqa: E501 - List all recent and ongoing import operations. By default, this returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + List all recent and ongoing import operations. 
By default, `list_imports` returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -455,7 +455,7 @@ async def __describe_bulk_import(self, id, **kwargs): async def __list_bulk_imports(self, **kwargs): """List imports # noqa: E501 - List all recent and ongoing import operations. By default, this returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + List all recent and ongoing import operations. By default, `list_imports` returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). 
# noqa: E501 diff --git a/pinecone/core/openapi/db_data/api/vector_operations_api.py b/pinecone/core/openapi/db_data/api/vector_operations_api.py index 22bb47c3..a5a3c70e 100644 --- a/pinecone/core/openapi/db_data/api/vector_operations_api.py +++ b/pinecone/core/openapi/db_data/api/vector_operations_api.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -55,7 +55,7 @@ def __init__(self, api_client=None) -> None: def __delete_vectors(self, delete_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Delete vectors # noqa: E501 - Delete vectors, by id, from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/data/delete-data). # noqa: E501 + Delete vectors by id from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/data/delete-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -195,7 +195,7 @@ def __describe_index_stats( def __fetch_vectors(self, ids, **kwargs: ExtraOpenApiKwargsTypedDict): """Fetch vectors # noqa: E501 - Look up and return vectors, by ID, from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/data/fetch-data). # noqa: E501 + Look up and return vectors by ID from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/data/fetch-data). # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True @@ -265,7 +265,7 @@ def __fetch_vectors(self, ids, **kwargs: ExtraOpenApiKwargsTypedDict): def __list_vectors(self, **kwargs: ExtraOpenApiKwargsTypedDict): """List vector IDs # noqa: E501 - List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. This returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 + List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. Returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True @@ -348,9 +348,9 @@ def __list_vectors(self, **kwargs: ExtraOpenApiKwargsTypedDict): ) def __query_vectors(self, query_request, **kwargs: ExtraOpenApiKwargsTypedDict): - """Query vectors # noqa: E501 + """Search with a vector # noqa: E501 - Search a namespace, using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Query data](https://docs.pinecone.io/guides/data/query-data). # noqa: E501 + Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Query data](https://docs.pinecone.io/guides/data/query-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -419,9 +419,9 @@ def __query_vectors(self, query_request, **kwargs: ExtraOpenApiKwargsTypedDict): def __search_records_namespace( self, namespace, search_records_request, **kwargs: ExtraOpenApiKwargsTypedDict ): - """Search a namespace # noqa: E501 + """Search with text # noqa: E501 - This operation converts a query to a vector embedding and then searches a namespace using the embedding. It returns the most similar records in the namespace, along with their similarity scores. # noqa: E501 + Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). Searching with a query vector or record ID is supported for all indexes. For guidance and examples, see [Query data](https://docs.pinecone.io/guides/data/query-data). 
# noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -564,9 +564,9 @@ def __update_vector(self, update_request, **kwargs: ExtraOpenApiKwargsTypedDict) def __upsert_records_namespace( self, namespace, upsert_record, **kwargs: ExtraOpenApiKwargsTypedDict ): - """Upsert records into a namespace # noqa: E501 + """Upsert text # noqa: E501 - This operation converts input data to vector embeddings and then upserts the embeddings into a namespace. # noqa: E501 + Upsert text into a namespace. Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/data/upsert-data#upsert-text). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -637,7 +637,7 @@ def __upsert_records_namespace( def __upsert_vectors(self, upsert_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Upsert vectors # noqa: E501 - Write vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/data/upsert-data). # noqa: E501 + Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/data/upsert-data#upsert-vectors). # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True @@ -718,7 +718,7 @@ def __init__(self, api_client=None) -> None: async def __delete_vectors(self, delete_request, **kwargs): """Delete vectors # noqa: E501 - Delete vectors, by id, from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/data/delete-data). # noqa: E501 + Delete vectors by id from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/data/delete-data). # noqa: E501 Args: @@ -842,7 +842,7 @@ async def __describe_index_stats(self, describe_index_stats_request, **kwargs): async def __fetch_vectors(self, ids, **kwargs): """Fetch vectors # noqa: E501 - Look up and return vectors, by ID, from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/data/fetch-data). # noqa: E501 + Look up and return vectors by ID from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/data/fetch-data). # noqa: E501 Args: @@ -905,7 +905,7 @@ async def __fetch_vectors(self, ids, **kwargs): async def __list_vectors(self, **kwargs): """List vector IDs # noqa: E501 - List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. This returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/data/list-record-ids). 
**Note:** `list` is supported only for serverless indexes. # noqa: E501 + List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. Returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 @@ -981,9 +981,9 @@ async def __list_vectors(self, **kwargs): ) async def __query_vectors(self, query_request, **kwargs): - """Query vectors # noqa: E501 + """Search with a vector # noqa: E501 - Search a namespace, using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Query data](https://docs.pinecone.io/guides/data/query-data). # noqa: E501 + Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Query data](https://docs.pinecone.io/guides/data/query-data). # noqa: E501 Args: @@ -1043,9 +1043,9 @@ async def __query_vectors(self, query_request, **kwargs): ) async def __search_records_namespace(self, namespace, search_records_request, **kwargs): - """Search a namespace # noqa: E501 + """Search with text # noqa: E501 - This operation converts a query to a vector embedding and then searches a namespace using the embedding. It returns the most similar records in the namespace, along with their similarity scores. 
# noqa: E501 + Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). Searching with a query vector or record ID is supported for all indexes. For guidance and examples, see [Query data](https://docs.pinecone.io/guides/data/query-data). # noqa: E501 Args: @@ -1172,9 +1172,9 @@ async def __update_vector(self, update_request, **kwargs): ) async def __upsert_records_namespace(self, namespace, upsert_record, **kwargs): - """Upsert records into a namespace # noqa: E501 + """Upsert text # noqa: E501 - This operation converts input data to vector embeddings and then upserts the embeddings into a namespace. # noqa: E501 + Upsert text into a namespace. Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/data/upsert-data#upsert-text). # noqa: E501 Args: @@ -1238,7 +1238,7 @@ async def __upsert_records_namespace(self, namespace, upsert_record, **kwargs): async def __upsert_vectors(self, upsert_request, **kwargs): """Upsert vectors # noqa: E501 - Write vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/data/upsert-data). # noqa: E501 + Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. 
For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/data/upsert-data#upsert-vectors). # noqa: E501 Args: diff --git a/pinecone/core/openapi/db_data/model/delete_request.py b/pinecone/core/openapi/db_data/model/delete_request.py index 79e39905..bfb15b87 100644 --- a/pinecone/core/openapi/db_data/model/delete_request.py +++ b/pinecone/core/openapi/db_data/model/delete_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/describe_index_stats_request.py b/pinecone/core/openapi/db_data/model/describe_index_stats_request.py index 769f8dcb..e024d5f3 100644 --- a/pinecone/core/openapi/db_data/model/describe_index_stats_request.py +++ b/pinecone/core/openapi/db_data/model/describe_index_stats_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/fetch_response.py b/pinecone/core/openapi/db_data/model/fetch_response.py index 6a918dbc..f662b596 100644 --- a/pinecone/core/openapi/db_data/model/fetch_response.py +++ b/pinecone/core/openapi/db_data/model/fetch_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/hit.py b/pinecone/core/openapi/db_data/model/hit.py index 8af69b37..b8e16de9 100644 --- a/pinecone/core/openapi/db_data/model/hit.py +++ b/pinecone/core/openapi/db_data/model/hit.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/import_error_mode.py b/pinecone/core/openapi/db_data/model/import_error_mode.py index dd8a0e71..7bda43b0 100644 --- a/pinecone/core/openapi/db_data/model/import_error_mode.py +++ b/pinecone/core/openapi/db_data/model/import_error_mode.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/import_model.py b/pinecone/core/openapi/db_data/model/import_model.py index d1d4589f..4d990886 100644 --- a/pinecone/core/openapi/db_data/model/import_model.py +++ b/pinecone/core/openapi/db_data/model/import_model.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/index_description.py b/pinecone/core/openapi/db_data/model/index_description.py index 1bbc7198..8fa8aa1f 100644 --- a/pinecone/core/openapi/db_data/model/index_description.py +++ b/pinecone/core/openapi/db_data/model/index_description.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/list_imports_response.py b/pinecone/core/openapi/db_data/model/list_imports_response.py index 1b485e8b..254de580 100644 --- a/pinecone/core/openapi/db_data/model/list_imports_response.py +++ b/pinecone/core/openapi/db_data/model/list_imports_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/list_item.py b/pinecone/core/openapi/db_data/model/list_item.py index ad58c600..14d77869 100644 --- a/pinecone/core/openapi/db_data/model/list_item.py +++ b/pinecone/core/openapi/db_data/model/list_item.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/list_response.py b/pinecone/core/openapi/db_data/model/list_response.py index 4a8d0c5e..c4e44746 100644 --- a/pinecone/core/openapi/db_data/model/list_response.py +++ b/pinecone/core/openapi/db_data/model/list_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/namespace_summary.py b/pinecone/core/openapi/db_data/model/namespace_summary.py index a2ba425a..e9f8080c 100644 --- a/pinecone/core/openapi/db_data/model/namespace_summary.py +++ b/pinecone/core/openapi/db_data/model/namespace_summary.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/pagination.py b/pinecone/core/openapi/db_data/model/pagination.py index f93898c8..14cb2301 100644 --- a/pinecone/core/openapi/db_data/model/pagination.py +++ b/pinecone/core/openapi/db_data/model/pagination.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/protobuf_any.py b/pinecone/core/openapi/db_data/model/protobuf_any.py index 6e2c3c54..aebbe361 100644 --- a/pinecone/core/openapi/db_data/model/protobuf_any.py +++ b/pinecone/core/openapi/db_data/model/protobuf_any.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/protobuf_null_value.py b/pinecone/core/openapi/db_data/model/protobuf_null_value.py index 02ac25b8..b46f14d2 100644 --- a/pinecone/core/openapi/db_data/model/protobuf_null_value.py +++ b/pinecone/core/openapi/db_data/model/protobuf_null_value.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/query_request.py b/pinecone/core/openapi/db_data/model/query_request.py index 23763e6d..7aa460c0 100644 --- a/pinecone/core/openapi/db_data/model/query_request.py +++ b/pinecone/core/openapi/db_data/model/query_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -170,13 +170,13 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) namespace (str): The namespace to query. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). 
[optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). [optional] # noqa: E501 include_values (bool): Indicates whether vector values are included in the response. [optional] if omitted the server will use the default value of False. # noqa: E501 include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. [optional] if omitted the server will use the default value of False. # noqa: E501 - queries ([QueryVector]): DEPRECATED. The query vectors. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`. [optional] # noqa: E501 + queries ([QueryVector]): DEPRECATED. Use `vector` or `id` instead. [optional] # noqa: E501 vector ([float]): The query vector. This should be the same length as the dimension of the index being queried. Each `query` request can contain only one of the parameters `id` or `vector`. [optional] # noqa: E501 sparse_vector (SparseValues): [optional] # noqa: E501 - id (str): The unique ID of the vector to be used as a query vector. Each `query` request can contain only one of the parameters `queries`, `vector`, or `id`. [optional] # noqa: E501 + id (str): The unique ID of the vector to be used as a query vector. Each request can contain either the `vector` or `id` parameter. [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -265,13 +265,13 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) namespace (str): The namespace to query. 
[optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). [optional] # noqa: E501 include_values (bool): Indicates whether vector values are included in the response. [optional] if omitted the server will use the default value of False. # noqa: E501 include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. [optional] if omitted the server will use the default value of False. # noqa: E501 - queries ([QueryVector]): DEPRECATED. The query vectors. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`. [optional] # noqa: E501 + queries ([QueryVector]): DEPRECATED. Use `vector` or `id` instead. [optional] # noqa: E501 vector ([float]): The query vector. This should be the same length as the dimension of the index being queried. Each `query` request can contain only one of the parameters `id` or `vector`. [optional] # noqa: E501 sparse_vector (SparseValues): [optional] # noqa: E501 - id (str): The unique ID of the vector to be used as a query vector. Each `query` request can contain only one of the parameters `queries`, `vector`, or `id`. [optional] # noqa: E501 + id (str): The unique ID of the vector to be used as a query vector. Each request can contain either the `vector` or `id` parameter. 
[optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) diff --git a/pinecone/core/openapi/db_data/model/query_response.py b/pinecone/core/openapi/db_data/model/query_response.py index fde3a55d..937de236 100644 --- a/pinecone/core/openapi/db_data/model/query_response.py +++ b/pinecone/core/openapi/db_data/model/query_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/query_vector.py b/pinecone/core/openapi/db_data/model/query_vector.py index 30f4c8de..12caef5d 100644 --- a/pinecone/core/openapi/db_data/model/query_vector.py +++ b/pinecone/core/openapi/db_data/model/query_vector.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/rpc_status.py b/pinecone/core/openapi/db_data/model/rpc_status.py index fa824641..30196b58 100644 --- a/pinecone/core/openapi/db_data/model/rpc_status.py +++ b/pinecone/core/openapi/db_data/model/rpc_status.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/scored_vector.py b/pinecone/core/openapi/db_data/model/scored_vector.py index 87d51d94..949f4e2a 100644 --- a/pinecone/core/openapi/db_data/model/scored_vector.py +++ b/pinecone/core/openapi/db_data/model/scored_vector.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/search_records_request.py b/pinecone/core/openapi/db_data/model/search_records_request.py index 9393c415..df5fcd23 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request.py +++ b/pinecone/core/openapi/db_data/model/search_records_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -156,7 +156,7 @@ def _from_openapi_data(cls: Type[T], query, *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - fields ([str]): The fields to return in the search results. [optional] # noqa: E501 + fields ([str]): The fields to return in the search results. If not specified, the response will include all fields. [optional] # noqa: E501 rerank (SearchRecordsRequestRerank): [optional] # noqa: E501 """ @@ -245,7 +245,7 @@ def __init__(self, query, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - fields ([str]): The fields to return in the search results. [optional] # noqa: E501 + fields ([str]): The fields to return in the search results. If not specified, the response will include all fields. 
[optional] # noqa: E501 rerank (SearchRecordsRequestRerank): [optional] # noqa: E501 """ diff --git a/pinecone/core/openapi/db_data/model/search_records_request_query.py b/pinecone/core/openapi/db_data/model/search_records_request_query.py index 5cedde38..6898e200 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request_query.py +++ b/pinecone/core/openapi/db_data/model/search_records_request_query.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -121,7 +121,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 """SearchRecordsRequestQuery - a model defined in OpenAPI Args: - top_k (int): The number of results to return for each search. + top_k (int): The number of similar records to return. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -154,7 +154,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). [optional] # noqa: E501 inputs ({str: (bool, dict, float, int, list, str, none_type)}): [optional] # noqa: E501 vector (SearchRecordsVector): [optional] # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. 
[optional] # noqa: E501 @@ -212,7 +212,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 """SearchRecordsRequestQuery - a model defined in OpenAPI Args: - top_k (int): The number of results to return for each search. + top_k (int): The number of similar records to return. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -245,7 +245,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). [optional] # noqa: E501 inputs ({str: (bool, dict, float, int, list, str, none_type)}): [optional] # noqa: E501 vector (SearchRecordsVector): [optional] # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. [optional] # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/search_records_request_rerank.py b/pinecone/core/openapi/db_data/model/search_records_request_rerank.py index 6b6515bb..3138c601 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request_rerank.py +++ b/pinecone/core/openapi/db_data/model/search_records_request_rerank.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/search_records_response.py b/pinecone/core/openapi/db_data/model/search_records_response.py index 440fa71b..95958624 100644 --- a/pinecone/core/openapi/db_data/model/search_records_response.py +++ b/pinecone/core/openapi/db_data/model/search_records_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/search_records_response_result.py b/pinecone/core/openapi/db_data/model/search_records_response_result.py index 87ecf017..87b0a5c6 100644 --- a/pinecone/core/openapi/db_data/model/search_records_response_result.py +++ b/pinecone/core/openapi/db_data/model/search_records_response_result.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/search_records_vector.py b/pinecone/core/openapi/db_data/model/search_records_vector.py index d2466cd7..e5d551a5 100644 --- a/pinecone/core/openapi/db_data/model/search_records_vector.py +++ b/pinecone/core/openapi/db_data/model/search_records_vector.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/search_usage.py b/pinecone/core/openapi/db_data/model/search_usage.py index 565efc9f..dc9217a8 100644 --- a/pinecone/core/openapi/db_data/model/search_usage.py +++ b/pinecone/core/openapi/db_data/model/search_usage.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/search_vector.py b/pinecone/core/openapi/db_data/model/search_vector.py index 417bde50..88f18151 100644 --- a/pinecone/core/openapi/db_data/model/search_vector.py +++ b/pinecone/core/openapi/db_data/model/search_vector.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/single_query_results.py b/pinecone/core/openapi/db_data/model/single_query_results.py index b10e442d..bfc1bb2f 100644 --- a/pinecone/core/openapi/db_data/model/single_query_results.py +++ b/pinecone/core/openapi/db_data/model/single_query_results.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/sparse_values.py b/pinecone/core/openapi/db_data/model/sparse_values.py index 0883d17e..c7f48e74 100644 --- a/pinecone/core/openapi/db_data/model/sparse_values.py +++ b/pinecone/core/openapi/db_data/model/sparse_values.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/start_import_request.py b/pinecone/core/openapi/db_data/model/start_import_request.py index 891de0b2..1a4a9629 100644 --- a/pinecone/core/openapi/db_data/model/start_import_request.py +++ b/pinecone/core/openapi/db_data/model/start_import_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/start_import_response.py b/pinecone/core/openapi/db_data/model/start_import_response.py index ede99271..37da3ba6 100644 --- a/pinecone/core/openapi/db_data/model/start_import_response.py +++ b/pinecone/core/openapi/db_data/model/start_import_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/update_request.py b/pinecone/core/openapi/db_data/model/update_request.py index cfa9f344..bd937ebd 100644 --- a/pinecone/core/openapi/db_data/model/update_request.py +++ b/pinecone/core/openapi/db_data/model/update_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/upsert_record.py b/pinecone/core/openapi/db_data/model/upsert_record.py index b95940eb..8ae7a048 100644 --- a/pinecone/core/openapi/db_data/model/upsert_record.py +++ b/pinecone/core/openapi/db_data/model/upsert_record.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/upsert_request.py b/pinecone/core/openapi/db_data/model/upsert_request.py index c842647a..19058a3b 100644 --- a/pinecone/core/openapi/db_data/model/upsert_request.py +++ b/pinecone/core/openapi/db_data/model/upsert_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/upsert_response.py b/pinecone/core/openapi/db_data/model/upsert_response.py index 2ba3056d..e4da7d30 100644 --- a/pinecone/core/openapi/db_data/model/upsert_response.py +++ b/pinecone/core/openapi/db_data/model/upsert_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/usage.py b/pinecone/core/openapi/db_data/model/usage.py index 7ddc0c28..a8ab222f 100644 --- a/pinecone/core/openapi/db_data/model/usage.py +++ b/pinecone/core/openapi/db_data/model/usage.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/vector.py b/pinecone/core/openapi/db_data/model/vector.py index 167806f9..de061b7d 100644 --- a/pinecone/core/openapi/db_data/model/vector.py +++ b/pinecone/core/openapi/db_data/model/vector.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/db_data/model/vector_values.py b/pinecone/core/openapi/db_data/model/vector_values.py index 7777cdc5..acaade88 100644 --- a/pinecone/core/openapi/db_data/model/vector_values.py +++ b/pinecone/core/openapi/db_data/model/vector_values.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/__init__.py b/pinecone/core/openapi/inference/__init__.py index 3e258e4f..d878080c 100644 --- a/pinecone/core/openapi/inference/__init__.py +++ b/pinecone/core/openapi/inference/__init__.py @@ -7,7 +7,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -27,4 +27,4 @@ from pinecone.openapi_support.exceptions import PineconeApiKeyError from pinecone.openapi_support.exceptions import PineconeApiException -API_VERSION = "2025-01" +API_VERSION = "2025-04" diff --git a/pinecone/core/openapi/inference/api/inference_api.py b/pinecone/core/openapi/inference/api/inference_api.py index 1dc0e266..6a425a03 100644 --- a/pinecone/core/openapi/inference/api/inference_api.py +++ b/pinecone/core/openapi/inference/api/inference_api.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -42,9 +42,9 @@ def __init__(self, api_client=None) -> None: self.api_client = api_client def __embed(self, **kwargs: ExtraOpenApiKwargsTypedDict): - """Embed data # noqa: E501 + """Generate vectors # noqa: E501 - Generate embeddings for input data. For guidance and examples, see [Generate embeddings](https://docs.pinecone.io/guides/inference/generate-embeddings). # noqa: E501 + Generate vector embeddings for input data. This endpoint uses [Pinecone Inference](https://docs.pinecone.io/guides/inference/understanding-inference). For guidance and examples, see [Embed data](https://docs.pinecone.io/guides/inference/generate-embeddings). # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True @@ -188,9 +188,9 @@ def __init__(self, api_client=None) -> None: self.api_client = api_client async def __embed(self, **kwargs): - """Embed data # noqa: E501 + """Generate vectors # noqa: E501 - Generate embeddings for input data. For guidance and examples, see [Generate embeddings](https://docs.pinecone.io/guides/inference/generate-embeddings). # noqa: E501 + Generate vector embeddings for input data. This endpoint uses [Pinecone Inference](https://docs.pinecone.io/guides/inference/understanding-inference). For guidance and examples, see [Embed data](https://docs.pinecone.io/guides/inference/generate-embeddings). # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/dense_embedding.py b/pinecone/core/openapi/inference/model/dense_embedding.py index fe841f99..ec45c7bd 100644 --- a/pinecone/core/openapi/inference/model/dense_embedding.py +++ b/pinecone/core/openapi/inference/model/dense_embedding.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/document.py b/pinecone/core/openapi/inference/model/document.py index 159969aa..99b88161 100644 --- a/pinecone/core/openapi/inference/model/document.py +++ b/pinecone/core/openapi/inference/model/document.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/embed_request.py b/pinecone/core/openapi/inference/model/embed_request.py index 6f91a05a..58e31290 100644 --- a/pinecone/core/openapi/inference/model/embed_request.py +++ b/pinecone/core/openapi/inference/model/embed_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/embed_request_inputs.py b/pinecone/core/openapi/inference/model/embed_request_inputs.py index 44832a2e..1eda05b0 100644 --- a/pinecone/core/openapi/inference/model/embed_request_inputs.py +++ b/pinecone/core/openapi/inference/model/embed_request_inputs.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/embedding.py b/pinecone/core/openapi/inference/model/embedding.py index a7150a27..72fd9b2d 100644 --- a/pinecone/core/openapi/inference/model/embedding.py +++ b/pinecone/core/openapi/inference/model/embedding.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/embeddings_list.py b/pinecone/core/openapi/inference/model/embeddings_list.py index 0b7de1ef..2c4a4edb 100644 --- a/pinecone/core/openapi/inference/model/embeddings_list.py +++ b/pinecone/core/openapi/inference/model/embeddings_list.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/embeddings_list_usage.py b/pinecone/core/openapi/inference/model/embeddings_list_usage.py index 95e3263e..ebfb2ce6 100644 --- a/pinecone/core/openapi/inference/model/embeddings_list_usage.py +++ b/pinecone/core/openapi/inference/model/embeddings_list_usage.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/error_response.py b/pinecone/core/openapi/inference/model/error_response.py index ce18d362..e08f8449 100644 --- a/pinecone/core/openapi/inference/model/error_response.py +++ b/pinecone/core/openapi/inference/model/error_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/error_response_error.py b/pinecone/core/openapi/inference/model/error_response_error.py index e6884825..b864fab6 100644 --- a/pinecone/core/openapi/inference/model/error_response_error.py +++ b/pinecone/core/openapi/inference/model/error_response_error.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/ranked_document.py b/pinecone/core/openapi/inference/model/ranked_document.py index 6223c752..be8699dc 100644 --- a/pinecone/core/openapi/inference/model/ranked_document.py +++ b/pinecone/core/openapi/inference/model/ranked_document.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/rerank_request.py b/pinecone/core/openapi/inference/model/rerank_request.py index dea9ee3c..5727a4f7 100644 --- a/pinecone/core/openapi/inference/model/rerank_request.py +++ b/pinecone/core/openapi/inference/model/rerank_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/rerank_result.py b/pinecone/core/openapi/inference/model/rerank_result.py index 2abeb482..ee9e6fa6 100644 --- a/pinecone/core/openapi/inference/model/rerank_result.py +++ b/pinecone/core/openapi/inference/model/rerank_result.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/rerank_result_usage.py b/pinecone/core/openapi/inference/model/rerank_result_usage.py index 44f45c33..ad24c7a7 100644 --- a/pinecone/core/openapi/inference/model/rerank_result_usage.py +++ b/pinecone/core/openapi/inference/model/rerank_result_usage.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/sparse_embedding.py b/pinecone/core/openapi/inference/model/sparse_embedding.py index f4f97415..f3b34b53 100644 --- a/pinecone/core/openapi/inference/model/sparse_embedding.py +++ b/pinecone/core/openapi/inference/model/sparse_embedding.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/core/openapi/inference/model/vector_type.py b/pinecone/core/openapi/inference/model/vector_type.py index ffeb8706..3985d043 100644 --- a/pinecone/core/openapi/inference/model/vector_type.py +++ b/pinecone/core/openapi/inference/model/vector_type.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ diff --git a/pinecone/openapi_support/api_version.py b/pinecone/openapi_support/api_version.py index fef29cbb..8d89024c 100644 --- a/pinecone/openapi_support/api_version.py +++ b/pinecone/openapi_support/api_version.py @@ -1,5 +1,5 @@ # This file is generated by codegen/build-oas.sh # Do not edit this file manually. -API_VERSION = "2025-01" -APIS_REPO_SHA = "eb79d8ea0c146aebe36c3769e19cbe9618db2d54" +API_VERSION = "2025-04" +APIS_REPO_SHA = "bec62979f009fe5dd3761943896a094dc7fffc4f" From 231f8c4f47563fdae6183ad09febb3980999f5bc Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 7 Mar 2025 10:09:47 -0500 Subject: [PATCH 03/48] Regen --- pinecone/openapi_support/api_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pinecone/openapi_support/api_version.py b/pinecone/openapi_support/api_version.py index 8d89024c..4879ae90 100644 --- a/pinecone/openapi_support/api_version.py +++ b/pinecone/openapi_support/api_version.py @@ -2,4 +2,4 @@ # Do not edit this file manually. 
API_VERSION = "2025-04" -APIS_REPO_SHA = "bec62979f009fe5dd3761943896a094dc7fffc4f" +APIS_REPO_SHA = "483b3885439a51ef831b820bfa621e2c9515834f" From 87020c9760f1160c5f3a5e0a82b1fc4613229a20 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 14 Mar 2025 10:13:51 -0400 Subject: [PATCH 04/48] Regen --- codegen/apis | 2 +- .../db_control/api/manage_indexes_api.py | 1362 +++++++++++++++-- .../openapi/db_control/model/backup_list.py | 274 ++++ .../openapi/db_control/model/backup_model.py | 356 +++++ .../db_control/model/create_backup_request.py | 266 ++++ .../model/create_index_from_backup_request.py | 286 ++++ .../db_control/model/dedicated_spec.py | 266 ++++ .../db_control/model/index_model_spec.py | 6 + .../openapi/db_control/model/index_spec.py | 6 + .../db_control/model/pagination_response.py | 266 ++++ .../db_control/model/restore_job_list.py | 280 ++++ .../db_control/model/restore_job_model.py | 326 ++++ .../openapi/db_control/models/__init__.py | 10 + .../openapi/inference/api/inference_api.py | 268 ++++ .../openapi/inference/model/model_info.py | 333 ++++ .../inference/model/model_info_list.py | 268 ++++ .../inference/model/model_info_metric.py | 284 ++++ .../model/model_info_supported_metrics.py | 290 ++++ .../model/model_info_supported_parameter.py | 302 ++++ .../core/openapi/inference/models/__init__.py | 9 + 20 files changed, 5323 insertions(+), 137 deletions(-) create mode 100644 pinecone/core/openapi/db_control/model/backup_list.py create mode 100644 pinecone/core/openapi/db_control/model/backup_model.py create mode 100644 pinecone/core/openapi/db_control/model/create_backup_request.py create mode 100644 pinecone/core/openapi/db_control/model/create_index_from_backup_request.py create mode 100644 pinecone/core/openapi/db_control/model/dedicated_spec.py create mode 100644 pinecone/core/openapi/db_control/model/pagination_response.py create mode 100644 pinecone/core/openapi/db_control/model/restore_job_list.py create mode 100644 
pinecone/core/openapi/db_control/model/restore_job_model.py create mode 100644 pinecone/core/openapi/inference/model/model_info.py create mode 100644 pinecone/core/openapi/inference/model/model_info_list.py create mode 100644 pinecone/core/openapi/inference/model/model_info_metric.py create mode 100644 pinecone/core/openapi/inference/model/model_info_supported_metrics.py create mode 100644 pinecone/core/openapi/inference/model/model_info_supported_parameter.py diff --git a/codegen/apis b/codegen/apis index 483b3885..ba143abc 160000 --- a/codegen/apis +++ b/codegen/apis @@ -1 +1 @@ -Subproject commit 483b3885439a51ef831b820bfa621e2c9515834f +Subproject commit ba143abc7449abfcf0b6635f1aabff2400dac762 diff --git a/pinecone/core/openapi/db_control/api/manage_indexes_api.py b/pinecone/core/openapi/db_control/api/manage_indexes_api.py index b347be5d..3796fd26 100644 --- a/pinecone/core/openapi/db_control/api/manage_indexes_api.py +++ b/pinecone/core/openapi/db_control/api/manage_indexes_api.py @@ -23,17 +23,25 @@ none_type, validate_and_convert_types, ) +from pinecone.core.openapi.db_control.model.backup_list import BackupList +from pinecone.core.openapi.db_control.model.backup_model import BackupModel from pinecone.core.openapi.db_control.model.collection_list import CollectionList from pinecone.core.openapi.db_control.model.collection_model import CollectionModel from pinecone.core.openapi.db_control.model.configure_index_request import ConfigureIndexRequest +from pinecone.core.openapi.db_control.model.create_backup_request import CreateBackupRequest from pinecone.core.openapi.db_control.model.create_collection_request import CreateCollectionRequest from pinecone.core.openapi.db_control.model.create_index_for_model_request import ( CreateIndexForModelRequest, ) +from pinecone.core.openapi.db_control.model.create_index_from_backup_request import ( + CreateIndexFromBackupRequest, +) from pinecone.core.openapi.db_control.model.create_index_request import 
CreateIndexRequest from pinecone.core.openapi.db_control.model.error_response import ErrorResponse from pinecone.core.openapi.db_control.model.index_list import IndexList from pinecone.core.openapi.db_control.model.index_model import IndexModel +from pinecone.core.openapi.db_control.model.restore_job_list import RestoreJobList +from pinecone.core.openapi.db_control.model.restore_job_model import RestoreJobModel class ManageIndexesApi: @@ -123,6 +131,82 @@ def __configure_index( callable=__configure_index, ) + def __create_backup( + self, index_name, create_backup_request, **kwargs: ExtraOpenApiKwargsTypedDict + ): + """Create a backup of an index # noqa: E501 + + Create a backup of an index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_backup(index_name, create_backup_request, async_req=True) + >>> result = thread.get() + + Args: + index_name (str): Name of the index to backup + create_backup_request (CreateBackupRequest): The desired configuration for the backup. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + BackupModel + If the method is called asynchronously, returns the request + thread. 
+ """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["index_name"] = index_name + kwargs["create_backup_request"] = create_backup_request + return self.call_with_http_info(**kwargs) + + self.create_backup = _Endpoint( + settings={ + "response_type": (BackupModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}/backups", + "operation_id": "create_backup", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["index_name", "create_backup_request"], + "required": ["index_name", "create_backup_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "index_name": (str,), + "create_backup_request": (CreateBackupRequest,), + }, + "attribute_map": {"index_name": "index_name"}, + "location_map": {"index_name": "path", "create_backup_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__create_backup, + ) + def __create_collection( self, create_collection_request, **kwargs: ExtraOpenApiKwargsTypedDict ): @@ -334,18 +418,21 @@ def __create_index_for_model( callable=__create_index_for_model, ) - def __delete_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTypedDict): - """Delete a collection # noqa: E501 + def __create_index_from_backup( + self, backup_id, create_index_from_backup_request, **kwargs: ExtraOpenApiKwargsTypedDict + ): + """Create an index from a backup # noqa: E501 - Delete an existing collection. Serverless indexes do not support collections. # noqa: E501 + Create an index from a backup. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_collection(collection_name, async_req=True) + >>> thread = api.create_index_from_backup(backup_id, create_index_from_backup_request, async_req=True) >>> result = thread.get() Args: - collection_name (str): The name of the collection. + backup_id (str): The ID of the backup to create an index from. + create_index_from_backup_request (CreateIndexFromBackupRequest): The desired configuration for the index created from a backup. Keyword Args: _return_http_data_only (bool): response data without head status @@ -371,21 +458,22 @@ def __delete_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTyped thread. """ kwargs = self._process_openapi_kwargs(kwargs) - kwargs["collection_name"] = collection_name + kwargs["backup_id"] = backup_id + kwargs["create_index_from_backup_request"] = create_index_from_backup_request return self.call_with_http_info(**kwargs) - self.delete_collection = _Endpoint( + self.create_index_from_backup = _Endpoint( settings={ "response_type": None, "auth": ["ApiKeyAuth"], - "endpoint_path": "/collections/{collection_name}", - "operation_id": "delete_collection", - "http_method": "DELETE", + "endpoint_path": "/backups/{backup_id}/create-index", + "operation_id": "create_index_from_backup", + "http_method": "POST", "servers": None, }, params_map={ - "all": ["collection_name"], - "required": ["collection_name"], + "all": ["backup_id", "create_index_from_backup_request"], + "required": ["backup_id", "create_index_from_backup_request"], "nullable": [], "enum": [], "validation": [], @@ -393,28 +481,31 @@ def __delete_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTyped root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": {"collection_name": (str,)}, - "attribute_map": {"collection_name": "collection_name"}, - "location_map": {"collection_name": "path"}, + "openapi_types": { + "backup_id": (str,), + 
"create_index_from_backup_request": (CreateIndexFromBackupRequest,), + }, + "attribute_map": {"backup_id": "backup_id"}, + "location_map": {"backup_id": "path", "create_index_from_backup_request": "body"}, "collection_format_map": {}, }, - headers_map={"accept": ["application/json"], "content_type": []}, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__delete_collection, + callable=__create_index_from_backup, ) - def __delete_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): - """Delete an index # noqa: E501 + def __delete_backup(self, backup_id, **kwargs: ExtraOpenApiKwargsTypedDict): + """Delete a backup # noqa: E501 - Delete an existing index. # noqa: E501 + Delete a backup. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_index(index_name, async_req=True) + >>> thread = api.delete_backup(backup_id, async_req=True) >>> result = thread.get() Args: - index_name (str): The name of the index to delete. + backup_id (str): The ID of the backup to delete. Keyword Args: _return_http_data_only (bool): response data without head status @@ -440,21 +531,21 @@ def __delete_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): thread. 
""" kwargs = self._process_openapi_kwargs(kwargs) - kwargs["index_name"] = index_name + kwargs["backup_id"] = backup_id return self.call_with_http_info(**kwargs) - self.delete_index = _Endpoint( + self.delete_backup = _Endpoint( settings={ "response_type": None, "auth": ["ApiKeyAuth"], - "endpoint_path": "/indexes/{index_name}", - "operation_id": "delete_index", + "endpoint_path": "/backups/{backup_id}", + "operation_id": "delete_backup", "http_method": "DELETE", "servers": None, }, params_map={ - "all": ["index_name"], - "required": ["index_name"], + "all": ["backup_id"], + "required": ["backup_id"], "nullable": [], "enum": [], "validation": [], @@ -462,28 +553,28 @@ def __delete_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": {"index_name": (str,)}, - "attribute_map": {"index_name": "index_name"}, - "location_map": {"index_name": "path"}, + "openapi_types": {"backup_id": (str,)}, + "attribute_map": {"backup_id": "backup_id"}, + "location_map": {"backup_id": "path"}, "collection_format_map": {}, }, headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__delete_index, + callable=__delete_backup, ) - def __describe_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTypedDict): - """Describe a collection # noqa: E501 + def __delete_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTypedDict): + """Delete a collection # noqa: E501 - Get a description of a collection. Serverless indexes do not support collections. # noqa: E501 + Delete an existing collection. Serverless indexes do not support collections. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.describe_collection(collection_name, async_req=True) + >>> thread = api.delete_collection(collection_name, async_req=True) >>> result = thread.get() Args: - collection_name (str): The name of the collection to be described. + collection_name (str): The name of the collection. Keyword Args: _return_http_data_only (bool): response data without head status @@ -504,7 +595,7 @@ def __describe_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTyp async_req (bool): execute request asynchronously Returns: - CollectionModel + None If the method is called asynchronously, returns the request thread. """ @@ -512,13 +603,13 @@ def __describe_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTyp kwargs["collection_name"] = collection_name return self.call_with_http_info(**kwargs) - self.describe_collection = _Endpoint( + self.delete_collection = _Endpoint( settings={ - "response_type": (CollectionModel,), + "response_type": None, "auth": ["ApiKeyAuth"], "endpoint_path": "/collections/{collection_name}", - "operation_id": "describe_collection", - "http_method": "GET", + "operation_id": "delete_collection", + "http_method": "DELETE", "servers": None, }, params_map={ @@ -538,21 +629,21 @@ def __describe_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTyp }, headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__describe_collection, + callable=__delete_collection, ) - def __describe_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): - """Describe an index # noqa: E501 + def __delete_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): + """Delete an index # noqa: E501 - Get a description of an index. # noqa: E501 + Delete an existing index. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.describe_index(index_name, async_req=True) + >>> thread = api.delete_index(index_name, async_req=True) >>> result = thread.get() Args: - index_name (str): The name of the index to be described. + index_name (str): The name of the index to delete. Keyword Args: _return_http_data_only (bool): response data without head status @@ -573,7 +664,7 @@ def __describe_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): async_req (bool): execute request asynchronously Returns: - IndexModel + None If the method is called asynchronously, returns the request thread. """ @@ -581,13 +672,13 @@ def __describe_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): kwargs["index_name"] = index_name return self.call_with_http_info(**kwargs) - self.describe_index = _Endpoint( + self.delete_index = _Endpoint( settings={ - "response_type": (IndexModel,), + "response_type": None, "auth": ["ApiKeyAuth"], "endpoint_path": "/indexes/{index_name}", - "operation_id": "describe_index", - "http_method": "GET", + "operation_id": "delete_index", + "http_method": "DELETE", "servers": None, }, params_map={ @@ -607,19 +698,21 @@ def __describe_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): }, headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__describe_index, + callable=__delete_index, ) - def __list_collections(self, **kwargs: ExtraOpenApiKwargsTypedDict): - """List collections # noqa: E501 + def __describe_backup(self, backup_id, **kwargs: ExtraOpenApiKwargsTypedDict): + """Describe a backup # noqa: E501 - List all collections in a project. Serverless indexes do not support collections. # noqa: E501 + Get a description of a backup. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_collections(async_req=True) + >>> thread = api.describe_backup(backup_id, async_req=True) >>> result = thread.get() + Args: + backup_id (str): The ID of the backup to describe. Keyword Args: _return_http_data_only (bool): response data without head status @@ -640,46 +733,55 @@ def __list_collections(self, **kwargs: ExtraOpenApiKwargsTypedDict): async_req (bool): execute request asynchronously Returns: - CollectionList + BackupModel If the method is called asynchronously, returns the request thread. """ kwargs = self._process_openapi_kwargs(kwargs) + kwargs["backup_id"] = backup_id return self.call_with_http_info(**kwargs) - self.list_collections = _Endpoint( + self.describe_backup = _Endpoint( settings={ - "response_type": (CollectionList,), + "response_type": (BackupModel,), "auth": ["ApiKeyAuth"], - "endpoint_path": "/collections", - "operation_id": "list_collections", + "endpoint_path": "/backups/{backup_id}", + "operation_id": "describe_backup", "http_method": "GET", "servers": None, }, - params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + params_map={ + "all": ["backup_id"], + "required": ["backup_id"], + "nullable": [], + "enum": [], + "validation": [], + }, root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": {}, - "attribute_map": {}, - "location_map": {}, + "openapi_types": {"backup_id": (str,)}, + "attribute_map": {"backup_id": "backup_id"}, + "location_map": {"backup_id": "path"}, "collection_format_map": {}, }, headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__list_collections, + callable=__describe_backup, ) - def __list_indexes(self, **kwargs: ExtraOpenApiKwargsTypedDict): - """List indexes # noqa: E501 + def __describe_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTypedDict): + """Describe a collection # noqa: E501 - List all indexes in a 
project. # noqa: E501 + Get a description of a collection. Serverless indexes do not support collections. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_indexes(async_req=True) + >>> thread = api.describe_collection(collection_name, async_req=True) >>> result = thread.get() + Args: + collection_name (str): The name of the collection to be described. Keyword Args: _return_http_data_only (bool): response data without head status @@ -700,57 +802,55 @@ def __list_indexes(self, **kwargs: ExtraOpenApiKwargsTypedDict): async_req (bool): execute request asynchronously Returns: - IndexList + CollectionModel If the method is called asynchronously, returns the request thread. """ kwargs = self._process_openapi_kwargs(kwargs) + kwargs["collection_name"] = collection_name return self.call_with_http_info(**kwargs) - self.list_indexes = _Endpoint( + self.describe_collection = _Endpoint( settings={ - "response_type": (IndexList,), + "response_type": (CollectionModel,), "auth": ["ApiKeyAuth"], - "endpoint_path": "/indexes", - "operation_id": "list_indexes", + "endpoint_path": "/collections/{collection_name}", + "operation_id": "describe_collection", "http_method": "GET", "servers": None, }, - params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + params_map={ + "all": ["collection_name"], + "required": ["collection_name"], + "nullable": [], + "enum": [], + "validation": [], + }, root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": {}, - "attribute_map": {}, - "location_map": {}, + "openapi_types": {"collection_name": (str,)}, + "attribute_map": {"collection_name": "collection_name"}, + "location_map": {"collection_name": "path"}, "collection_format_map": {}, }, headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__list_indexes, + callable=__describe_collection, ) + def 
__describe_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): + """Describe an index # noqa: E501 -class AsyncioManageIndexesApi: - """NOTE: This class is @generated using OpenAPI - - Do not edit the class manually. - """ - - def __init__(self, api_client=None) -> None: - if api_client is None: - api_client = AsyncioApiClient() - self.api_client = api_client - - async def __configure_index(self, index_name, configure_index_request, **kwargs): - """Configure an index # noqa: E501 - - Configure an existing index. For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501 + Get a description of an index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.describe_index(index_name, async_req=True) + >>> result = thread.get() Args: - index_name (str): The name of the index to configure. - configure_index_request (ConfigureIndexRequest): The desired pod size and replica configuration for the index. + index_name (str): The name of the index to be described. 
Keyword Args: _return_http_data_only (bool): response data without head status @@ -768,27 +868,29 @@ async def __configure_index(self, index_name, configure_index_request, **kwargs) _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. + async_req (bool): execute request asynchronously Returns: IndexModel + If the method is called asynchronously, returns the request + thread. """ - self._process_openapi_kwargs(kwargs) + kwargs = self._process_openapi_kwargs(kwargs) kwargs["index_name"] = index_name - kwargs["configure_index_request"] = configure_index_request - return await self.call_with_http_info(**kwargs) + return self.call_with_http_info(**kwargs) - self.configure_index = _AsyncioEndpoint( + self.describe_index = _Endpoint( settings={ "response_type": (IndexModel,), "auth": ["ApiKeyAuth"], "endpoint_path": "/indexes/{index_name}", - "operation_id": "configure_index", - "http_method": "PATCH", + "operation_id": "describe_index", + "http_method": "GET", "servers": None, }, params_map={ - "all": ["index_name", "configure_index_request"], - "required": ["index_name", "configure_index_request"], + "all": ["index_name"], + "required": ["index_name"], "nullable": [], "enum": [], "validation": [], @@ -796,23 +898,566 @@ async def __configure_index(self, index_name, configure_index_request, **kwargs) root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": { - "index_name": (str,), - "configure_index_request": (ConfigureIndexRequest,), - }, + "openapi_types": {"index_name": (str,)}, "attribute_map": {"index_name": "index_name"}, - "location_map": {"index_name": "path", "configure_index_request": "body"}, + "location_map": {"index_name": "path"}, "collection_format_map": {}, }, - headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__configure_index, + 
callable=__describe_index, ) - async def __create_collection(self, create_collection_request, **kwargs): - """Create a collection # noqa: E501 + def __describe_restore_job(self, job_id, **kwargs: ExtraOpenApiKwargsTypedDict): + """Describe a restore job # noqa: E501 - Create a Pinecone collection. Serverless indexes do not support collections. # noqa: E501 + Get a description of a restore job. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.describe_restore_job(job_id, async_req=True) + >>> result = thread.get() + + Args: + job_id (str): The ID of the restore job to describe. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + RestoreJobModel + If the method is called asynchronously, returns the request + thread. 
+ """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["job_id"] = job_id + return self.call_with_http_info(**kwargs) + + self.describe_restore_job = _Endpoint( + settings={ + "response_type": (RestoreJobModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/restore-jobs/{job_id}", + "operation_id": "describe_restore_job", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["job_id"], + "required": ["job_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"job_id": (str,)}, + "attribute_map": {"job_id": "job_id"}, + "location_map": {"job_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__describe_restore_job, + ) + + def __list_collections(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """List collections # noqa: E501 + + List all collections in a project. Serverless indexes do not support collections. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_collections(async_req=True) + >>> result = thread.get() + + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. 
+ Default is True. + async_req (bool): execute request asynchronously + + Returns: + CollectionList + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_collections = _Endpoint( + settings={ + "response_type": (CollectionList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/collections", + "operation_id": "list_collections", + "http_method": "GET", + "servers": None, + }, + params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {}, + "attribute_map": {}, + "location_map": {}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_collections, + ) + + def __list_index_backups(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): + """List backups for an index # noqa: E501 + + List all backups for an index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_index_backups(index_name, async_req=True) + >>> result = thread.get() + + Args: + index_name (str): Name of the backed up index + + Keyword Args: + limit (int): The number of results to return per page. [optional] if omitted the server will use the default value of 10. + pagination_token (str): The token to use to retrieve the next page of results. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. 
It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + BackupList + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["index_name"] = index_name + return self.call_with_http_info(**kwargs) + + self.list_index_backups = _Endpoint( + settings={ + "response_type": (BackupList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}/backups", + "operation_id": "list_index_backups", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["index_name", "limit", "pagination_token"], + "required": ["index_name"], + "nullable": [], + "enum": [], + "validation": ["limit"], + }, + root_map={ + "validations": {("limit",): {"inclusive_maximum": 100, "inclusive_minimum": 1}}, + "allowed_values": {}, + "openapi_types": { + "index_name": (str,), + "limit": (int,), + "pagination_token": (str,), + }, + "attribute_map": { + "index_name": "index_name", + "limit": "limit", + "pagination_token": "paginationToken", + }, + "location_map": { + "index_name": "path", + "limit": "query", + "pagination_token": "query", + }, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_index_backups, + ) + + def __list_indexes(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """List indexes # noqa: E501 + + List all indexes in a project. # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_indexes(async_req=True) + >>> result = thread.get() + + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + IndexList + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_indexes = _Endpoint( + settings={ + "response_type": (IndexList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes", + "operation_id": "list_indexes", + "http_method": "GET", + "servers": None, + }, + params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {}, + "attribute_map": {}, + "location_map": {}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_indexes, + ) + + def __list_project_backups(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """List backups for all indexes in a project # noqa: E501 + + List all backups for a project. # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_project_backups(async_req=True) + >>> result = thread.get() + + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + BackupList + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_project_backups = _Endpoint( + settings={ + "response_type": (BackupList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/backups", + "operation_id": "list_project_backups", + "http_method": "GET", + "servers": None, + }, + params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {}, + "attribute_map": {}, + "location_map": {}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_project_backups, + ) + + def __list_restore_jobs(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """List restore jobs # noqa: E501 + + List all restore jobs for a project. # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_restore_jobs(async_req=True) + >>> result = thread.get() + + + Keyword Args: + limit (int): The number of results to return per page. [optional] if omitted the server will use the default value of 10. + pagination_token (str): The token to use to retrieve the next page of results. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + RestoreJobList + If the method is called asynchronously, returns the request + thread. 
+ """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_restore_jobs = _Endpoint( + settings={ + "response_type": (RestoreJobList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/restore-jobs", + "operation_id": "list_restore_jobs", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["limit", "pagination_token"], + "required": [], + "nullable": [], + "enum": [], + "validation": ["limit"], + }, + root_map={ + "validations": {("limit",): {"inclusive_maximum": 100, "inclusive_minimum": 1}}, + "allowed_values": {}, + "openapi_types": {"limit": (int,), "pagination_token": (str,)}, + "attribute_map": {"limit": "limit", "pagination_token": "paginationToken"}, + "location_map": {"limit": "query", "pagination_token": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_restore_jobs, + ) + + +class AsyncioManageIndexesApi: + """NOTE: This class is @generated using OpenAPI + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = AsyncioApiClient() + self.api_client = api_client + + async def __configure_index(self, index_name, configure_index_request, **kwargs): + """Configure an index # noqa: E501 + + Configure an existing index. For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. 
For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501 + + + Args: + index_name (str): The name of the index to configure. + configure_index_request (ConfigureIndexRequest): The desired pod size and replica configuration for the index. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
+ + Returns: + IndexModel + """ + self._process_openapi_kwargs(kwargs) + kwargs["index_name"] = index_name + kwargs["configure_index_request"] = configure_index_request + return await self.call_with_http_info(**kwargs) + + self.configure_index = _AsyncioEndpoint( + settings={ + "response_type": (IndexModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}", + "operation_id": "configure_index", + "http_method": "PATCH", + "servers": None, + }, + params_map={ + "all": ["index_name", "configure_index_request"], + "required": ["index_name", "configure_index_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "index_name": (str,), + "configure_index_request": (ConfigureIndexRequest,), + }, + "attribute_map": {"index_name": "index_name"}, + "location_map": {"index_name": "path", "configure_index_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__configure_index, + ) + + async def __create_backup(self, index_name, create_backup_request, **kwargs): + """Create a backup of an index # noqa: E501 + + Create a backup of an index. # noqa: E501 + + + Args: + index_name (str): Name of the index to backup + create_backup_request (CreateBackupRequest): The desired configuration for the backup. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. 
+ _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + BackupModel + """ + self._process_openapi_kwargs(kwargs) + kwargs["index_name"] = index_name + kwargs["create_backup_request"] = create_backup_request + return await self.call_with_http_info(**kwargs) + + self.create_backup = _AsyncioEndpoint( + settings={ + "response_type": (BackupModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}/backups", + "operation_id": "create_backup", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["index_name", "create_backup_request"], + "required": ["index_name", "create_backup_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "index_name": (str,), + "create_backup_request": (CreateBackupRequest,), + }, + "attribute_map": {"index_name": "index_name"}, + "location_map": {"index_name": "path", "create_backup_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__create_backup, + ) + + async def __create_collection(self, create_collection_request, **kwargs): + """Create a collection # noqa: E501 + + Create a Pinecone collection. Serverless indexes do not support collections. 
# noqa: E501 Args: @@ -930,17 +1575,148 @@ async def __create_index(self, create_index_request, **kwargs): }, headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__create_index, + callable=__create_index, + ) + + async def __create_index_for_model(self, create_index_for_model_request, **kwargs): + """Create an index with integrated embedding # noqa: E501 + + Create an index with integrated embedding. With this type of index, you provide source text, and Pinecone uses a [hosted embedding model](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) to convert the text automatically during [upsert](https://docs.pinecone.io/reference/api/2025-01/data-plane/upsert_records) and [search](https://docs.pinecone.io/reference/api/2025-01/data-plane/search_records). For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). # noqa: E501 + + + Args: + create_index_for_model_request (CreateIndexForModelRequest): The desired configuration for the index and associated embedding model. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
+ + Returns: + IndexModel + """ + self._process_openapi_kwargs(kwargs) + kwargs["create_index_for_model_request"] = create_index_for_model_request + return await self.call_with_http_info(**kwargs) + + self.create_index_for_model = _AsyncioEndpoint( + settings={ + "response_type": (IndexModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/create-for-model", + "operation_id": "create_index_for_model", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["create_index_for_model_request"], + "required": ["create_index_for_model_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"create_index_for_model_request": (CreateIndexForModelRequest,)}, + "attribute_map": {}, + "location_map": {"create_index_for_model_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__create_index_for_model, + ) + + async def __create_index_from_backup( + self, backup_id, create_index_from_backup_request, **kwargs + ): + """Create an index from a backup # noqa: E501 + + Create an index from a backup. # noqa: E501 + + + Args: + backup_id (str): The ID of the backup to create an index from. + create_index_from_backup_request (CreateIndexFromBackupRequest): The desired configuration for the index created from a backup. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. 
+ _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + None + """ + self._process_openapi_kwargs(kwargs) + kwargs["backup_id"] = backup_id + kwargs["create_index_from_backup_request"] = create_index_from_backup_request + return await self.call_with_http_info(**kwargs) + + self.create_index_from_backup = _AsyncioEndpoint( + settings={ + "response_type": None, + "auth": ["ApiKeyAuth"], + "endpoint_path": "/backups/{backup_id}/create-index", + "operation_id": "create_index_from_backup", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["backup_id", "create_index_from_backup_request"], + "required": ["backup_id", "create_index_from_backup_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "backup_id": (str,), + "create_index_from_backup_request": (CreateIndexFromBackupRequest,), + }, + "attribute_map": {"backup_id": "backup_id"}, + "location_map": {"backup_id": "path", "create_index_from_backup_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__create_index_from_backup, ) - async def __create_index_for_model(self, create_index_for_model_request, **kwargs): - """Create an index with integrated embedding # noqa: E501 + async def __delete_backup(self, backup_id, **kwargs): + """Delete a backup # noqa: E501 - Create an index with integrated embedding. 
With this type of index, you provide source text, and Pinecone uses a [hosted embedding model](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) to convert the text automatically during [upsert](https://docs.pinecone.io/reference/api/2025-01/data-plane/upsert_records) and [search](https://docs.pinecone.io/reference/api/2025-01/data-plane/search_records). For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). # noqa: E501 + Delete a backup. # noqa: E501 Args: - create_index_for_model_request (CreateIndexForModelRequest): The desired configuration for the index and associated embedding model. + backup_id (str): The ID of the backup to delete. Keyword Args: _return_http_data_only (bool): response data without head status @@ -960,24 +1736,24 @@ async def __create_index_for_model(self, create_index_for_model_request, **kwarg Default is True. Returns: - IndexModel + None """ self._process_openapi_kwargs(kwargs) - kwargs["create_index_for_model_request"] = create_index_for_model_request + kwargs["backup_id"] = backup_id return await self.call_with_http_info(**kwargs) - self.create_index_for_model = _AsyncioEndpoint( + self.delete_backup = _AsyncioEndpoint( settings={ - "response_type": (IndexModel,), + "response_type": None, "auth": ["ApiKeyAuth"], - "endpoint_path": "/indexes/create-for-model", - "operation_id": "create_index_for_model", - "http_method": "POST", + "endpoint_path": "/backups/{backup_id}", + "operation_id": "delete_backup", + "http_method": "DELETE", "servers": None, }, params_map={ - "all": ["create_index_for_model_request"], - "required": ["create_index_for_model_request"], + "all": ["backup_id"], + "required": ["backup_id"], "nullable": [], "enum": [], "validation": [], @@ -985,14 +1761,14 @@ async def __create_index_for_model(self, create_index_for_model_request, **kwarg root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": 
{"create_index_for_model_request": (CreateIndexForModelRequest,)}, - "attribute_map": {}, - "location_map": {"create_index_for_model_request": "body"}, + "openapi_types": {"backup_id": (str,)}, + "attribute_map": {"backup_id": "backup_id"}, + "location_map": {"backup_id": "path"}, "collection_format_map": {}, }, - headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__create_index_for_model, + callable=__delete_backup, ) async def __delete_collection(self, collection_name, **kwargs): @@ -1119,6 +1895,68 @@ async def __delete_index(self, index_name, **kwargs): callable=__delete_index, ) + async def __describe_backup(self, backup_id, **kwargs): + """Describe a backup # noqa: E501 + + Get a description of a backup. # noqa: E501 + + + Args: + backup_id (str): The ID of the backup to describe. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
+ + Returns: + BackupModel + """ + self._process_openapi_kwargs(kwargs) + kwargs["backup_id"] = backup_id + return await self.call_with_http_info(**kwargs) + + self.describe_backup = _AsyncioEndpoint( + settings={ + "response_type": (BackupModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/backups/{backup_id}", + "operation_id": "describe_backup", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["backup_id"], + "required": ["backup_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"backup_id": (str,)}, + "attribute_map": {"backup_id": "backup_id"}, + "location_map": {"backup_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__describe_backup, + ) + async def __describe_collection(self, collection_name, **kwargs): """Describe a collection # noqa: E501 @@ -1243,6 +2081,68 @@ async def __describe_index(self, index_name, **kwargs): callable=__describe_index, ) + async def __describe_restore_job(self, job_id, **kwargs): + """Describe a restore job # noqa: E501 + + Get a description of a restore job. # noqa: E501 + + + Args: + job_id (str): The ID of the restore job to describe. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. 
+ _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + RestoreJobModel + """ + self._process_openapi_kwargs(kwargs) + kwargs["job_id"] = job_id + return await self.call_with_http_info(**kwargs) + + self.describe_restore_job = _AsyncioEndpoint( + settings={ + "response_type": (RestoreJobModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/restore-jobs/{job_id}", + "operation_id": "describe_restore_job", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["job_id"], + "required": ["job_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"job_id": (str,)}, + "attribute_map": {"job_id": "job_id"}, + "location_map": {"job_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__describe_restore_job, + ) + async def __list_collections(self, **kwargs): """List collections # noqa: E501 @@ -1296,6 +2196,82 @@ async def __list_collections(self, **kwargs): callable=__list_collections, ) + async def __list_index_backups(self, index_name, **kwargs): + """List backups for an index # noqa: E501 + + List all backups for an index. # noqa: E501 + + + Args: + index_name (str): Name of the backed up index + + Keyword Args: + limit (int): The number of results to return per page. [optional] if omitted the server will use the default value of 10. + pagination_token (str): The token to use to retrieve the next page of results. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. 
If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + BackupList + """ + self._process_openapi_kwargs(kwargs) + kwargs["index_name"] = index_name + return await self.call_with_http_info(**kwargs) + + self.list_index_backups = _AsyncioEndpoint( + settings={ + "response_type": (BackupList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}/backups", + "operation_id": "list_index_backups", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["index_name", "limit", "pagination_token"], + "required": ["index_name"], + "nullable": [], + "enum": [], + "validation": ["limit"], + }, + root_map={ + "validations": {("limit",): {"inclusive_maximum": 100, "inclusive_minimum": 1}}, + "allowed_values": {}, + "openapi_types": { + "index_name": (str,), + "limit": (int,), + "pagination_token": (str,), + }, + "attribute_map": { + "index_name": "index_name", + "limit": "limit", + "pagination_token": "paginationToken", + }, + "location_map": { + "index_name": "path", + "limit": "query", + "pagination_token": "query", + }, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_index_backups, + ) + async def __list_indexes(self, **kwargs): """List indexes # noqa: E501 @@ -1348,3 +2324,117 @@ async def __list_indexes(self, **kwargs): api_client=api_client, callable=__list_indexes, ) + + async def __list_project_backups(self, **kwargs): + """List backups for all indexes in a project # noqa: E501 + + List all backups for a project. 
# noqa: E501 + + + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + BackupList + """ + self._process_openapi_kwargs(kwargs) + return await self.call_with_http_info(**kwargs) + + self.list_project_backups = _AsyncioEndpoint( + settings={ + "response_type": (BackupList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/backups", + "operation_id": "list_project_backups", + "http_method": "GET", + "servers": None, + }, + params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {}, + "attribute_map": {}, + "location_map": {}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_project_backups, + ) + + async def __list_restore_jobs(self, **kwargs): + """List restore jobs # noqa: E501 + + List all restore jobs for a project. # noqa: E501 + + + + Keyword Args: + limit (int): The number of results to return per page. [optional] if omitted the server will use the default value of 10. + pagination_token (str): The token to use to retrieve the next page of results. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. 
+ _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + RestoreJobList + """ + self._process_openapi_kwargs(kwargs) + return await self.call_with_http_info(**kwargs) + + self.list_restore_jobs = _AsyncioEndpoint( + settings={ + "response_type": (RestoreJobList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/restore-jobs", + "operation_id": "list_restore_jobs", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["limit", "pagination_token"], + "required": [], + "nullable": [], + "enum": [], + "validation": ["limit"], + }, + root_map={ + "validations": {("limit",): {"inclusive_maximum": 100, "inclusive_minimum": 1}}, + "allowed_values": {}, + "openapi_types": {"limit": (int,), "pagination_token": (str,)}, + "attribute_map": {"limit": "limit", "pagination_token": "paginationToken"}, + "location_map": {"limit": "query", "pagination_token": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_restore_jobs, + ) diff --git a/pinecone/core/openapi/db_control/model/backup_list.py b/pinecone/core/openapi/db_control/model/backup_list.py new file mode 100644 index 00000000..89777f75 --- /dev/null +++ b/pinecone/core/openapi/db_control/model/backup_list.py @@ -0,0 +1,274 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions 
of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.db_control.model.backup_model import BackupModel + from pinecone.core.openapi.db_control.model.pagination_response import PaginationResponse + + globals()["BackupModel"] = BackupModel + globals()["PaginationResponse"] = PaginationResponse + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="BackupList") + + +class BackupList(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. 
+ additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "data": ([BackupModel],), # noqa: E501 + "pagination": (PaginationResponse,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "data": "data", # noqa: E501 + "pagination": "pagination", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """BackupList - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. 
+ False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + data ([BackupModel]): [optional] # noqa: E501 + pagination (PaginationResponse): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """BackupList - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. 
+ When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + data ([BackupModel]): [optional] # noqa: E501 + pagination (PaginationResponse): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/openapi/db_control/model/backup_model.py b/pinecone/core/openapi/db_control/model/backup_model.py new file mode 100644 index 00000000..fd7da209 --- /dev/null +++ b/pinecone/core/openapi/db_control/model/backup_model.py @@ -0,0 +1,356 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.db_control.model.index_tags import IndexTags + + globals()["IndexTags"] = IndexTags + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="BackupModel") + + +class BackupModel(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). 
The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = { + ("metric",): {"COSINE": "cosine", "EUCLIDEAN": "euclidean", "DOTPRODUCT": "dotproduct"} + } + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("dimension",): {"inclusive_maximum": 20000, "inclusive_minimum": 1} + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ """ + lazy_import() + return { + "backup_id": (str,), # noqa: E501 + "source_index_name": (str,), # noqa: E501 + "source_index_id": (str,), # noqa: E501 + "status": (str,), # noqa: E501 + "cloud": (str,), # noqa: E501 + "region": (str,), # noqa: E501 + "name": (str,), # noqa: E501 + "description": (str,), # noqa: E501 + "dimension": (int,), # noqa: E501 + "metric": (str,), # noqa: E501 + "record_count": (int,), # noqa: E501 + "namespace_count": (int,), # noqa: E501 + "size_bytes": (int,), # noqa: E501 + "tags": (IndexTags,), # noqa: E501 + "created_at": (str,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "backup_id": "backup_id", # noqa: E501 + "source_index_name": "source_index_name", # noqa: E501 + "source_index_id": "source_index_id", # noqa: E501 + "status": "status", # noqa: E501 + "cloud": "cloud", # noqa: E501 + "region": "region", # noqa: E501 + "name": "name", # noqa: E501 + "description": "description", # noqa: E501 + "dimension": "dimension", # noqa: E501 + "metric": "metric", # noqa: E501 + "record_count": "record_count", # noqa: E501 + "namespace_count": "namespace_count", # noqa: E501 + "size_bytes": "size_bytes", # noqa: E501 + "tags": "tags", # noqa: E501 + "created_at": "created_at", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data( + cls: Type[T], + backup_id, + source_index_name, + source_index_id, + status, + cloud, + region, + *args, + **kwargs, + ) -> T: # noqa: E501 + """BackupModel - a model defined in OpenAPI + + Args: + backup_id (str): Unique identifier for the backup. + source_index_name (str): Name of the index from which the backup was taken. + source_index_id (str): ID of the index. + status (str): Current status of the backup (e.g., Initializing, Ready, Failed). 
+ cloud (str): Cloud provider where the backup is stored. + region (str): Cloud region where the backup is stored. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): Optional user-defined name for the backup. [optional] # noqa: E501 + description (str): Optional description providing context for the backup. [optional] # noqa: E501 + dimension (int): The dimensions of the vectors to be inserted in the index. [optional] # noqa: E501 + metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If the 'vector_type' is 'sparse', the metric must be 'dotproduct'. 
If the `vector_type` is `dense`, the metric defaults to 'cosine'. [optional] # noqa: E501 + record_count (int): Total number of records in the backup. [optional] # noqa: E501 + namespace_count (int): Number of namespaces in the backup. [optional] # noqa: E501 + size_bytes (int): Size of the backup in bytes. [optional] # noqa: E501 + tags (IndexTags): [optional] # noqa: E501 + created_at (str): Timestamp when the backup was created. [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.backup_id = backup_id + self.source_index_name = source_index_name + self.source_index_id = source_index_id + self.status = status + self.cloud = cloud + self.region = region + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__( + self, backup_id, source_index_name, source_index_id, status, cloud, region, *args, **kwargs + ) -> None: # noqa: E501 + """BackupModel - a model defined in OpenAPI + + Args: + backup_id (str): Unique identifier for the backup. + source_index_name (str): Name of the index from which the backup was taken. + source_index_id (str): ID of the index. + status (str): Current status of the backup (e.g., Initializing, Ready, Failed). + cloud (str): Cloud provider where the backup is stored. + region (str): Cloud region where the backup is stored. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. 
+ For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): Optional user-defined name for the backup. [optional] # noqa: E501 + description (str): Optional description providing context for the backup. [optional] # noqa: E501 + dimension (int): The dimensions of the vectors to be inserted in the index. [optional] # noqa: E501 + metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If the 'vector_type' is 'sparse', the metric must be 'dotproduct'. If the `vector_type` is `dense`, the metric defaults to 'cosine'. [optional] # noqa: E501 + record_count (int): Total number of records in the backup. [optional] # noqa: E501 + namespace_count (int): Number of namespaces in the backup. [optional] # noqa: E501 + size_bytes (int): Size of the backup in bytes. [optional] # noqa: E501 + tags (IndexTags): [optional] # noqa: E501 + created_at (str): Timestamp when the backup was created. [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.backup_id = backup_id + self.source_index_name = source_index_name + self.source_index_id = source_index_id + self.status = status + self.cloud = cloud + self.region = region + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/create_backup_request.py b/pinecone/core/openapi/db_control/model/create_backup_request.py new file mode 100644 index 00000000..106e9ab8 --- /dev/null +++ b/pinecone/core/openapi/db_control/model/create_backup_request.py @@ -0,0 +1,266 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="CreateBackupRequest") + + +class CreateBackupRequest(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("name",): {"max_length": 45, "min_length": 1} + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "name": (str,), # noqa: E501 + "description": (str,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "name": "name", # noqa: E501 + "description": "description", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """CreateBackupRequest - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. 
snake case (default)
+            _configuration (Configuration): the instance to use when
+                deserializing a file_type parameter.
+                If passed, type conversion is attempted
+                If omitted no type conversion is done.
+            _visited_composed_classes (tuple): This stores a tuple of
+                classes that we have traveled through so that
+                if we see that class again we will not use its
+                discriminator again.
+                When traveling through a discriminator, the
+                composed schema that is
+                is traveled through is added to this set.
+                For example if Animal has a discriminator
+                petType and we pass in "Dog", and the class Dog
+                allOf includes Animal, we move through Animal
+                once using the discriminator, and pick Dog.
+                Then in Dog, we will make an instance of the
+                Animal class but this time we won't travel
+                through its discriminator because we passed in
+                _visited_composed_classes = (Animal,)
+            name (str): The name of the backup. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. [optional]  # noqa: E501
+            description (str): A description of the backup. [optional]  # noqa: E501
+        """
+
+        _check_type = kwargs.pop("_check_type", True)
+        _spec_property_naming = kwargs.pop("_spec_property_naming", False)
+        _path_to_item = kwargs.pop("_path_to_item", ())
+        _configuration = kwargs.pop("_configuration", None)
+        _visited_composed_classes = kwargs.pop("_visited_composed_classes", ())
+
+        self = super(OpenApiModel, cls).__new__(cls)
+
+        if args:
+            raise PineconeApiTypeError(
+                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """CreateBackupRequest - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. 
+                When traveling through a discriminator, the
+                composed schema that is
+                is traveled through is added to this set.
+                For example if Animal has a discriminator
+                petType and we pass in "Dog", and the class Dog
+                allOf includes Animal, we move through Animal
+                once using the discriminator, and pick Dog.
+                Then in Dog, we will make an instance of the
+                Animal class but this time we won't travel
+                through its discriminator because we passed in
+                _visited_composed_classes = (Animal,)
+            name (str): The name of the backup. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. [optional]  # noqa: E501
+            description (str): A description of the backup. [optional]  # noqa: E501
+        """
+
+        _check_type = kwargs.pop("_check_type", True)
+        _spec_property_naming = kwargs.pop("_spec_property_naming", False)
+        _path_to_item = kwargs.pop("_path_to_item", ())
+        _configuration = kwargs.pop("_configuration", None)
+        _visited_composed_classes = kwargs.pop("_visited_composed_classes", ())
+
+        if args:
+            raise PineconeApiTypeError(
+                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
+                % (args, self.__class__.__name__),
+                path_to_item=_path_to_item,
+                valid_classes=(self.__class__,),
+            )
+
+        self._data_store = {}
+        self._check_type = _check_type
+        self._spec_property_naming = _spec_property_naming
+        self._path_to_item = _path_to_item
+        self._configuration = _configuration
+        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
+
+        for var_name, var_value in kwargs.items():
+            if (
+                var_name not in self.attribute_map
+                and self._configuration is not None
+                and self._configuration.discard_unknown_keys
+                and self.additional_properties_type is None
+            ):
+                # discard variable.
+                continue
+            setattr(self, var_name, var_value)
+            if var_name in self.read_only_vars:
+                raise PineconeApiAttributeError(
+                    f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py b/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py new file mode 100644 index 00000000..99f713b2 --- /dev/null +++ b/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py @@ -0,0 +1,286 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.db_control.model.deletion_protection import DeletionProtection + from pinecone.core.openapi.db_control.model.index_tags import IndexTags + + globals()["DeletionProtection"] = DeletionProtection + globals()["IndexTags"] = IndexTags + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="CreateIndexFromBackupRequest") + + +class CreateIndexFromBackupRequest(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. 
+ attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("name",): {"max_length": 45, "min_length": 1} + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ """ + lazy_import() + return { + "name": (str,), # noqa: E501 + "tags": (IndexTags,), # noqa: E501 + "deletion_protection": (DeletionProtection,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "name": "name", # noqa: E501 + "tags": "tags", # noqa: E501 + "deletion_protection": "deletion_protection", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 + """CreateIndexFromBackupRequest - a model defined in OpenAPI + + Args: + name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. 
+ For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + tags (IndexTags): [optional] # noqa: E501 + deletion_protection (DeletionProtection): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.name = name + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, name, *args, **kwargs) -> None: # noqa: E501 + """CreateIndexFromBackupRequest - a model defined in OpenAPI + + Args: + name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + tags (IndexTags): [optional] # noqa: E501 + deletion_protection (DeletionProtection): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.name = name + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/openapi/db_control/model/dedicated_spec.py b/pinecone/core/openapi/db_control/model/dedicated_spec.py new file mode 100644 index 00000000..cb2c5d20 --- /dev/null +++ b/pinecone/core/openapi/db_control/model/dedicated_spec.py @@ -0,0 +1,266 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="DedicatedSpec") + + +class DedicatedSpec(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). 
The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "environment": (str,) # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "environment": "environment" # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa: E501 + """DedicatedSpec - a model defined in OpenAPI + + Args: + environment (str): The environment where the index is hosted. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.environment = environment + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 + """DedicatedSpec - a model defined in OpenAPI + + Args: + environment (str): The environment where the index is hosted. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. 
+ _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.environment = environment + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/index_model_spec.py b/pinecone/core/openapi/db_control/model/index_model_spec.py index b75b803f..6bfa0a75 100644 --- a/pinecone/core/openapi/db_control/model/index_model_spec.py +++ b/pinecone/core/openapi/db_control/model/index_model_spec.py @@ -28,9 +28,11 @@ def lazy_import(): + from pinecone.core.openapi.db_control.model.dedicated_spec import DedicatedSpec from pinecone.core.openapi.db_control.model.pod_spec import PodSpec from pinecone.core.openapi.db_control.model.serverless_spec import ServerlessSpec + globals()["DedicatedSpec"] = DedicatedSpec globals()["PodSpec"] = PodSpec globals()["ServerlessSpec"] = ServerlessSpec @@ -94,6 +96,7 @@ def openapi_types(cls): """ lazy_import() return { + "dedicated": (DedicatedSpec,), # noqa: E501 "pod": (PodSpec,), # noqa: E501 "serverless": (ServerlessSpec,), # noqa: E501 } @@ -103,6 +106,7 @@ def discriminator(cls): return None attribute_map: Dict[str, str] = { + "dedicated": "dedicated", # noqa: E501 "pod": "pod", # noqa: E501 "serverless": "serverless", # noqa: E501 } @@ -147,6 +151,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + dedicated (DedicatedSpec): [optional] # noqa: E501 pod (PodSpec): [optional] # noqa: E501 serverless (ServerlessSpec): [optional] # noqa: E501 """ @@ -232,6 +237,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + dedicated (DedicatedSpec): [optional] # noqa: E501 pod (PodSpec): [optional] # noqa: E501 serverless (ServerlessSpec): [optional] # noqa: E501 """ diff --git a/pinecone/core/openapi/db_control/model/index_spec.py 
b/pinecone/core/openapi/db_control/model/index_spec.py index add2979e..cee103f2 100644 --- a/pinecone/core/openapi/db_control/model/index_spec.py +++ b/pinecone/core/openapi/db_control/model/index_spec.py @@ -28,9 +28,11 @@ def lazy_import(): + from pinecone.core.openapi.db_control.model.dedicated_spec import DedicatedSpec from pinecone.core.openapi.db_control.model.pod_spec import PodSpec from pinecone.core.openapi.db_control.model.serverless_spec import ServerlessSpec + globals()["DedicatedSpec"] = DedicatedSpec globals()["PodSpec"] = PodSpec globals()["ServerlessSpec"] = ServerlessSpec @@ -89,6 +91,7 @@ def openapi_types(cls): return { "serverless": (ServerlessSpec,), # noqa: E501 "pod": (PodSpec,), # noqa: E501 + "dedicated": (DedicatedSpec,), # noqa: E501 } @cached_class_property @@ -98,6 +101,7 @@ def discriminator(cls): attribute_map: Dict[str, str] = { "serverless": "serverless", # noqa: E501 "pod": "pod", # noqa: E501 + "dedicated": "dedicated", # noqa: E501 } read_only_vars: Set[str] = set([]) @@ -142,6 +146,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) serverless (ServerlessSpec): [optional] # noqa: E501 pod (PodSpec): [optional] # noqa: E501 + dedicated (DedicatedSpec): [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -227,6 +232,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) serverless (ServerlessSpec): [optional] # noqa: E501 pod (PodSpec): [optional] # noqa: E501 + dedicated (DedicatedSpec): [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) diff --git a/pinecone/core/openapi/db_control/model/pagination_response.py b/pinecone/core/openapi/db_control/model/pagination_response.py new file mode 100644 index 00000000..3a0182a5 --- /dev/null +++ b/pinecone/core/openapi/db_control/model/pagination_response.py @@ -0,0 +1,266 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector 
database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="PaginationResponse") + + +class PaginationResponse(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "next": (str,) # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "next": "next" # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], next, *args, **kwargs) -> T: # noqa: E501 + """PaginationResponse - a model defined in OpenAPI + + Args: + next (str): The token to use to retrieve the next page of results. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. 
snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.next = next + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, next, *args, **kwargs) -> None: # noqa: E501 + """PaginationResponse - a model defined in OpenAPI + + Args: + next (str): The token to use to retrieve the next page of results. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.next = next + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/restore_job_list.py b/pinecone/core/openapi/db_control/model/restore_job_list.py new file mode 100644 index 00000000..fb66e233 --- /dev/null +++ b/pinecone/core/openapi/db_control/model/restore_job_list.py @@ -0,0 +1,280 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.db_control.model.pagination_response import PaginationResponse + from pinecone.core.openapi.db_control.model.restore_job_model import RestoreJobModel + + globals()["PaginationResponse"] = PaginationResponse + globals()["RestoreJobModel"] = RestoreJobModel + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="RestoreJobList") + + +class RestoreJobList(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "data": ([RestoreJobModel],), # noqa: E501 + "pagination": (PaginationResponse,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "data": "data", # noqa: E501 + "pagination": "pagination", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], data, *args, **kwargs) -> T: # noqa: E501 + """RestoreJobList - a model defined in OpenAPI + + Args: + data ([RestoreJobModel]): + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. 
snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + pagination (PaginationResponse): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.data = data + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, data, *args, **kwargs) -> None: # noqa: E501 + """RestoreJobList - a model defined in OpenAPI + + Args: + data ([RestoreJobModel]): + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. 
+ When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + pagination (PaginationResponse): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.data = data + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/openapi/db_control/model/restore_job_model.py b/pinecone/core/openapi/db_control/model/restore_job_model.py new file mode 100644 index 00000000..8f541188 --- /dev/null +++ b/pinecone/core/openapi/db_control/model/restore_job_model.py @@ -0,0 +1,326 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="RestoreJobModel") + + +class RestoreJobModel(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). 
The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("percent_complete",): {"inclusive_maximum": 100.0, "inclusive_minimum": 0.0} + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ """ + return { + "restore_job_id": (str,), # noqa: E501 + "backup_id": (str,), # noqa: E501 + "target_index_name": (str,), # noqa: E501 + "target_index_id": (str,), # noqa: E501 + "status": (str,), # noqa: E501 + "created_at": (datetime,), # noqa: E501 + "completed_at": (datetime,), # noqa: E501 + "percent_complete": (float,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "restore_job_id": "restore_job_id", # noqa: E501 + "backup_id": "backup_id", # noqa: E501 + "target_index_name": "target_index_name", # noqa: E501 + "target_index_id": "target_index_id", # noqa: E501 + "status": "status", # noqa: E501 + "created_at": "created_at", # noqa: E501 + "completed_at": "completed_at", # noqa: E501 + "percent_complete": "percent_complete", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data( + cls: Type[T], + restore_job_id, + backup_id, + target_index_name, + target_index_id, + status, + created_at, + *args, + **kwargs, + ) -> T: # noqa: E501 + """RestoreJobModel - a model defined in OpenAPI + + Args: + restore_job_id (str): Unique identifier for the restore job + backup_id (str): Backup used for the restore + target_index_name (str): Name of the index into which data is being restored + target_index_id (str): ID of the index + status (str): Status of the restore job + created_at (datetime): Timestamp when the restore job started + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + completed_at (datetime): Timestamp when the restore job finished [optional] # noqa: E501 + percent_complete (float): The progress made by the restore job out of 100 [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.restore_job_id = restore_job_id + self.backup_id = backup_id + self.target_index_name = target_index_name + self.target_index_id = target_index_id + self.status = status + self.created_at = created_at + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__( + self, + restore_job_id, + backup_id, + target_index_name, + target_index_id, + status, + created_at, + *args, + **kwargs, + ) -> None: # noqa: E501 + """RestoreJobModel - a model defined in OpenAPI + + Args: + restore_job_id (str): Unique identifier for the restore job + backup_id (str): Backup used for the restore + target_index_name (str): Name of the index into which data is being restored + target_index_id (str): ID of the index + status (str): Status of the restore job + created_at (datetime): Timestamp when the restore job started + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + completed_at (datetime): Timestamp when the restore job finished [optional] # noqa: E501 + percent_complete (float): The progress made by the restore job out of 100 [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.restore_job_id = restore_job_id + self.backup_id = backup_id + self.target_index_name = target_index_name + self.target_index_id = target_index_id + self.status = status + self.created_at = created_at + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/openapi/db_control/models/__init__.py b/pinecone/core/openapi/db_control/models/__init__.py index fd82f121..59b93661 100644 --- a/pinecone/core/openapi/db_control/models/__init__.py +++ b/pinecone/core/openapi/db_control/models/__init__.py @@ -9,6 +9,8 @@ # import sys # sys.setrecursionlimit(n) +from pinecone.core.openapi.db_control.model.backup_list import BackupList +from pinecone.core.openapi.db_control.model.backup_model import BackupModel from pinecone.core.openapi.db_control.model.collection_list import CollectionList from pinecone.core.openapi.db_control.model.collection_model import CollectionModel from pinecone.core.openapi.db_control.model.configure_index_request import ConfigureIndexRequest @@ -21,6 +23,7 @@ from pinecone.core.openapi.db_control.model.configure_index_request_spec_pod import ( ConfigureIndexRequestSpecPod, ) +from pinecone.core.openapi.db_control.model.create_backup_request import CreateBackupRequest from pinecone.core.openapi.db_control.model.create_collection_request import CreateCollectionRequest from pinecone.core.openapi.db_control.model.create_index_for_model_request import ( CreateIndexForModelRequest, @@ -28,7 +31,11 @@ from pinecone.core.openapi.db_control.model.create_index_for_model_request_embed import ( CreateIndexForModelRequestEmbed, ) +from pinecone.core.openapi.db_control.model.create_index_from_backup_request import ( + CreateIndexFromBackupRequest, +) from pinecone.core.openapi.db_control.model.create_index_request import CreateIndexRequest +from pinecone.core.openapi.db_control.model.dedicated_spec import DedicatedSpec from pinecone.core.openapi.db_control.model.deletion_protection import DeletionProtection from pinecone.core.openapi.db_control.model.error_response import ErrorResponse from pinecone.core.openapi.db_control.model.error_response_error import ErrorResponseError @@ -39,6 +46,9 @@ from pinecone.core.openapi.db_control.model.index_spec import IndexSpec from 
pinecone.core.openapi.db_control.model.index_tags import IndexTags from pinecone.core.openapi.db_control.model.model_index_embed import ModelIndexEmbed +from pinecone.core.openapi.db_control.model.pagination_response import PaginationResponse from pinecone.core.openapi.db_control.model.pod_spec import PodSpec from pinecone.core.openapi.db_control.model.pod_spec_metadata_config import PodSpecMetadataConfig +from pinecone.core.openapi.db_control.model.restore_job_list import RestoreJobList +from pinecone.core.openapi.db_control.model.restore_job_model import RestoreJobModel from pinecone.core.openapi.db_control.model.serverless_spec import ServerlessSpec diff --git a/pinecone/core/openapi/inference/api/inference_api.py b/pinecone/core/openapi/inference/api/inference_api.py index 6a425a03..3c9ec25b 100644 --- a/pinecone/core/openapi/inference/api/inference_api.py +++ b/pinecone/core/openapi/inference/api/inference_api.py @@ -26,6 +26,8 @@ from pinecone.core.openapi.inference.model.embed_request import EmbedRequest from pinecone.core.openapi.inference.model.embeddings_list import EmbeddingsList from pinecone.core.openapi.inference.model.error_response import ErrorResponse +from pinecone.core.openapi.inference.model.model_info import ModelInfo +from pinecone.core.openapi.inference.model.model_info_list import ModelInfoList from pinecone.core.openapi.inference.model.rerank_request import RerankRequest from pinecone.core.openapi.inference.model.rerank_result import RerankResult @@ -108,6 +110,146 @@ def __embed(self, **kwargs: ExtraOpenApiKwargsTypedDict): callable=__embed, ) + def __get_model(self, model_name, **kwargs: ExtraOpenApiKwargsTypedDict): + """Get available model details. # noqa: E501 + + Get model details. # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_model(model_name, async_req=True) + >>> result = thread.get() + + Args: + model_name (str): The name of the model to look up. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + ModelInfo + If the method is called asynchronously, returns the request + thread. 
+ """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["model_name"] = model_name + return self.call_with_http_info(**kwargs) + + self.get_model = _Endpoint( + settings={ + "response_type": (ModelInfo,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/models/{model_name}", + "operation_id": "get_model", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["model_name"], + "required": ["model_name"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"model_name": (str,)}, + "attribute_map": {"model_name": "model_name"}, + "location_map": {"model_name": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__get_model, + ) + + def __list_models(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """Get available models. # noqa: E501 + + Get available models. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_models(async_req=True) + >>> result = thread.get() + + + Keyword Args: + type (str): Filter models by type ('embed' or 'rerank'). [optional] + vector_type (str): Filter embedding models by vector type ('dense' or 'sparse'). Only relevant when `type=embed`. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. 
+ _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + ModelInfoList + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_models = _Endpoint( + settings={ + "response_type": (ModelInfoList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/models", + "operation_id": "list_models", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["type", "vector_type"], + "required": [], + "nullable": [], + "enum": ["type", "vector_type"], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": { + ("type",): {"EMBED": "embed", "RERANK": "rerank"}, + ("vector_type",): {"DENSE": "dense", "SPARSE": "sparse"}, + }, + "openapi_types": {"type": (str,), "vector_type": (str,)}, + "attribute_map": {"type": "type", "vector_type": "vector_type"}, + "location_map": {"type": "query", "vector_type": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_models, + ) + def __rerank(self, **kwargs: ExtraOpenApiKwargsTypedDict): """Rerank documents # noqa: E501 @@ -247,6 +389,132 @@ async def __embed(self, **kwargs): callable=__embed, ) + async def __get_model(self, model_name, **kwargs): + """Get available model details. # noqa: E501 + + Get model details. # noqa: E501 + + + Args: + model_name (str): The name of the model to look up. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. 
If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + ModelInfo + """ + self._process_openapi_kwargs(kwargs) + kwargs["model_name"] = model_name + return await self.call_with_http_info(**kwargs) + + self.get_model = _AsyncioEndpoint( + settings={ + "response_type": (ModelInfo,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/models/{model_name}", + "operation_id": "get_model", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["model_name"], + "required": ["model_name"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"model_name": (str,)}, + "attribute_map": {"model_name": "model_name"}, + "location_map": {"model_name": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__get_model, + ) + + async def __list_models(self, **kwargs): + """Get available models. # noqa: E501 + + Get available models. # noqa: E501 + + + + Keyword Args: + type (str): Filter models by type ('embed' or 'rerank'). [optional] + vector_type (str): Filter embedding models by vector type ('dense' or 'sparse'). Only relevant when `type=embed`. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. 
It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + ModelInfoList + """ + self._process_openapi_kwargs(kwargs) + return await self.call_with_http_info(**kwargs) + + self.list_models = _AsyncioEndpoint( + settings={ + "response_type": (ModelInfoList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/models", + "operation_id": "list_models", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["type", "vector_type"], + "required": [], + "nullable": [], + "enum": ["type", "vector_type"], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": { + ("type",): {"EMBED": "embed", "RERANK": "rerank"}, + ("vector_type",): {"DENSE": "dense", "SPARSE": "sparse"}, + }, + "openapi_types": {"type": (str,), "vector_type": (str,)}, + "attribute_map": {"type": "type", "vector_type": "vector_type"}, + "location_map": {"type": "query", "vector_type": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_models, + ) + async def __rerank(self, **kwargs): """Rerank documents # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/model_info.py b/pinecone/core/openapi/inference/model/model_info.py new file mode 100644 index 00000000..2ce733cc --- /dev/null +++ b/pinecone/core/openapi/inference/model/model_info.py @@ -0,0 +1,333 @@ +""" +Pinecone Inference API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.inference.model.model_info_supported_metrics import ( + ModelInfoSupportedMetrics, + ) + from pinecone.core.openapi.inference.model.model_info_supported_parameter import ( + ModelInfoSupportedParameter, + ) + + globals()["ModelInfoSupportedMetrics"] = ModelInfoSupportedMetrics + globals()["ModelInfoSupportedParameter"] = ModelInfoSupportedParameter + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="ModelInfo") + + +class ModelInfo(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. 
+ additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = { + ("type",): {"EMBED": "embed", "RERANK": "rerank"}, + ("vector_type",): {"DENSE": "dense", "SPARSE": "sparse"}, + } + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("dimension",): {"inclusive_maximum": 20000, "inclusive_minimum": 1}, + ("sequence_length",): {"inclusive_minimum": 1}, + ("batch_size",): {"inclusive_minimum": 1}, + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ """ + lazy_import() + return { + "name": (str,), # noqa: E501 + "short_description": (str,), # noqa: E501 + "type": (str,), # noqa: E501 + "supported_parameters": ([ModelInfoSupportedParameter],), # noqa: E501 + "vector_type": (str,), # noqa: E501 + "dimension": (int,), # noqa: E501 + "modality": (str,), # noqa: E501 + "sequence_length": (int,), # noqa: E501 + "batch_size": (int,), # noqa: E501 + "supported_metrics": (ModelInfoSupportedMetrics,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "name": "name", # noqa: E501 + "short_description": "short_description", # noqa: E501 + "type": "type", # noqa: E501 + "supported_parameters": "supported_parameters", # noqa: E501 + "vector_type": "vector_type", # noqa: E501 + "dimension": "dimension", # noqa: E501 + "modality": "modality", # noqa: E501 + "sequence_length": "sequence_length", # noqa: E501 + "batch_size": "batch_size", # noqa: E501 + "supported_metrics": "supported_metrics", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data( + cls: Type[T], name, short_description, type, supported_parameters, *args, **kwargs + ) -> T: # noqa: E501 + """ModelInfo - a model defined in OpenAPI + + Args: + name (str): The name of the model. + short_description (str): A summary of the model. + type (str): The type of model (e.g. 'embed' or 'rerank'). + supported_parameters ([ModelInfoSupportedParameter]): + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + vector_type (str): Whether the embedding model produces 'dense' or 'sparse' embeddings. [optional] # noqa: E501 + dimension (int): The embedding model dimension (applies to dense embedding models only). [optional] # noqa: E501 + modality (str): The modality of the model (e.g. 'text'). [optional] # noqa: E501 + sequence_length (int): The maximum tokens per sequence supported by the model. [optional] # noqa: E501 + batch_size (int): The maximum batch size (number of sequences) supported by the model. 
[optional] # noqa: E501 + supported_metrics (ModelInfoSupportedMetrics): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.name = name + self.short_description = short_description + self.type = type + self.supported_parameters = supported_parameters + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__( + self, name, short_description, type, supported_parameters, *args, **kwargs + ) -> None: # noqa: E501 + """ModelInfo - a model defined in OpenAPI + + Args: + name (str): The name of the model. + short_description (str): A summary of the model. + type (str): The type of model (e.g. 'embed' or 'rerank'). 
+ supported_parameters ([ModelInfoSupportedParameter]): + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + vector_type (str): Whether the embedding model produces 'dense' or 'sparse' embeddings. [optional] # noqa: E501 + dimension (int): The embedding model dimension (applies to dense embedding models only). [optional] # noqa: E501 + modality (str): The modality of the model (e.g. 'text'). [optional] # noqa: E501 + sequence_length (int): The maximum tokens per sequence supported by the model. [optional] # noqa: E501 + batch_size (int): The maximum batch size (number of sequences) supported by the model. 
[optional] # noqa: E501 + supported_metrics (ModelInfoSupportedMetrics): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.name = name + self.short_description = short_description + self.type = type + self.supported_parameters = supported_parameters + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/inference/model/model_info_list.py b/pinecone/core/openapi/inference/model/model_info_list.py new file mode 100644 index 00000000..9d176c0f --- /dev/null +++ b/pinecone/core/openapi/inference/model/model_info_list.py @@ -0,0 +1,268 @@ +""" +Pinecone Inference API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. 
# noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.inference.model.model_info import ModelInfo + + globals()["ModelInfo"] = ModelInfo + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="ModelInfoList") + + +class ModelInfoList(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "models": ([ModelInfo],) # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "models": "models" # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """ModelInfoList - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. 
+ If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + models ([ModelInfo]): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """ModelInfoList - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + models ([ModelInfo]): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/inference/model/model_info_metric.py b/pinecone/core/openapi/inference/model/model_info_metric.py new file mode 100644 index 00000000..85dd0457 --- /dev/null +++ b/pinecone/core/openapi/inference/model/model_info_metric.py @@ -0,0 +1,284 @@ +""" +Pinecone Inference API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. 
# noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="ModelInfoMetric") + + +class ModelInfoMetric(ModelSimple): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = { + ("value",): {"COSINE": "cosine", "EUCLIDEAN": "euclidean", "DOTPRODUCT": "dotproduct"} + } + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return {"value": (str,)} + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = {} + + read_only_vars: Set[str] = set() + + _composed_schemas = None + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: + """ModelInfoMetric - a model defined in OpenAPI + + Note that value can be passed either in args or in kwargs, but not in both. + + Args: + args[0] (str): A distance metric that the embedding model supports for similarity searches.., must be one of ["cosine", "euclidean", "dotproduct", ] # noqa: E501 + + Keyword Args: + value (str): A distance metric that the embedding model supports for similarity searches.., must be one of ["cosine", "euclidean", "dotproduct", ] # noqa: E501 + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + # required up here when default value is not given + _path_to_item = kwargs.pop("_path_to_item", ()) + + value = None + if "value" in kwargs: + value = kwargs.pop("value") + + if value is None and args: + if len(args) == 1: + value = args[0] + elif len(args) > 1: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + if value is None: + raise PineconeApiTypeError( + "value is required, but not passed in args or kwargs and doesn't have default", + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.value = value + if kwargs: + raise PineconeApiTypeError( + "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." + % (kwargs, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: + """ModelInfoMetric - a model defined in OpenAPI + + Note that value can be passed either in args or in kwargs, but not in both. + + Args: + args[0] (str): A distance metric that the embedding model supports for similarity searches., must be one of ["cosine", "euclidean", "dotproduct", ] # noqa: E501 + + Keyword Args: + value (str): A distance metric that the embedding model supports for similarity searches., must be one of ["cosine", "euclidean", "dotproduct", ] # noqa: E501 + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + # required up here when default value is not given + _path_to_item = kwargs.pop("_path_to_item", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + value = None + if "value" in kwargs: + value = kwargs.pop("value") + + if value is None and args: + if len(args) == 1: + value = args[0] + elif len(args) > 1: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + if value is None: + raise PineconeApiTypeError( + "value is required, but not passed in args or kwargs and doesn't have default", + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.value = value + if kwargs: + raise PineconeApiTypeError( + "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." + % (kwargs, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + return self diff --git a/pinecone/core/openapi/inference/model/model_info_supported_metrics.py b/pinecone/core/openapi/inference/model/model_info_supported_metrics.py new file mode 100644 index 00000000..96b2bb41 --- /dev/null +++ b/pinecone/core/openapi/inference/model/model_info_supported_metrics.py @@ -0,0 +1,290 @@ +""" +Pinecone Inference API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.inference.model.model_info_metric import ModelInfoMetric + + globals()["ModelInfoMetric"] = ModelInfoMetric + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="ModelInfoSupportedMetrics") + + +class ModelInfoSupportedMetrics(ModelSimple): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return {"value": ([ModelInfoMetric],)} + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = {} + + read_only_vars: Set[str] = set() + + _composed_schemas = None + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: + """ModelInfoSupportedMetrics - a model defined in OpenAPI + + Note that value can be passed either in args or in kwargs, but not in both. + + Args: + args[0] ([ModelInfoMetric]): The distance metrics supported by the model for similarity search.. # noqa: E501 + + Keyword Args: + value ([ModelInfoMetric]): The distance metrics supported by the model for similarity search.. # noqa: E501 + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + # required up here when default value is not given + _path_to_item = kwargs.pop("_path_to_item", ()) + + value = None + if "value" in kwargs: + value = kwargs.pop("value") + + if value is None and args: + if len(args) == 1: + value = args[0] + elif len(args) > 1: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + if value is None: + raise PineconeApiTypeError( + "value is required, but not passed in args or kwargs and doesn't have default", + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.value = value + if kwargs: + raise PineconeApiTypeError( + "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." + % (kwargs, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: + """ModelInfoSupportedMetrics - a model defined in OpenAPI + + Note that value can be passed either in args or in kwargs, but not in both. + + Args: + args[0] ([ModelInfoMetric]): The distance metrics supported by the model for similarity search. # noqa: E501 + + Keyword Args: + value ([ModelInfoMetric]): The distance metrics supported by the model for similarity search. # noqa: E501 + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + # required up here when default value is not given + _path_to_item = kwargs.pop("_path_to_item", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + value = None + if "value" in kwargs: + value = kwargs.pop("value") + + if value is None and args: + if len(args) == 1: + value = args[0] + elif len(args) > 1: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + if value is None: + raise PineconeApiTypeError( + "value is required, but not passed in args or kwargs and doesn't have default", + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.value = value + if kwargs: + raise PineconeApiTypeError( + "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." + % (kwargs, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + return self diff --git a/pinecone/core/openapi/inference/model/model_info_supported_parameter.py b/pinecone/core/openapi/inference/model/model_info_supported_parameter.py new file mode 100644 index 00000000..56b17ac3 --- /dev/null +++ b/pinecone/core/openapi/inference/model/model_info_supported_parameter.py @@ -0,0 +1,302 @@ +""" +Pinecone Inference API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="ModelInfoSupportedParameter") + + +class ModelInfoSupportedParameter(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "parameter": (str,), # noqa: E501 + "type": (str,), # noqa: E501 + "value_type": (str,), # noqa: E501 + "required": (bool,), # noqa: E501 + "allowed_values": ([dict],), # noqa: E501 + "min": (float,), # noqa: E501 + "max": (float,), # noqa: E501 + "default": (dict,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "parameter": "parameter", # noqa: E501 + "type": "type", # noqa: E501 + "value_type": "value_type", # noqa: E501 + "required": "required", # noqa: E501 + "allowed_values": "allowed_values", # noqa: E501 + "min": "min", # noqa: E501 + "max": "max", # noqa: E501 + "default": "default", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data( + cls: Type[T], parameter, type, value_type, required, *args, **kwargs + ) -> T: # noqa: E501 + """ModelInfoSupportedParameter - a model defined in OpenAPI + + Args: + parameter (str): The name of the parameter. + type (str): The parameter type e.g. 'one_of', 'numeric_range', or 'any'. 
If the type is 'one_of', then 'allowed_values' will be set, and the value specified must be one of the allowed values. 'one_of' is only compatible with value_type 'string' or 'integer'. If 'numeric_range', then 'min' and 'max' will be set, then the value specified must adhere to the value_type and must fall within the `[min, max]` range (inclusive). If 'any' then any value is allowed, as long as it adheres to the value_type. + value_type (str): The type of value the parameter accepts, e.g. 'string', 'integer', 'float', or 'boolean'. + required (bool): Whether the parameter is required (true) or optional (false). + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + allowed_values ([dict]): The allowed parameter values when the type is 'one_of'. [optional] # noqa: E501 + min (float): The minimum allowed value (inclusive) when the type is 'numeric_range'. [optional] # noqa: E501 + max (float): The maximum allowed value (inclusive) when the type is 'numeric_range'. [optional] # noqa: E501 + default (dict): The default value for the parameter when a parameter is optional. [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.parameter = parameter + self.type = type + self.value_type = value_type + self.required = required + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, parameter, type, value_type, required, *args, **kwargs) -> None: # noqa: E501 + """ModelInfoSupportedParameter - a model defined in OpenAPI + + Args: + parameter (str): The name of the parameter. + type (str): The parameter type e.g. 'one_of', 'numeric_range', or 'any'. If the type is 'one_of', then 'allowed_values' will be set, and the value specified must be one of the allowed values. 'one_of' is only compatible with value_type 'string' or 'integer'. If 'numeric_range', then 'min' and 'max' will be set, then the value specified must adhere to the value_type and must fall within the `[min, max]` range (inclusive). If 'any' then any value is allowed, as long as it adheres to the value_type. + value_type (str): The type of value the parameter accepts, e.g. 'string', 'integer', 'float', or 'boolean'. + required (bool): Whether the parameter is required (true) or optional (false). + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. 
+ _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + allowed_values ([dict]): The allowed parameter values when the type is 'one_of'. [optional] # noqa: E501 + min (float): The minimum allowed value (inclusive) when the type is 'numeric_range'. [optional] # noqa: E501 + max (float): The maximum allowed value (inclusive) when the type is 'numeric_range'. [optional] # noqa: E501 + default (dict): The default value for the parameter when a parameter is optional. [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.parameter = parameter + self.type = type + self.value_type = value_type + self.required = required + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/inference/models/__init__.py b/pinecone/core/openapi/inference/models/__init__.py index bbb2cd34..f338afe2 100644 --- a/pinecone/core/openapi/inference/models/__init__.py +++ b/pinecone/core/openapi/inference/models/__init__.py @@ -18,6 +18,15 @@ from pinecone.core.openapi.inference.model.embeddings_list_usage import EmbeddingsListUsage from pinecone.core.openapi.inference.model.error_response import ErrorResponse from pinecone.core.openapi.inference.model.error_response_error import ErrorResponseError +from pinecone.core.openapi.inference.model.model_info import ModelInfo +from pinecone.core.openapi.inference.model.model_info_list import ModelInfoList +from pinecone.core.openapi.inference.model.model_info_metric import ModelInfoMetric +from pinecone.core.openapi.inference.model.model_info_supported_metrics import ( + ModelInfoSupportedMetrics, +) +from pinecone.core.openapi.inference.model.model_info_supported_parameter import ( + ModelInfoSupportedParameter, +) from 
pinecone.core.openapi.inference.model.ranked_document import RankedDocument from pinecone.core.openapi.inference.model.rerank_request import RerankRequest from pinecone.core.openapi.inference.model.rerank_result import RerankResult From c0d7aef2aaef5b1b8ad16a38bed92d148180f582 Mon Sep 17 00:00:00 2001 From: Avi Mizrahi Date: Tue, 25 Mar 2025 17:25:15 +0200 Subject: [PATCH 05/48] [KE] Add assistant description (#469) ## Problem There was no information about Pinecone Assistant in the README. ## Solution The README now includes a section describing Pinecone Assistant and how to install its plugin. ## Type of Change - [ ] Bug fix (non-breaking change which fixes an issue) - [ ] New feature (non-breaking change which adds functionality) - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) - [ ] This change requires a documentation update - [ ] Infrastructure change (CI configs, etc) - [x] Non-code change (docs, etc) - [ ] None of the above: (explain here) ## Test Plan Describe specific steps for validating this change. --- README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/README.md b/README.md index 627b075e..60b7b3ae 100644 --- a/README.md +++ b/README.md @@ -189,6 +189,17 @@ response = index.search_records( ) ``` +## Pinecone Assistant +### Installing the Pinecone Assistant Python plugin + +To interact with Pinecone Assistant using the Python SDK, install the `pinecone-plugin-assistant` package: + +```shell +pip install --upgrade pinecone pinecone-plugin-assistant +``` +For more information on Pinecone Assistant, see the [Pinecone Assistant documentation](https://docs.pinecone.io/guides/assistant/overview). + + ## More information on usage Detailed information on specific ways of using the SDK are covered in these other pages. 
From 47f5f3033aefebd32839471851c171c122cb69fc Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Wed, 23 Apr 2025 13:32:45 -0400 Subject: [PATCH 06/48] Remove urllib3 info from useragent string --- pinecone/utils/user_agent.py | 10 ++--- tests/unit/utils/test_setup_openapi_client.py | 6 +-- tests/unit/utils/test_user_agent.py | 40 +++++++++++++------ 3 files changed, 34 insertions(+), 22 deletions(-) diff --git a/pinecone/utils/user_agent.py b/pinecone/utils/user_agent.py index 845a0ab1..b52a4edb 100644 --- a/pinecone/utils/user_agent.py +++ b/pinecone/utils/user_agent.py @@ -1,5 +1,3 @@ -import urllib3 - from .version import __version__ from .constants import SOURCE_TAG import re @@ -19,11 +17,11 @@ def _build_source_tag_field(source_tag): def _get_user_agent(client_id, config): - user_agent_details = {"urllib3": urllib3.__version__} - user_agent = "{} ({})".format( - client_id, ", ".join([f"{k}:{v}" for k, v in user_agent_details.items()]) + user_agent = ( + f"{client_id}; {_build_source_tag_field(config.source_tag)}" + if config.source_tag + else client_id ) - user_agent += f"; {_build_source_tag_field(config.source_tag)}" if config.source_tag else "" return user_agent diff --git a/tests/unit/utils/test_setup_openapi_client.py b/tests/unit/utils/test_setup_openapi_client.py index acd96c34..199e4ae2 100644 --- a/tests/unit/utils/test_setup_openapi_client.py +++ b/tests/unit/utils/test_setup_openapi_client.py @@ -19,7 +19,7 @@ def test_setup_openapi_client(self): openapi_config=openapi_config, pool_threads=2, ) - user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+ \(urllib3\:\d+\.\d+\.\d+\)") + user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+") assert re.match(user_agent_regex, control_plane_client.api_client.user_agent) assert re.match( user_agent_regex, control_plane_client.api_client.default_headers["User-Agent"] @@ -38,7 +38,7 @@ def test_setup_openapi_client_with_api_version(self): pool_threads=2, api_version="2024-04", ) - user_agent_regex = 
re.compile(r"python-client-\d+\.\d+\.\d+ \(urllib3\:\d+\.\d+\.\d+\)") + user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+") assert re.match(user_agent_regex, control_plane_client.api_client.user_agent) assert re.match( user_agent_regex, control_plane_client.api_client.default_headers["User-Agent"] @@ -102,7 +102,7 @@ def test_setup_openapi_client_with_host_override(self, plugin_api_version, plugi assert isinstance(plugin_client, plugin_api) # We want requests from plugins to have a user-agent matching the host SDK. - user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+ \(urllib3\:\d+\.\d+\.\d+\)") + user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+") assert re.match(user_agent_regex, plugin_client.api_client.user_agent) assert re.match(user_agent_regex, plugin_client.api_client.default_headers["User-Agent"]) diff --git a/tests/unit/utils/test_user_agent.py b/tests/unit/utils/test_user_agent.py index 58cdfbbf..4eb5b06e 100644 --- a/tests/unit/utils/test_user_agent.py +++ b/tests/unit/utils/test_user_agent.py @@ -7,30 +7,38 @@ class TestUserAgent: def test_user_agent(self): config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host") useragent = get_user_agent(config) - assert re.search(r"python-client-\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"urllib3:\d+\.\d+\.\d+", useragent) is not None + assert re.search(r"^python-client-\d+\.\d+\.\d+$", useragent) is not None def test_user_agent_with_source_tag(self): config = ConfigBuilder.build( api_key="my-api-key", host="https://my-controller-host", source_tag="my_source_tag" ) useragent = get_user_agent(config) - assert re.search(r"python-client-\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"urllib3:\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"source_tag=my_source_tag", useragent) is not None + assert ( + re.search(r"^python-client-\d+\.\d+\.\d+; source_tag=my_source_tag$", useragent) + is not None + ) def 
test_source_tag_is_normalized(self): config = ConfigBuilder.build( api_key="my-api-key", host="https://my-controller-host", source_tag="my source tag!!!!" ) useragent = get_user_agent(config) - assert re.search(r"source_tag=my_source_tag", useragent) is not None + assert ( + re.search(r"^python-client-\d+\.\d+\.\d+; source_tag=my_source_tag$", useragent) + is not None + ) + assert "!!!!" not in useragent config = ConfigBuilder.build( api_key="my-api-key", host="https://my-controller-host", source_tag="My Source Tag" ) useragent = get_user_agent(config) - assert re.search(r"source_tag=my_source_tag", useragent) is not None + assert ( + re.search(r"^python-client-\d+\.\d+\.\d+; source_tag=my_source_tag$", useragent) + is not None + ) + assert "My Source Tag" not in useragent config = ConfigBuilder.build( api_key="my-api-key", @@ -46,25 +54,31 @@ def test_source_tag_is_normalized(self): source_tag=" My Source Tag 123 #### !! ", ) useragent = get_user_agent(config) - assert re.search(r"source_tag=my_source_tag_123", useragent) is not None + assert ( + re.search(r"^python-client-\d+\.\d+\.\d+; source_tag=my_source_tag_123$", useragent) + is not None + ) config = ConfigBuilder.build( api_key="my-api-key", host="https://my-controller-host", source_tag="colon:allowed" ) useragent = get_user_agent(config) - assert re.search(r"source_tag=colon:allowed", useragent) is not None + assert ( + re.search(r"^python-client-\d+\.\d+\.\d+; source_tag=colon:allowed$", useragent) + is not None + ) def test_user_agent_grpc(self): config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host") useragent = get_user_agent_grpc(config) assert re.search(r"python-client\[grpc\]-\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"urllib3:\d+\.\d+\.\d+", useragent) is not None def test_user_agent_grpc_with_source_tag(self): config = ConfigBuilder.build( api_key="my-api-key", host="https://my-controller-host", source_tag="my_source_tag" ) useragent = 
get_user_agent_grpc(config) - assert re.search(r"python-client\[grpc\]-\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"urllib3:\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"source_tag=my_source_tag", useragent) is not None + assert ( + re.search(r"^python-client\[grpc\]-\d+\.\d+\.\d+; source_tag=my_source_tag$", useragent) + is not None + ) From aaa7104e00e07f89891d8bfa1d3dd5d40be20c09 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 11 Apr 2025 12:34:30 -0400 Subject: [PATCH 07/48] Refactor PluginAware to do lazy loading --- pinecone/control/pinecone.py | 7 +- pinecone/data/features/inference/inference.py | 4 +- pinecone/data/index.py | 6 +- pinecone/utils/plugin_aware.py | 92 +++++++++++++++++-- 4 files changed, 91 insertions(+), 18 deletions(-) diff --git a/pinecone/control/pinecone.py b/pinecone/control/pinecone.py index f3c8f404..17b3d953 100644 --- a/pinecone/control/pinecone.py +++ b/pinecone/control/pinecone.py @@ -43,7 +43,7 @@ """ @private """ -class Pinecone(PineconeDBControlInterface, PluginAware): +class Pinecone(PluginAware, PineconeDBControlInterface): """ A client for interacting with Pinecone's vector database. 
@@ -107,9 +107,8 @@ def __init__( self.index_host_store = IndexHostStore() """ @private """ - self.load_plugins( - config=self.config, openapi_config=self.openapi_config, pool_threads=self.pool_threads - ) + # Initialize PluginAware first, which will then call PineconeDBControlInterface.__init__ + super().__init__() @property def inference(self): diff --git a/pinecone/data/features/inference/inference.py b/pinecone/data/features/inference/inference.py index 71ada564..9ab34e33 100644 --- a/pinecone/data/features/inference/inference.py +++ b/pinecone/data/features/inference/inference.py @@ -63,9 +63,7 @@ def __init__(self, config, openapi_config, **kwargs) -> None: api_version=API_VERSION, ) - self.load_plugins( - config=self.config, openapi_config=self.openapi_config, pool_threads=self.pool_threads - ) + super().__init__() # Initialize PluginAware def embed( self, diff --git a/pinecone/data/index.py b/pinecone/data/index.py index ebd5cecd..a228bfbe 100644 --- a/pinecone/data/index.py +++ b/pinecone/data/index.py @@ -55,7 +55,7 @@ def parse_query_response(response: QueryResponse): return response -class Index(IndexInterface, ImportFeatureMixin, PluginAware): +class Index(PluginAware, IndexInterface, ImportFeatureMixin): """ A client for interacting with a Pinecone index via REST API. For improved performance, use the Pinecone GRPC index client. 
@@ -101,10 +101,6 @@ def __init__( # Pass the same api_client to the ImportFeatureMixin super().__init__(api_client=self._api_client) - self.load_plugins( - config=self.config, openapi_config=self.openapi_config, pool_threads=self.pool_threads - ) - def _openapi_kwargs(self, kwargs: Dict[str, Any]) -> Dict[str, Any]: return filter_dict(kwargs, OPENAPI_ENDPOINT_PARAMS) diff --git a/pinecone/utils/plugin_aware.py b/pinecone/utils/plugin_aware.py index ce1e4b87..a99223e2 100644 --- a/pinecone/utils/plugin_aware.py +++ b/pinecone/utils/plugin_aware.py @@ -1,8 +1,8 @@ +from typing import Any from .setup_openapi_client import build_plugin_setup_client from pinecone.config import Config from pinecone.openapi_support.configuration import Configuration as OpenApiConfig - from pinecone_plugin_interface import load_and_install as install_plugins import logging @@ -11,17 +11,97 @@ class PluginAware: + """ + Base class for classes that support plugin loading. + + This class provides functionality to lazily load plugins when they are first accessed. + Subclasses must set the following attributes before calling super().__init__(): + - config: Config + - openapi_config: OpenApiConfig + - pool_threads: int + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """ + Initialize the PluginAware class. + + Args: + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. + + Raises: + AttributeError: If required attributes are not set in the subclass. 
+ """ + logger.debug("PluginAware __init__ called for %s", self.__class__.__name__) + + # Check for required attributes after super().__init__ has been called + missing_attrs = [] + if not hasattr(self, "config"): + missing_attrs.append("config") + if not hasattr(self, "openapi_config"): + missing_attrs.append("openapi_config") + if not hasattr(self, "pool_threads"): + missing_attrs.append("pool_threads") + + if missing_attrs: + raise AttributeError( + f"PluginAware class requires the following attributes: {', '.join(missing_attrs)}. " + f"These must be set in the {self.__class__.__name__} class's __init__ method " + f"before calling super().__init__()." + ) + + self._plugins_loaded = False + """ @private """ + + def __getattr__(self, name: str) -> Any: + """ + Called when an attribute is not found through the normal lookup process. + This allows for lazy loading of plugins when they are first accessed. + + Args: + name: The name of the attribute being accessed. + + Returns: + The requested attribute. + + Raises: + AttributeError: If the attribute cannot be found after loading plugins. + """ + if not self._plugins_loaded: + logger.debug("Loading plugins for %s", self.__class__.__name__) + self.load_plugins( + config=self.config, + openapi_config=self.openapi_config, + pool_threads=self.pool_threads, + ) + self._plugins_loaded = True + try: + return object.__getattribute__(self, name) + except AttributeError: + pass + + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + def load_plugins( self, config: Config, openapi_config: OpenApiConfig, pool_threads: int ) -> None: - """@private""" + """ + Load plugins for the parent class. + + Args: + config: The Pinecone configuration. + openapi_config: The OpenAPI configuration. + pool_threads: The number of threads in the pool. 
+ """ try: - # I don't expect this to ever throw, but wrapping this in a - # try block just in case to make sure a bad plugin doesn't - # halt client initialization. + # Build the OpenAPI client for plugin setup openapi_client_builder = build_plugin_setup_client( config=config, openapi_config=openapi_config, pool_threads=pool_threads ) + # Install plugins install_plugins(self, openapi_client_builder) + logger.debug("Plugins loaded successfully for %s", self.__class__.__name__) + except ImportError as e: + logger.warning("Failed to import plugin module: %s", e) except Exception as e: - logger.error(f"Error loading plugins: {e}") + logger.error("Error loading plugins: %s", e, exc_info=True) From b3bc5a47e48876a1e9a478f3d71be230dd61dda4 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 11 Apr 2025 12:55:16 -0400 Subject: [PATCH 08/48] Fix unit test --- tests/unit/test_control.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_control.py b/tests/unit/test_control.py index c0b909dd..ad3b2872 100644 --- a/tests/unit/test_control.py +++ b/tests/unit/test_control.py @@ -77,9 +77,12 @@ def index_list_response(): class TestControl: - def test_plugins_are_installed(self): + def test_plugins_are_lazily_loaded(self): with patch.object(PluginAware, "load_plugins") as mock_install_plugins: - Pinecone(api_key="asdf") + pc = Pinecone(api_key="asdf") + mock_install_plugins.assert_not_called() + with pytest.raises(AttributeError): + pc.foo() # Accessing a non-existent attribute should raise an AttributeError after PluginAware installs any applicable plugins mock_install_plugins.assert_called_once() def test_default_host(self): From 7b9b383bc9c029ce3be1d292e32eadff4a2efb25 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Mon, 14 Apr 2025 11:13:13 -0400 Subject: [PATCH 09/48] Add unit tests for PluginAware --- pinecone/utils/plugin_aware.py | 35 ++++++++++++++++------- tests/unit/test_plugin_aware.py | 49 +++++++++++++++++++++++++++++++++ 2 
files changed, 74 insertions(+), 10 deletions(-) create mode 100644 tests/unit/test_plugin_aware.py diff --git a/pinecone/utils/plugin_aware.py b/pinecone/utils/plugin_aware.py index a99223e2..8410397a 100644 --- a/pinecone/utils/plugin_aware.py +++ b/pinecone/utils/plugin_aware.py @@ -34,6 +34,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: """ logger.debug("PluginAware __init__ called for %s", self.__class__.__name__) + self._plugins_loaded = False + """ @private """ + # Check for required attributes after super().__init__ has been called missing_attrs = [] if not hasattr(self, "config"): @@ -50,9 +53,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: f"before calling super().__init__()." ) - self._plugins_loaded = False - """ @private """ - def __getattr__(self, name: str) -> Any: """ Called when an attribute is not found through the normal lookup process. @@ -67,17 +67,32 @@ def __getattr__(self, name: str) -> Any: Raises: AttributeError: If the attribute cannot be found after loading plugins. """ + # Check if this is one of the required attributes that should be set by subclasses + required_attrs = ["config", "openapi_config", "pool_threads"] + if name in required_attrs: + raise AttributeError( + f"'{self.__class__.__name__}' object has no attribute '{name}'. " + f"This attribute must be set in the subclass's __init__ method " + f"before calling super().__init__()." 
+ ) + if not self._plugins_loaded: logger.debug("Loading plugins for %s", self.__class__.__name__) - self.load_plugins( - config=self.config, - openapi_config=self.openapi_config, - pool_threads=self.pool_threads, - ) - self._plugins_loaded = True + # Use object.__getattribute__ to avoid triggering __getattr__ again try: - return object.__getattribute__(self, name) + config = object.__getattribute__(self, "config") + openapi_config = object.__getattribute__(self, "openapi_config") + pool_threads = object.__getattribute__(self, "pool_threads") + self.load_plugins( + config=config, openapi_config=openapi_config, pool_threads=pool_threads + ) + self._plugins_loaded = True + try: + return object.__getattribute__(self, name) + except AttributeError: + pass except AttributeError: + # If we can't get the required attributes, we can't load plugins pass raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") diff --git a/tests/unit/test_plugin_aware.py b/tests/unit/test_plugin_aware.py new file mode 100644 index 00000000..7f4329d1 --- /dev/null +++ b/tests/unit/test_plugin_aware.py @@ -0,0 +1,49 @@ +import pytest +from pinecone.utils.plugin_aware import PluginAware +from pinecone.config import Config +from pinecone.openapi_support.configuration import Configuration as OpenApiConfig + + +class TestPluginAware: + def test_errors_when_required_attributes_are_missing(self): + class Foo(PluginAware): + def __init__(self): + # does not set config, openapi_config, or pool_threads + super().__init__() + + with pytest.raises(AttributeError) as e: + Foo() + + assert "config" in str(e.value) + assert "openapi_config" in str(e.value) + assert "pool_threads" in str(e.value) + + def test_correctly_raise_attribute_errors(self): + class Foo(PluginAware): + def __init__(self): + self.config = Config() + self.openapi_config = OpenApiConfig() + self.pool_threads = 1 + + super().__init__() + + foo = Foo() + + with pytest.raises(AttributeError) as e: + foo.bar() + 
+ assert "bar" in str(e.value) + + def test_plugins_are_lazily_loaded(self): + class Pinecone(PluginAware): + def __init__(self): + self.config = Config() + self.openapi_config = OpenApiConfig() + self.pool_threads = 10 + + super().__init__() + + pc = Pinecone() + assert "assistant" not in dir(pc) + + assert pc.assistant is not None From 79c73a851a8a8691602977017d294faddfa753e3 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Mon, 14 Apr 2025 11:32:28 -0400 Subject: [PATCH 10/48] Add assistant plugin to dev deps --- poetry.lock | 25 ++++++++++++++++++++----- pyproject.toml | 1 + 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 427dc1e2..fb037257 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1001,13 +1001,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.2" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -1130,6 +1130,21 @@ pygments = ">=2.12.0" [package.extras] dev = ["hypothesis", "mypy", "pdoc-pyo3-sample-library (==1.0.11)", "pygments (>=2.14.0)", "pytest", "pytest-cov", "pytest-timeout", "ruff", "tox", "types-pygments"] +[[package]] +name = "pinecone-plugin-assistant" +version = "1.6.0" +description = "Assistant plugin for Pinecone SDK" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "pinecone_plugin_assistant-1.6.0-py3-none-any.whl", hash = 
"sha256:d742273d136fba66d020f1af01af2c6bfbc802f7ff9ddf46c590b7ea26932175"}, + {file = "pinecone_plugin_assistant-1.6.0.tar.gz", hash = "sha256:b7c531743f87269ba567dd6084b1464b62636a011564d414bc53147571b2f2c1"}, +] + +[package.dependencies] +packaging = ">=24.2,<25.0" +requests = ">=2.32.3,<3.0.0" + [[package]] name = "pinecone-plugin-interface" version = "0.0.7" @@ -1899,4 +1914,4 @@ grpc = ["googleapis-common-protos", "grpcio", "grpcio", "grpcio", "lz4", "protob [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "8a10046c5826a9773836e6b3ee50271bb0077d0faf32d709f1e65c4bb1fc53ea" +content-hash = "6e2107c224f622bcd0492b87d8a92f36318d9487af485e766b0e944e378e083a" diff --git a/pyproject.toml b/pyproject.toml index 0525d08d..ff491308 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -94,6 +94,7 @@ urllib3_mock = "0.3.3" responses = ">=0.8.1" ruff = "^0.9.3" beautifulsoup4 = "^4.13.3" +pinecone-plugin-assistant = "^1.6.0" [tool.poetry.extras] From 7933e801fc7f0d5f2b926fd71cdf0d5af8e5fbd0 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Tue, 8 Apr 2025 10:23:05 -0400 Subject: [PATCH 11/48] Refactoring --- pinecone/control/db_control.py | 54 ++++ pinecone/control/db_control_asyncio.py | 55 ++++ ...erface.py => legacy_pinecone_interface.py} | 86 +++--- pinecone/control/pinecone.py | 250 +++++++----------- pinecone/control/pinecone_asyncio.py | 216 ++++++--------- .../control/pinecone_interface_asyncio.py | 89 ++++--- pinecone/control/resources/__init__.py | 2 + pinecone/control/resources/collection.py | 27 ++ pinecone/control/resources/index.py | 185 +++++++++++++ .../control/resources_asyncio/collection.py | 32 +++ pinecone/control/resources_asyncio/index.py | 173 ++++++++++++ poetry.lock | 50 +++- pyproject.toml | 2 + 13 files changed, 837 insertions(+), 384 deletions(-) create mode 100644 pinecone/control/db_control.py create mode 100644 pinecone/control/db_control_asyncio.py rename pinecone/control/{pinecone_interface.py => 
legacy_pinecone_interface.py} (95%) create mode 100644 pinecone/control/resources/__init__.py create mode 100644 pinecone/control/resources/collection.py create mode 100644 pinecone/control/resources/index.py create mode 100644 pinecone/control/resources_asyncio/collection.py create mode 100644 pinecone/control/resources_asyncio/index.py diff --git a/pinecone/control/db_control.py b/pinecone/control/db_control.py new file mode 100644 index 00000000..ca9f54d5 --- /dev/null +++ b/pinecone/control/db_control.py @@ -0,0 +1,54 @@ +import logging +from typing import Optional, TYPE_CHECKING + +from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi +from pinecone.openapi_support.api_client import ApiClient + +from pinecone.utils import setup_openapi_client +from pinecone.core.openapi.db_control import API_VERSION + + +logger = logging.getLogger(__name__) +""" @private """ + +if TYPE_CHECKING: + from .resources.index import IndexResource + from .resources.collection import CollectionResource + + +class DBControl: + def __init__(self, config, openapi_config, pool_threads): + self.config = config + """ @private """ + + self.index_api = setup_openapi_client( + api_client_klass=ApiClient, + api_klass=ManageIndexesApi, + config=self.config, + openapi_config=self.openapi_config, + pool_threads=pool_threads, + api_version=API_VERSION, + ) + """ @private """ + + self._index_resource: Optional["IndexResource"] = None + """ @private """ + + self._collection_resource: Optional["CollectionResource"] = None + """ @private """ + + @property + def index(self) -> "IndexResource": + if self._index_resource is None: + from .resources.index import IndexResource + + self._index_resource = IndexResource(index_api=self.index_api, config=self.config) + return self._index_resource + + @property + def collection(self) -> "CollectionResource": + if self._collection_resource is None: + from .resources.collection import CollectionResource + + self._collection_resource = 
CollectionResource(self.index_api) + return self._collection_resource diff --git a/pinecone/control/db_control_asyncio.py b/pinecone/control/db_control_asyncio.py new file mode 100644 index 00000000..3f10df6b --- /dev/null +++ b/pinecone/control/db_control_asyncio.py @@ -0,0 +1,55 @@ +import logging +from typing import Optional, TYPE_CHECKING + +from pinecone.core.openapi.db_control.api.manage_indexes_api import AsyncioManageIndexesApi +from pinecone.openapi_support import AsyncioApiClient + +from pinecone.utils import setup_async_openapi_client +from pinecone.core.openapi.db_control import API_VERSION + +logger = logging.getLogger(__name__) +""" @private """ + + +if TYPE_CHECKING: + from .resources_asyncio.index import IndexResourceAsyncio + from .resources_asyncio.collection import CollectionResourceAsyncio + + +class DBControlAsyncio: + def __init__(self, config, openapi_config, pool_threads): + self.config = config + """ @private """ + + self.index_api = setup_async_openapi_client( + api_client_klass=AsyncioApiClient, + api_klass=AsyncioManageIndexesApi, + config=self.config, + openapi_config=self.openapi_config, + api_version=API_VERSION, + ) + """ @private """ + + self._index_resource: Optional["IndexResourceAsyncio"] = None + """ @private """ + + self._collection_resource: Optional["CollectionResourceAsyncio"] = None + """ @private """ + + @property + def index(self) -> "IndexResourceAsyncio": + if self._index_resource is None: + from .resources_asyncio.index import IndexResourceAsyncio + + self._index_resource = IndexResourceAsyncio( + index_api=self.index_api, config=self.config + ) + return self._index_resource + + @property + def collection(self) -> "CollectionResourceAsyncio": + if self._collection_resource is None: + from .resources_asyncio.collection import CollectionResourceAsyncio + + self._collection_resource = CollectionResourceAsyncio(self.index_api) + return self._collection_resource diff --git a/pinecone/control/pinecone_interface.py 
b/pinecone/control/legacy_pinecone_interface.py similarity index 95% rename from pinecone/control/pinecone_interface.py rename to pinecone/control/legacy_pinecone_interface.py index c183e611..a6a7779a 100644 --- a/pinecone/control/pinecone_interface.py +++ b/pinecone/control/legacy_pinecone_interface.py @@ -1,30 +1,30 @@ from abc import ABC, abstractmethod -from typing import Optional, Dict, Union - - -from pinecone.models import ( - ServerlessSpec, - PodSpec, - IndexList, - CollectionList, - IndexModel, - IndexEmbed, -) -from pinecone.enums import ( - Metric, - VectorType, - DeletionProtection, - PodType, - CloudProvider, - AwsRegion, - GcpRegion, - AzureRegion, -) -from .types import CreateIndexForModelEmbedTypedDict - - -class PineconeDBControlInterface(ABC): +from typing import Optional, Dict, Union, TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.models import ( + ServerlessSpec, + PodSpec, + IndexList, + CollectionList, + IndexModel, + IndexEmbed, + ) + from pinecone.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, + ) + from .types import CreateIndexForModelEmbedTypedDict + + +class LegacyPineconeDBControlInterface(ABC): @abstractmethod def __init__( self, @@ -190,14 +190,16 @@ def __init__( def create_index( self, name: str, - spec: Union[Dict, ServerlessSpec, PodSpec], + spec: Union[Dict, "ServerlessSpec", "PodSpec"], dimension: Optional[int], - metric: Optional[Union[Metric, str]] = Metric.COSINE, + metric: Optional[Union["Metric", str]] = "Metric.COSINE", timeout: Optional[int] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, - vector_type: Optional[Union[VectorType, str]] = VectorType.DENSE, + deletion_protection: Optional[ + Union["DeletionProtection", str] + ] = "DeletionProtection.DISABLED", + vector_type: Optional[Union["VectorType", str]] = "VectorType.DENSE", tags: Optional[Dict[str, str]] = None, - ) -> 
IndexModel: + ) -> "IndexModel": """Creates a Pinecone index. :param name: The name of the index to create. Must be unique within your project and @@ -299,13 +301,15 @@ def create_index( def create_index_for_model( self, name: str, - cloud: Union[CloudProvider, str], - region: Union[AwsRegion, GcpRegion, AzureRegion, str], - embed: Union[IndexEmbed, CreateIndexForModelEmbedTypedDict], + cloud: Union["CloudProvider", str], + region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], + embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], tags: Optional[Dict[str, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + deletion_protection: Optional[ + Union["DeletionProtection", str] + ] = "DeletionProtection.DISABLED", timeout: Optional[int] = None, - ) -> IndexModel: + ) -> "IndexModel": """ :param name: The name of the index to create. Must be unique within your project and cannot be changed once created. Allowed characters are lowercase letters, numbers, @@ -414,7 +418,7 @@ def delete_index(self, name: str, timeout: Optional[int] = None): pass @abstractmethod - def list_indexes(self) -> IndexList: + def list_indexes(self) -> "IndexList": """ :return: Returns an `IndexList` object, which is iterable and contains a list of `IndexModel` objects. The `IndexList` also has a convenience method `names()` @@ -447,7 +451,7 @@ def list_indexes(self) -> IndexList: pass @abstractmethod - def describe_index(self, name: str) -> IndexModel: + def describe_index(self, name: str) -> "IndexModel": """ :param name: the name of the index to describe. 
:return: Returns an `IndexModel` object @@ -534,8 +538,8 @@ def configure_index( self, name: str, replicas: Optional[int] = None, - pod_type: Optional[Union[PodType, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = None, + pod_type: Optional[Union["PodType", str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = None, tags: Optional[Dict[str, str]] = None, ): """ @@ -622,7 +626,7 @@ def configure_index( pass @abstractmethod - def create_collection(self, name: str, source: str): + def create_collection(self, name: str, source: str) -> None: """Create a collection from a pod-based index :param name: Name of the collection @@ -631,7 +635,7 @@ def create_collection(self, name: str, source: str): pass @abstractmethod - def list_collections(self) -> CollectionList: + def list_collections(self) -> "CollectionList": """List all collections ```python diff --git a/pinecone/control/pinecone.py b/pinecone/control/pinecone.py index 17b3d953..50937555 100644 --- a/pinecone/control/pinecone.py +++ b/pinecone/control/pinecone.py @@ -1,49 +1,47 @@ -import time import logging -from typing import Optional, Dict, Union +from typing import Optional, Dict, Union, TYPE_CHECKING from multiprocessing import cpu_count -from .index_host_store import IndexHostStore -from .pinecone_interface import PineconeDBControlInterface +from .legacy_pinecone_interface import LegacyPineconeDBControlInterface from pinecone.config import PineconeConfig, ConfigBuilder -from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi -from pinecone.openapi_support.api_client import ApiClient - - -from pinecone.utils import normalize_host, setup_openapi_client, PluginAware -from pinecone.core.openapi.db_control import API_VERSION -from pinecone.models import ( - ServerlessSpec, - PodSpec, - IndexModel, - IndexList, - CollectionList, - IndexEmbed, -) +from pinecone.utils import normalize_host, PluginAware from 
.langchain_import_warnings import _build_langchain_attribute_error_message from pinecone.utils import docslinks -from pinecone.data import _Index, _Inference, _IndexAsyncio - -from pinecone.enums import ( - Metric, - VectorType, - DeletionProtection, - PodType, - CloudProvider, - AwsRegion, - GcpRegion, - AzureRegion, -) -from .types import CreateIndexForModelEmbedTypedDict -from .request_factory import PineconeDBControlRequestFactory logger = logging.getLogger(__name__) """ @private """ - -class Pinecone(PluginAware, PineconeDBControlInterface): +if TYPE_CHECKING: + from .db_control import DBControl + from pinecone.data import ( + _Index as Index, + _Inference as Inference, + _IndexAsyncio as IndexAsyncio, + ) + from pinecone.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, + ) + from pinecone.models import ( + ServerlessSpec, + PodSpec, + IndexModel, + IndexList, + CollectionList, + IndexEmbed, + ) + from .types import CreateIndexForModelEmbedTypedDict + + +class Pinecone(PluginAware, LegacyPineconeDBControlInterface): """ A client for interacting with Pinecone's vector database. 
@@ -91,196 +89,126 @@ def __init__( self.pool_threads = pool_threads """ @private """ - self._inference = None # Lazy initialization - """ @private """ - - self.index_api = setup_openapi_client( - api_client_klass=ApiClient, - api_klass=ManageIndexesApi, - config=self.config, - openapi_config=self.openapi_config, - pool_threads=pool_threads, - api_version=API_VERSION, - ) + self._inference: Optional["Inference"] = None # Lazy initialization """ @private """ - self.index_host_store = IndexHostStore() + self._db_control: Optional["DBControl"] = None # Lazy initialization """ @private """ # Initialize PluginAware first, which will then call PineconeDBControlInterface.__init__ super().__init__() @property - def inference(self): + def inference(self) -> "Inference": """ Inference is a namespace where an instance of the `pinecone.data.features.inference.inference.Inference` class is lazily created and cached. """ if self._inference is None: + from pinecone.data import _Inference + self._inference = _Inference(config=self.config, openapi_config=self.openapi_config) return self._inference + @property + def db(self) -> "DBControl": + """ + DBControl is a namespace where an instance of the `pinecone.control.db_control.DBControl` class is lazily created and cached. 
+ """ + if self._db_control is None: + from .db_control import DBControl + + self._db_control = DBControl( + config=self.config, + openapi_config=self.openapi_config, + pool_threads=self.pool_threads, + ) + return self._db_control + def create_index( self, name: str, - spec: Union[Dict, ServerlessSpec, PodSpec], + spec: Union[Dict, "ServerlessSpec", "PodSpec"], dimension: Optional[int] = None, - metric: Optional[Union[Metric, str]] = Metric.COSINE, + metric: Optional[Union["Metric", str]] = "Metric.COSINE", timeout: Optional[int] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, - vector_type: Optional[Union[VectorType, str]] = VectorType.DENSE, + deletion_protection: Optional[ + Union["DeletionProtection", str] + ] = "DeletionProtection.DISABLED", + vector_type: Optional[Union["VectorType", str]] = "VectorType.DENSE", tags: Optional[Dict[str, str]] = None, - ) -> IndexModel: - req = PineconeDBControlRequestFactory.create_index_request( + ) -> "IndexModel": + return self.db.index.create( name=name, spec=spec, dimension=dimension, metric=metric, + timeout=timeout, deletion_protection=deletion_protection, vector_type=vector_type, tags=tags, ) - resp = self.index_api.create_index(create_index_request=req) - - if timeout == -1: - return IndexModel(resp) - return self.__poll_describe_index_until_ready(name, timeout) def create_index_for_model( self, name: str, - cloud: Union[CloudProvider, str], - region: Union[AwsRegion, GcpRegion, AzureRegion, str], - embed: Union[IndexEmbed, CreateIndexForModelEmbedTypedDict], + cloud: Union["CloudProvider", str], + region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], + embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], tags: Optional[Dict[str, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + deletion_protection: Optional[ + Union["DeletionProtection", str] + ] = "DeletionProtection.DISABLED", 
timeout: Optional[int] = None, - ) -> IndexModel: - req = PineconeDBControlRequestFactory.create_index_for_model_request( + ) -> "IndexModel": + return self.db.index.create_for_model( name=name, cloud=cloud, region=region, embed=embed, tags=tags, deletion_protection=deletion_protection, + timeout=timeout, ) - resp = self.index_api.create_index_for_model(req) - - if timeout == -1: - return IndexModel(resp) - return self.__poll_describe_index_until_ready(name, timeout) - - def __poll_describe_index_until_ready(self, name: str, timeout: Optional[int] = None): - description = None - - def is_ready() -> bool: - nonlocal description - description = self.describe_index(name=name) - return description.status.ready - - total_wait_time = 0 - if timeout is None: - # Wait indefinitely - while not is_ready(): - logger.debug( - f"Waiting for index {name} to be ready. Total wait time {total_wait_time} seconds." - ) - total_wait_time += 5 - time.sleep(5) - - else: - # Wait for a maximum of timeout seconds - while not is_ready(): - if timeout < 0: - logger.error(f"Index {name} is not ready. Timeout reached.") - link = docslinks["API_DESCRIBE_INDEX"] - timeout_msg = ( - f"Please call describe_index() to confirm index status. See docs at {link}" - ) - raise TimeoutError(timeout_msg) - - logger.debug( - f"Waiting for index {name} to be ready. 
Total wait time: {total_wait_time}" - ) - total_wait_time += 5 - time.sleep(5) - timeout -= 5 - - return description def delete_index(self, name: str, timeout: Optional[int] = None): - self.index_api.delete_index(name) - self.index_host_store.delete_host(self.config, name) + return self.db.index.delete(name=name, timeout=timeout) - if timeout == -1: - return - - if timeout is None: - while self.has_index(name): - time.sleep(5) - else: - while self.has_index(name) and timeout >= 0: - time.sleep(5) - timeout -= 5 - if timeout and timeout < 0: - raise ( - TimeoutError( - "Please call the list_indexes API ({}) to confirm if index is deleted".format( - "https://www.pinecone.io/docs/api/operation/list_indexes/" - ) - ) - ) + def list_indexes(self) -> "IndexList": + return self.db.index.list() - def list_indexes(self) -> IndexList: - response = self.index_api.list_indexes() - return IndexList(response) - - def describe_index(self, name: str) -> IndexModel: - api_instance = self.index_api - description = api_instance.describe_index(name) - host = description.host - self.index_host_store.set_host(self.config, name, host) - - return IndexModel(description) + def describe_index(self, name: str) -> "IndexModel": + return self.db.index.describe(name=name) def has_index(self, name: str) -> bool: - if name in self.list_indexes().names(): - return True - else: - return False + return self.db.index.has(name=name) def configure_index( self, name: str, replicas: Optional[int] = None, - pod_type: Optional[Union[PodType, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = None, + pod_type: Optional[Union["PodType", str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = None, tags: Optional[Dict[str, str]] = None, ): - api_instance = self.index_api - description = self.describe_index(name=name) - - req = PineconeDBControlRequestFactory.configure_index_request( - description=description, + return self.db.index.configure( + name=name, 
replicas=replicas, pod_type=pod_type, deletion_protection=deletion_protection, tags=tags, ) - api_instance.configure_index(name, configure_index_request=req) def create_collection(self, name: str, source: str) -> None: - req = PineconeDBControlRequestFactory.create_collection_request(name=name, source=source) - self.index_api.create_collection(create_collection_request=req) + return self.db.collection.create(name=name, source=source) - def list_collections(self) -> CollectionList: - response = self.index_api.list_collections() - return CollectionList(response) + def list_collections(self) -> "CollectionList": + return self.db.collection.list() def delete_collection(self, name: str) -> None: - self.index_api.delete_collection(name) + return self.db.collection.delete(name=name) def describe_collection(self, name: str): - return self.index_api.describe_collection(name).to_dict() + return self.db.collection.describe(name=name) @staticmethod def from_texts(*args, **kwargs): @@ -292,7 +220,9 @@ def from_documents(*args, **kwargs): """@private""" raise AttributeError(_build_langchain_attribute_error_message("from_documents")) - def Index(self, name: str = "", host: str = "", **kwargs): + def Index(self, name: str = "", host: str = "", **kwargs) -> "Index": + from pinecone.data import _Index + if name == "" and host == "": raise ValueError("Either name or host must be specified") @@ -318,7 +248,9 @@ def Index(self, name: str = "", host: str = "", **kwargs): **kwargs, ) - def IndexAsyncio(self, host: str, **kwargs): + def IndexAsyncio(self, host: str, **kwargs) -> "IndexAsyncio": + from pinecone.data import _IndexAsyncio + api_key = self.config.api_key openapi_config = self.openapi_config diff --git a/pinecone/control/pinecone_asyncio.py b/pinecone/control/pinecone_asyncio.py index 1373c8e4..efd5ca5e 100644 --- a/pinecone/control/pinecone_asyncio.py +++ b/pinecone/control/pinecone_asyncio.py @@ -1,40 +1,37 @@ import logging -import asyncio -from typing import Optional, 
Dict, Union +from typing import Optional, Dict, Union, TYPE_CHECKING from pinecone.config import PineconeConfig, ConfigBuilder -from pinecone.core.openapi.db_control.api.manage_indexes_api import AsyncioManageIndexesApi -from pinecone.openapi_support import AsyncioApiClient - -from pinecone.utils import normalize_host, setup_async_openapi_client -from pinecone.core.openapi.db_control import API_VERSION -from pinecone.models import ( - ServerlessSpec, - PodSpec, - IndexModel, - IndexList, - CollectionList, - IndexEmbed, -) +from pinecone.utils import normalize_host from pinecone.utils import docslinks -from pinecone.data import _IndexAsyncio, _AsyncioInference -from pinecone.enums import ( - Metric, - VectorType, - DeletionProtection, - PodType, - CloudProvider, - AwsRegion, - GcpRegion, - AzureRegion, -) -from .types import CreateIndexForModelEmbedTypedDict from .request_factory import PineconeDBControlRequestFactory from .pinecone_interface_asyncio import PineconeAsyncioDBControlInterface from .pinecone import check_realistic_host +if TYPE_CHECKING: + from .types import CreateIndexForModelEmbedTypedDict + from pinecone.data import _IndexAsyncio + from pinecone.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, + ) + from pinecone.models import ( + ServerlessSpec, + PodSpec, + IndexModel, + IndexList, + CollectionList, + IndexEmbed, + ) + logger = logging.getLogger(__name__) """ @private """ @@ -102,13 +99,7 @@ def __init__( self._inference = None # Lazy initialization """ @private """ - self.index_api = setup_async_openapi_client( - api_client_klass=AsyncioApiClient, - api_klass=AsyncioManageIndexesApi, - config=self.config, - openapi_config=self.openapi_config, - api_version=API_VERSION, - ) + self._db_control = None # Lazy initialization """ @private """ async def __aenter__(self): @@ -158,22 +149,38 @@ async def main(): @property def inference(self): - """Dynamically create and cache 
the Inference instance.""" + """Dynamically create and cache the AsyncioInference instance.""" if self._inference is None: + from pinecone.data import _AsyncioInference + self._inference = _AsyncioInference(api_client=self.index_api.api_client) return self._inference + @property + def db(self): + if self._db_control is None: + from .db_control_asyncio import DBControlAsyncio + + self._db_control = DBControlAsyncio( + config=self.config, + openapi_config=self.openapi_config, + pool_threads=self.pool_threads, + ) + return self._db_control + async def create_index( self, name: str, - spec: Union[Dict, ServerlessSpec, PodSpec], + spec: Union[Dict, "ServerlessSpec", "PodSpec"], dimension: Optional[int] = None, - metric: Optional[Union[Metric, str]] = Metric.COSINE, + metric: Optional[Union["Metric", str]] = "Metric.COSINE", timeout: Optional[int] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, - vector_type: Optional[Union[VectorType, str]] = VectorType.DENSE, + deletion_protection: Optional[ + Union["DeletionProtection", str] + ] = "DeletionProtection.DISABLED", + vector_type: Optional[Union["VectorType", str]] = "VectorType.DENSE", tags: Optional[Dict[str, str]] = None, - ) -> IndexModel: + ) -> "IndexModel": req = PineconeDBControlRequestFactory.create_index_request( name=name, spec=spec, @@ -183,145 +190,74 @@ async def create_index( vector_type=vector_type, tags=tags, ) - resp = await self.index_api.create_index(create_index_request=req) - - if timeout == -1: - return IndexModel(resp) - return await self.__poll_describe_index_until_ready(name, timeout) + resp = await self.db.index.create(create_index_request=req) + return resp async def create_index_for_model( self, name: str, - cloud: Union[CloudProvider, str], - region: Union[AwsRegion, GcpRegion, AzureRegion, str], - embed: Union[IndexEmbed, CreateIndexForModelEmbedTypedDict], + cloud: Union["CloudProvider", str], + region: Union["AwsRegion", "GcpRegion", 
"AzureRegion", str], + embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], tags: Optional[Dict[str, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + deletion_protection: Optional[ + Union["DeletionProtection", str] + ] = "DeletionProtection.DISABLED", timeout: Optional[int] = None, - ) -> IndexModel: - req = PineconeDBControlRequestFactory.create_index_for_model_request( + ) -> "IndexModel": + return await self.db.index.create_for_model( name=name, cloud=cloud, region=region, embed=embed, tags=tags, deletion_protection=deletion_protection, + timeout=timeout, ) - resp = await self.index_api.create_index_for_model(req) - - if timeout == -1: - return IndexModel(resp) - return await self.__poll_describe_index_until_ready(name, timeout) - - async def __poll_describe_index_until_ready(self, name: str, timeout: Optional[int] = None): - description = None - - async def is_ready() -> bool: - nonlocal description - description = await self.describe_index(name=name) - return description.status.ready - - total_wait_time = 0 - if timeout is None: - # Wait indefinitely - while not await is_ready(): - logger.debug( - f"Waiting for index {name} to be ready. Total wait time {total_wait_time} seconds." - ) - total_wait_time += 5 - await asyncio.sleep(5) - - else: - # Wait for a maximum of timeout seconds - while not await is_ready(): - if timeout < 0: - logger.error(f"Index {name} is not ready. Timeout reached.") - link = docslinks["API_DESCRIBE_INDEX"] - timeout_msg = ( - f"Please call describe_index() to confirm index status. See docs at {link}" - ) - raise TimeoutError(timeout_msg) - - logger.debug( - f"Waiting for index {name} to be ready. 
Total wait time: {total_wait_time}" - ) - total_wait_time += 5 - await asyncio.sleep(5) - timeout -= 5 - - return description async def delete_index(self, name: str, timeout: Optional[int] = None): - await self.index_api.delete_index(name) - - if timeout == -1: - return - - if timeout is None: - while await self.has_index(name): - await asyncio.sleep(5) - else: - while await self.has_index(name) and timeout >= 0: - await asyncio.sleep(5) - timeout -= 5 - if timeout and timeout < 0: - raise ( - TimeoutError( - "Please call the list_indexes API ({}) to confirm if index is deleted".format( - "https://www.pinecone.io/docs/api/operation/list_indexes/" - ) - ) - ) + return await self.db.index.delete(name=name, timeout=timeout) - async def list_indexes(self) -> IndexList: - response = await self.index_api.list_indexes() - return IndexList(response) + async def list_indexes(self) -> "IndexList": + return await self.db.index.list() - async def describe_index(self, name: str) -> IndexModel: - description = await self.index_api.describe_index(name) - return IndexModel(description) + async def describe_index(self, name: str) -> "IndexModel": + return await self.db.index.describe(name=name) async def has_index(self, name: str) -> bool: - available_indexes = await self.list_indexes() - if name in available_indexes.names(): - return True - else: - return False + return await self.db.index.has(name=name) async def configure_index( self, name: str, replicas: Optional[int] = None, - pod_type: Optional[Union[PodType, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = None, + pod_type: Optional[Union["PodType", str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = None, tags: Optional[Dict[str, str]] = None, ): - description = await self.describe_index(name=name) - - req = PineconeDBControlRequestFactory.configure_index_request( - description=description, + return await self.db.index.configure( + name=name, replicas=replicas, 
pod_type=pod_type, deletion_protection=deletion_protection, tags=tags, ) - await self.index_api.configure_index(name, configure_index_request=req) async def create_collection(self, name: str, source: str): - req = PineconeDBControlRequestFactory.create_collection_request(name=name, source=source) - await self.index_api.create_collection(create_collection_request=req) + return await self.db.collection.create(name=name, source=source) - async def list_collections(self) -> CollectionList: - response = await self.index_api.list_collections() - return CollectionList(response) + async def list_collections(self) -> "CollectionList": + return await self.db.collection.list() async def delete_collection(self, name: str): - await self.index_api.delete_collection(name) + return await self.db.collection.delete(name=name) async def describe_collection(self, name: str): - return await self.index_api.describe_collection(name).to_dict() + return await self.db.collection.describe(name=name) + + def IndexAsyncio(self, host: str, **kwargs) -> "_IndexAsyncio": + from pinecone.data import _IndexAsyncio - def IndexAsyncio(self, host: str, **kwargs) -> _IndexAsyncio: api_key = self.config.api_key openapi_config = self.openapi_config diff --git a/pinecone/control/pinecone_interface_asyncio.py b/pinecone/control/pinecone_interface_asyncio.py index a732bce9..139af7a5 100644 --- a/pinecone/control/pinecone_interface_asyncio.py +++ b/pinecone/control/pinecone_interface_asyncio.py @@ -1,32 +1,31 @@ from abc import ABC, abstractmethod -from typing import Optional, Dict, Union - - -from pinecone.config import Config - -from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi - - -from pinecone.models import ( - ServerlessSpec, - PodSpec, - IndexList, - CollectionList, - IndexModel, - IndexEmbed, -) -from pinecone.enums import ( - Metric, - VectorType, - DeletionProtection, - PodType, - CloudProvider, - AwsRegion, - GcpRegion, - AzureRegion, -) -from .types import 
CreateIndexForModelEmbedTypedDict +from typing import Optional, Dict, Union, TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.config import Config + + from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi + + from pinecone.models import ( + ServerlessSpec, + PodSpec, + IndexList, + CollectionList, + IndexModel, + IndexEmbed, + ) + from pinecone.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, + ) + from .types import CreateIndexForModelEmbedTypedDict class PineconeAsyncioDBControlInterface(ABC): @@ -39,10 +38,10 @@ def __init__( proxy_headers: Optional[Dict[str, str]] = None, ssl_ca_certs: Optional[str] = None, ssl_verify: Optional[bool] = None, - config: Optional[Config] = None, + config: Optional["Config"] = None, additional_headers: Optional[Dict[str, str]] = {}, pool_threads: Optional[int] = 1, - index_api: Optional[ManageIndexesApi] = None, + index_api: Optional["ManageIndexesApi"] = None, **kwargs, ): """ @@ -291,12 +290,14 @@ async def main(): async def create_index( self, name: str, - spec: Union[Dict, ServerlessSpec, PodSpec], + spec: Union[Dict, "ServerlessSpec", "PodSpec"], dimension: Optional[int], - metric: Optional[Union[Metric, str]] = Metric.COSINE, + metric: Optional[Union["Metric", str]] = "Metric.COSINE", timeout: Optional[int] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, - vector_type: Optional[Union[VectorType, str]] = VectorType.DENSE, + deletion_protection: Optional[ + Union["DeletionProtection", str] + ] = "DeletionProtection.DISABLED", + vector_type: Optional[Union["VectorType", str]] = "VectorType.DENSE", tags: Optional[Dict[str, str]] = None, ): """Creates a Pinecone index. 
@@ -408,13 +409,15 @@ async def main(): async def create_index_for_model( self, name: str, - cloud: Union[CloudProvider, str], - region: Union[AwsRegion, GcpRegion, AzureRegion, str], - embed: Union[IndexEmbed, CreateIndexForModelEmbedTypedDict], + cloud: Union["CloudProvider", str], + region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], + embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], tags: Optional[Dict[str, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + deletion_protection: Optional[ + Union["DeletionProtection", str] + ] = "DeletionProtection.DISABLED", timeout: Optional[int] = None, - ) -> IndexModel: + ) -> "IndexModel": """ :param name: The name of the index to create. Must be unique within your project and cannot be changed once created. Allowed characters are lowercase letters, numbers, @@ -533,7 +536,7 @@ async def main(): pass @abstractmethod - async def list_indexes(self) -> IndexList: + async def list_indexes(self) -> "IndexList": """ :return: Returns an `IndexList` object, which is iterable and contains a list of `IndexModel` objects. The `IndexList` also has a convenience method `names()` @@ -574,7 +577,7 @@ async def main(): pass @abstractmethod - async def describe_index(self, name: str) -> IndexModel: + async def describe_index(self, name: str) -> "IndexModel": """ :param name: the name of the index to describe. 
:return: Returns an `IndexModel` object @@ -669,8 +672,8 @@ async def configure_index( self, name: str, replicas: Optional[int] = None, - pod_type: Optional[Union[PodType, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = None, + pod_type: Optional[Union["PodType", str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = None, tags: Optional[Dict[str, str]] = None, ): """ @@ -779,7 +782,7 @@ async def create_collection(self, name: str, source: str): pass @abstractmethod - async def list_collections(self) -> CollectionList: + async def list_collections(self) -> "CollectionList": """List all collections ```python diff --git a/pinecone/control/resources/__init__.py b/pinecone/control/resources/__init__.py new file mode 100644 index 00000000..cc904d53 --- /dev/null +++ b/pinecone/control/resources/__init__.py @@ -0,0 +1,2 @@ +from .index import IndexResource +from .collection import CollectionResource diff --git a/pinecone/control/resources/collection.py b/pinecone/control/resources/collection.py new file mode 100644 index 00000000..dba438b1 --- /dev/null +++ b/pinecone/control/resources/collection.py @@ -0,0 +1,27 @@ +import logging + +from pinecone.models import CollectionList +from ..request_factory import PineconeDBControlRequestFactory + +logger = logging.getLogger(__name__) +""" @private """ + + +class CollectionResource: + def __init__(self, index_api): + self.index_api = index_api + """ @private """ + + def create(self, name: str, source: str) -> None: + req = PineconeDBControlRequestFactory.create_collection_request(name=name, source=source) + self.index_api.create_collection(create_collection_request=req) + + def list(self) -> CollectionList: + response = self.index_api.list_collections() + return CollectionList(response) + + def delete(self, name: str) -> None: + self.index_api.delete_collection(name) + + def describe(self, name: str): + return self.index_api.describe_collection(name).to_dict() diff 
--git a/pinecone/control/resources/index.py b/pinecone/control/resources/index.py new file mode 100644 index 00000000..130b8e4f --- /dev/null +++ b/pinecone/control/resources/index.py @@ -0,0 +1,185 @@ +import time +import logging +from typing import Optional, Dict, Union + +from ..index_host_store import IndexHostStore + +from pinecone.models import ServerlessSpec, PodSpec, IndexModel, IndexList, IndexEmbed +from pinecone.utils import docslinks + +from pinecone.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, +) +from ..types import CreateIndexForModelEmbedTypedDict +from ..request_factory import PineconeDBControlRequestFactory + +logger = logging.getLogger(__name__) +""" @private """ + + +class IndexResource: + def __init__(self, index_api, config): + self.index_api = index_api + """ @private """ + + self.config = config + """ @private """ + + self.index_host_store = IndexHostStore() + """ @private """ + + def create( + self, + name: str, + spec: Union[Dict, ServerlessSpec, PodSpec], + dimension: Optional[int] = None, + metric: Optional[Union[Metric, str]] = Metric.COSINE, + timeout: Optional[int] = None, + deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + vector_type: Optional[Union[VectorType, str]] = VectorType.DENSE, + tags: Optional[Dict[str, str]] = None, + ) -> IndexModel: + req = PineconeDBControlRequestFactory.create_index_request( + name=name, + spec=spec, + dimension=dimension, + metric=metric, + deletion_protection=deletion_protection, + vector_type=vector_type, + tags=tags, + ) + resp = self.index_api.create_index(create_index_request=req) + + if timeout == -1: + return IndexModel(resp) + return self.__poll_describe_index_until_ready(name, timeout) + + def create_for_model( + self, + name: str, + cloud: Union[CloudProvider, str], + region: Union[AwsRegion, GcpRegion, AzureRegion, str], + embed: Union[IndexEmbed, 
CreateIndexForModelEmbedTypedDict], + tags: Optional[Dict[str, str]] = None, + deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + timeout: Optional[int] = None, + ) -> IndexModel: + req = PineconeDBControlRequestFactory.create_index_for_model_request( + name=name, + cloud=cloud, + region=region, + embed=embed, + tags=tags, + deletion_protection=deletion_protection, + ) + resp = self.index_api.create_index_for_model(req) + + if timeout == -1: + return IndexModel(resp) + return self.__poll_describe_index_until_ready(name, timeout) + + def __poll_describe_index_until_ready(self, name: str, timeout: Optional[int] = None): + description = None + + def is_ready() -> bool: + nonlocal description + description = self.describe(name=name) + return description.status.ready + + total_wait_time = 0 + if timeout is None: + # Wait indefinitely + while not is_ready(): + logger.debug( + f"Waiting for index {name} to be ready. Total wait time {total_wait_time} seconds." + ) + total_wait_time += 5 + time.sleep(5) + + else: + # Wait for a maximum of timeout seconds + while not is_ready(): + if timeout < 0: + logger.error(f"Index {name} is not ready. Timeout reached.") + link = docslinks["API_DESCRIBE_INDEX"] + timeout_msg = ( + f"Please call describe_index() to confirm index status. See docs at {link}" + ) + raise TimeoutError(timeout_msg) + + logger.debug( + f"Waiting for index {name} to be ready. 
Total wait time: {total_wait_time}" + ) + total_wait_time += 5 + time.sleep(5) + timeout -= 5 + + return description + + def delete(self, name: str, timeout: Optional[int] = None): + self.index_api.delete_index(name) + self.index_host_store.delete_host(self.config, name) + + if timeout == -1: + return + + if timeout is None: + while self.has_index(name): + time.sleep(5) + else: + while self.has_index(name) and timeout >= 0: + time.sleep(5) + timeout -= 5 + if timeout and timeout < 0: + raise ( + TimeoutError( + "Please call the list_indexes API ({}) to confirm if index is deleted".format( + "https://www.pinecone.io/docs/api/operation/list_indexes/" + ) + ) + ) + + def list(self) -> IndexList: + response = self.index_api.list_indexes() + return IndexList(response) + + def describe(self, name: str) -> IndexModel: + api_instance = self.index_api + description = api_instance.describe_index(name) + host = description.host + self.index_host_store.set_host(self.config, name, host) + + return IndexModel(description) + + def has(self, name: str) -> bool: + if name in self.list().names(): + return True + else: + return False + + def configure( + self, + name: str, + replicas: Optional[int] = None, + pod_type: Optional[Union[PodType, str]] = None, + deletion_protection: Optional[Union[DeletionProtection, str]] = None, + tags: Optional[Dict[str, str]] = None, + ): + api_instance = self.index_api + description = self.describe(name=name) + + req = PineconeDBControlRequestFactory.configure_index_request( + description=description, + replicas=replicas, + pod_type=pod_type, + deletion_protection=deletion_protection, + tags=tags, + ) + api_instance.configure_index(name, configure_index_request=req) diff --git a/pinecone/control/resources_asyncio/collection.py b/pinecone/control/resources_asyncio/collection.py new file mode 100644 index 00000000..42d430eb --- /dev/null +++ b/pinecone/control/resources_asyncio/collection.py @@ -0,0 +1,32 @@ +import logging +from typing import 
TYPE_CHECKING + + +from pinecone.models import CollectionList + +from ..request_factory import PineconeDBControlRequestFactory + +logger = logging.getLogger(__name__) +""" @private """ + +if TYPE_CHECKING: + pass + + +class CollectionResourceAsyncio: + def __init__(self, index_api): + self.index_api = index_api + + async def create(self, name: str, source: str): + req = PineconeDBControlRequestFactory.create_collection_request(name=name, source=source) + await self.index_api.create_collection(create_collection_request=req) + + async def list(self) -> CollectionList: + response = await self.index_api.list_collections() + return CollectionList(response) + + async def delete(self, name: str): + await self.index_api.delete_collection(name) + + async def describe(self, name: str): + return await self.index_api.describe_collection(name).to_dict() diff --git a/pinecone/control/resources_asyncio/index.py b/pinecone/control/resources_asyncio/index.py new file mode 100644 index 00000000..17faffc5 --- /dev/null +++ b/pinecone/control/resources_asyncio/index.py @@ -0,0 +1,173 @@ +import logging +import asyncio +from typing import Optional, Dict, Union + + +from pinecone.models import ServerlessSpec, PodSpec, IndexModel, IndexList, IndexEmbed +from pinecone.utils import docslinks + +from pinecone.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, +) +from .types import CreateIndexForModelEmbedTypedDict +from .request_factory import PineconeDBControlRequestFactory + +logger = logging.getLogger(__name__) +""" @private """ + + +class IndexResourceAsyncio: + def __init__(self, index_api, config): + self.index_api = index_api + self.config = config + + async def create( + self, + name: str, + spec: Union[Dict, ServerlessSpec, PodSpec], + dimension: Optional[int] = None, + metric: Optional[Union[Metric, str]] = Metric.COSINE, + timeout: Optional[int] = None, + deletion_protection: 
Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + vector_type: Optional[Union[VectorType, str]] = VectorType.DENSE, + tags: Optional[Dict[str, str]] = None, + ) -> IndexModel: + req = PineconeDBControlRequestFactory.create_index_request( + name=name, + spec=spec, + dimension=dimension, + metric=metric, + deletion_protection=deletion_protection, + vector_type=vector_type, + tags=tags, + ) + resp = await self.index_api.create_index(create_index_request=req) + + if timeout == -1: + return IndexModel(resp) + return await self.__poll_describe_index_until_ready(name, timeout) + + async def create_for_model( + self, + name: str, + cloud: Union[CloudProvider, str], + region: Union[AwsRegion, GcpRegion, AzureRegion, str], + embed: Union[IndexEmbed, CreateIndexForModelEmbedTypedDict], + tags: Optional[Dict[str, str]] = None, + deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + timeout: Optional[int] = None, + ) -> IndexModel: + req = PineconeDBControlRequestFactory.create_index_for_model_request( + name=name, + cloud=cloud, + region=region, + embed=embed, + tags=tags, + deletion_protection=deletion_protection, + ) + resp = await self.index_api.create_index_for_model(req) + + if timeout == -1: + return IndexModel(resp) + return await self.__poll_describe_index_until_ready(name, timeout) + + async def __poll_describe_index_until_ready(self, name: str, timeout: Optional[int] = None): + description = None + + async def is_ready() -> bool: + nonlocal description + description = await self.describe(name=name) + return description.status.ready + + total_wait_time = 0 + if timeout is None: + # Wait indefinitely + while not await is_ready(): + logger.debug( + f"Waiting for index {name} to be ready. Total wait time {total_wait_time} seconds." 
+ ) + total_wait_time += 5 + await asyncio.sleep(5) + + else: + # Wait for a maximum of timeout seconds + while not await is_ready(): + if timeout < 0: + logger.error(f"Index {name} is not ready. Timeout reached.") + link = docslinks["API_DESCRIBE_INDEX"] + timeout_msg = ( + f"Please call describe_index() to confirm index status. See docs at {link}" + ) + raise TimeoutError(timeout_msg) + + logger.debug( + f"Waiting for index {name} to be ready. Total wait time: {total_wait_time}" + ) + total_wait_time += 5 + await asyncio.sleep(5) + timeout -= 5 + + return description + + async def delete(self, name: str, timeout: Optional[int] = None): + await self.index_api.delete_index(name) + + if timeout == -1: + return + + if timeout is None: + while await self.has(name): + await asyncio.sleep(5) + else: + while await self.has(name) and timeout >= 0: + await asyncio.sleep(5) + timeout -= 5 + if timeout and timeout < 0: + raise ( + TimeoutError( + "Please call the list_indexes API ({}) to confirm if index is deleted".format( + "https://www.pinecone.io/docs/api/operation/list_indexes/" + ) + ) + ) + + async def list(self) -> IndexList: + response = await self.index_api.list_indexes() + return IndexList(response) + + async def describe(self, name: str) -> IndexModel: + description = await self.index_api.describe_index(name) + return IndexModel(description) + + async def has(self, name: str) -> bool: + available_indexes = await self.list() + if name in available_indexes.names(): + return True + else: + return False + + async def configure( + self, + name: str, + replicas: Optional[int] = None, + pod_type: Optional[Union[PodType, str]] = None, + deletion_protection: Optional[Union[DeletionProtection, str]] = None, + tags: Optional[Dict[str, str]] = None, + ): + description = await self.describe(name=name) + + req = PineconeDBControlRequestFactory.configure_index_request( + description=description, + replicas=replicas, + pod_type=pod_type, + 
deletion_protection=deletion_protection, + tags=tags, + ) + await self.index_api.configure_index(name, configure_index_request=req) diff --git a/poetry.lock b/poetry.lock index fb037257..048d84c6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1347,6 +1347,29 @@ files = [ googleapis-common-protos = "*" protobuf = ">=4.21.0" +[[package]] +name = "psutil" +version = "7.0.0" +description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." +optional = false +python-versions = ">=3.6" +files = [ + {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, + {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, + {file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"}, + {file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"}, + {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, + {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, + {file = 
"psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, +] + +[package.extras] +dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] + [[package]] name = "py-cpuinfo" version = "9.0.0" @@ -1683,6 +1706,17 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tuna" +version = "0.5.11" +description = "Visualize Python performance profiles" +optional = false +python-versions = ">=3.6" +files = [ + {file = "tuna-0.5.11-py3-none-any.whl", hash = "sha256:ab352a6d836014ace585ecd882148f1f7c68be9ea4bf9e9298b7127594dab2ef"}, + {file = "tuna-0.5.11.tar.gz", hash = "sha256:d47f3e39e80af961c8df016ac97d1643c3c60b5eb451299da0ab5fe411d8866c"}, +] + [[package]] name = "types-protobuf" version = "4.24.0.4" @@ -1811,6 +1845,20 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +[[package]] +name = "vprof" +version = "0.38" +description = "Visual profiler for Python" +optional = false +python-versions = "*" +files = [ + {file = "vprof-0.38-py3-none-any.whl", hash = "sha256:91b91d8868176c29e0fe3426c9239d11cd192c7144c7baf26a211e48923a5ee8"}, + {file = "vprof-0.38.tar.gz", hash = 
"sha256:7f1000912eeb7a450c7c94d3cc96739af45ad0ff01d5abcc0b09a175d40ffadb"}, +] + +[package.dependencies] +psutil = ">=3" + [[package]] name = "yarl" version = "1.17.2" @@ -1914,4 +1962,4 @@ grpc = ["googleapis-common-protos", "grpcio", "grpcio", "grpcio", "lz4", "protob [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "6e2107c224f622bcd0492b87d8a92f36318d9487af485e766b0e944e378e083a" +content-hash = "96c8c770a4626bc9606a7b8e16537e217f238e20c217baa1206f4ef9debe5e82" diff --git a/pyproject.toml b/pyproject.toml index ff491308..5b8a11ac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -95,6 +95,8 @@ responses = ">=0.8.1" ruff = "^0.9.3" beautifulsoup4 = "^4.13.3" pinecone-plugin-assistant = "^1.6.0" +vprof = "^0.38" +tuna = "^0.5.11" [tool.poetry.extras] From 2c6e1ce52b8d94376be62ba7b4c5bc1ecb99e979 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Tue, 8 Apr 2025 10:40:29 -0400 Subject: [PATCH 12/48] Refactoring --- pinecone/control/pinecone.py | 2 +- pinecone/control/resources/index.py | 10 ++++++++-- pinecone/control/resources_asyncio/index.py | 4 ++-- pinecone/grpc/pinecone.py | 2 +- 4 files changed, 12 insertions(+), 6 deletions(-) diff --git a/pinecone/control/pinecone.py b/pinecone/control/pinecone.py index 50937555..87ae5b9d 100644 --- a/pinecone/control/pinecone.py +++ b/pinecone/control/pinecone.py @@ -237,7 +237,7 @@ def Index(self, name: str = "", host: str = "", **kwargs) -> "Index": index_host = normalize_host(host) else: # Otherwise, get host url from describe_index using the index name - index_host = self.index_host_store.get_host(self.index_api, self.config, name) + index_host = self.db.index._get_host(name) return _Index( host=index_host, diff --git a/pinecone/control/resources/index.py b/pinecone/control/resources/index.py index 130b8e4f..df6fd203 100644 --- a/pinecone/control/resources/index.py +++ b/pinecone/control/resources/index.py @@ -131,10 +131,10 @@ def delete(self, name: str, timeout: Optional[int] = None): return 
if timeout is None: - while self.has_index(name): + while self.has(name): time.sleep(5) else: - while self.has_index(name) and timeout >= 0: + while self.has(name) and timeout >= 0: time.sleep(5) timeout -= 5 if timeout and timeout < 0: @@ -183,3 +183,9 @@ def configure( tags=tags, ) api_instance.configure_index(name, configure_index_request=req) + + def _get_host(self, name: str) -> str: + """@private""" + return self.index_host_store.get_host( + api=self.index_api, config=self.config, index_name=name + ) diff --git a/pinecone/control/resources_asyncio/index.py b/pinecone/control/resources_asyncio/index.py index 17faffc5..d1bc037f 100644 --- a/pinecone/control/resources_asyncio/index.py +++ b/pinecone/control/resources_asyncio/index.py @@ -16,8 +16,8 @@ GcpRegion, AzureRegion, ) -from .types import CreateIndexForModelEmbedTypedDict -from .request_factory import PineconeDBControlRequestFactory +from pinecone.control.types import CreateIndexForModelEmbedTypedDict +from pinecone.control.request_factory import PineconeDBControlRequestFactory logger = logging.getLogger(__name__) """ @private """ diff --git a/pinecone/grpc/pinecone.py b/pinecone/grpc/pinecone.py index c78481ff..b03b139a 100644 --- a/pinecone/grpc/pinecone.py +++ b/pinecone/grpc/pinecone.py @@ -122,7 +122,7 @@ def Index(self, name: str = "", host: str = "", **kwargs): raise ValueError("Either name or host must be specified") # Use host if it is provided, otherwise get host from describe_index - index_host = host or self.index_host_store.get_host(self.index_api, self.config, name) + index_host = host or self.db.index._get_host(name) pt = kwargs.pop("pool_threads", None) or self.pool_threads From a08ae7348ea40ee9c6b660d6070eb574785e2aae Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Wed, 9 Apr 2025 13:07:12 -0400 Subject: [PATCH 13/48] WIP --- pinecone/__init__.py | 10 +- pinecone/config/__init__.py | 1 + pinecone/config/config.py | 14 +- .../{openapi.py => openapi_config_factory.py} | 13 +- 
pinecone/config/openapi_configuration.py | 441 +++++++++++++++++ pinecone/control/__init__.py | 6 - pinecone/core/openapi/db_control/__init__.py | 2 +- pinecone/core/openapi/db_data/__init__.py | 2 +- pinecone/core/openapi/inference/__init__.py | 2 +- pinecone/data/__init__.py | 34 -- pinecone/db_control/__init__.py | 7 + .../{control => db_control}/db_control.py | 14 +- .../db_control_asyncio.py | 8 +- pinecone/{ => db_control}/enums/__init__.py | 0 pinecone/{ => db_control}/enums/clouds.py | 0 .../enums/deletion_protection.py | 0 pinecone/{ => db_control}/enums/metric.py | 0 .../enums/pod_index_environment.py | 0 pinecone/{ => db_control}/enums/pod_type.py | 0 .../{ => db_control}/enums/vector_type.py | 0 .../index_host_store.py | 0 pinecone/{ => db_control}/models/__init__.py | 2 +- .../models/collection_description.py | 0 .../models/collection_list.py | 0 .../models/index_description.py | 0 .../{ => db_control}/models/index_list.py | 0 .../{ => db_control}/models/index_model.py | 2 +- .../{ => db_control}/models/list_response.py | 0 pinecone/{ => db_control}/models/pod_spec.py | 0 .../models/serverless_spec.py | 0 .../{control => db_control}/repr_overrides.py | 4 +- .../request_factory.py | 4 +- .../resources}/__init__.py | 0 .../db_control/resources/asyncio/__init__.py | 2 + .../resources/asyncio}/collection.py | 4 +- .../resources/asyncio}/index.py | 12 +- .../resources/sync}/__init__.py | 0 .../resources/sync}/collection.py | 4 +- .../resources/sync}/index.py | 13 +- .../{control => db_control}/types/__init__.py | 0 .../types/create_index_for_model_embed.py | 4 +- pinecone/db_data/__init__.py | 53 +++ .../{data => db_data}/dataclasses/__init__.py | 0 .../dataclasses/fetch_response.py | 0 .../dataclasses/search_query.py | 0 .../dataclasses/search_query_vector.py | 0 .../dataclasses/search_rerank.py | 0 .../dataclasses/sparse_values.py | 0 .../{data => db_data}/dataclasses/utils.py | 0 .../{data => db_data}/dataclasses/vector.py | 0 pinecone/{data => 
db_data}/errors.py | 0 pinecone/db_data/features/__init__.py | 13 + .../features/bulk_import/__init__.py | 0 .../features/bulk_import/bulk_import.py | 0 .../bulk_import/bulk_import_asyncio.py | 0 .../bulk_import_request_factory.py | 0 .../db_data/features/inference/__init__.py | 11 + pinecone/{data => db_data}/import_error.py | 0 pinecone/{data => db_data}/index.py | 0 pinecone/{data => db_data}/index_asyncio.py | 0 .../index_asyncio_interface.py | 0 pinecone/{data => db_data}/interfaces.py | 0 .../query_results_aggregator.py | 0 pinecone/{data => db_data}/request_factory.py | 0 .../sparse_values_factory.py | 0 pinecone/{data => db_data}/types/__init__.py | 0 .../{data => db_data}/types/query_filter.py | 0 .../types/search_query_typed_dict.py | 0 .../types/search_query_vector_typed_dict.py | 0 .../types/search_rerank_typed_dict.py | 0 .../types/sparse_vector_typed_dict.py | 0 .../types/vector_metadata_dict.py | 0 .../{data => db_data}/types/vector_tuple.py | 0 .../types/vector_typed_dict.py | 0 pinecone/{data => db_data}/vector_factory.py | 0 pinecone/exceptions/__init__.py | 6 +- pinecone/exceptions/exceptions.py | 141 +++++- pinecone/grpc/__init__.py | 2 +- pinecone/grpc/index_grpc.py | 6 +- pinecone/grpc/pinecone.py | 4 +- pinecone/grpc/sparse_values_factory.py | 4 +- pinecone/grpc/utils.py | 2 +- pinecone/grpc/vector_factory_grpc.py | 4 +- .../{data/features => }/inference/__init__.py | 0 .../features => }/inference/inference.py | 0 .../inference/inference_asyncio.py | 0 .../inference/inference_request_builder.py | 0 .../inference/models/__init__.py | 0 .../inference/models/embedding_list.py | 0 .../{ => inference}/models/index_embed.py | 4 +- .../inference/models/rerank_result.py | 0 .../features => }/inference/repl_overrides.py | 0 .../langchain_import_warnings.py | 0 .../legacy_pinecone_interface.py | 6 +- pinecone/openapi_support/__init__.py | 2 +- pinecone/openapi_support/api_client.py | 21 +- .../openapi_support/asyncio_api_client.py | 2 +- 
pinecone/openapi_support/configuration.py | 442 +----------------- .../openapi_support/configuration_lazy.py | 7 + pinecone/openapi_support/endpoint_utils.py | 2 +- pinecone/openapi_support/exceptions.py | 141 +----- pinecone/openapi_support/model_utils.py | 3 +- pinecone/openapi_support/rest_aiohttp.py | 2 +- pinecone/openapi_support/rest_urllib3.py | 2 +- pinecone/{control => }/pinecone.py | 24 +- pinecone/{control => }/pinecone_asyncio.py | 31 +- .../pinecone_interface_asyncio.py | 6 +- pinecone/utils/docslinks.py | 7 +- pinecone/utils/plugin_aware.py | 2 +- .../data/test_query_namespaces_sparse.py | 2 +- .../data/test_search_and_upsert_records.py | 2 +- .../data/test_upsert_from_dataframe.py | 2 +- tests/integration/data_asyncio/conftest.py | 2 +- tests/integration/helpers/helpers.py | 2 +- tests/perf/test_query_results_aggregator.py | 2 +- tests/unit/data/test_bulk_import.py | 2 +- tests/unit/data/test_request_factory.py | 2 +- tests/unit/data/test_vector_factory.py | 2 +- tests/unit/test_config.py | 2 +- tests/unit/test_config_builder.py | 2 +- tests/unit/test_index.py | 2 +- tests/unit/test_query_results_aggregator.py | 2 +- tests/upgrade/test_v6_upgrade.py | 174 +++++++ 123 files changed, 1000 insertions(+), 755 deletions(-) rename pinecone/config/{openapi.py => openapi_config_factory.py} (92%) create mode 100644 pinecone/config/openapi_configuration.py delete mode 100644 pinecone/control/__init__.py delete mode 100644 pinecone/data/__init__.py create mode 100644 pinecone/db_control/__init__.py rename pinecone/{control => db_control}/db_control.py (79%) rename pinecone/{control => db_control}/db_control_asyncio.py (86%) rename pinecone/{ => db_control}/enums/__init__.py (100%) rename pinecone/{ => db_control}/enums/clouds.py (100%) rename pinecone/{ => db_control}/enums/deletion_protection.py (100%) rename pinecone/{ => db_control}/enums/metric.py (100%) rename pinecone/{ => db_control}/enums/pod_index_environment.py (100%) rename pinecone/{ => 
db_control}/enums/pod_type.py (100%) rename pinecone/{ => db_control}/enums/vector_type.py (100%) rename pinecone/{control => db_control}/index_host_store.py (100%) rename pinecone/{ => db_control}/models/__init__.py (90%) rename pinecone/{ => db_control}/models/collection_description.py (100%) rename pinecone/{ => db_control}/models/collection_list.py (100%) rename pinecone/{ => db_control}/models/index_description.py (100%) rename pinecone/{ => db_control}/models/index_list.py (100%) rename pinecone/{ => db_control}/models/index_model.py (81%) rename pinecone/{ => db_control}/models/list_response.py (100%) rename pinecone/{ => db_control}/models/pod_spec.py (100%) rename pinecone/{ => db_control}/models/serverless_spec.py (100%) rename pinecone/{control => db_control}/repr_overrides.py (79%) rename pinecone/{control => db_control}/request_factory.py (98%) rename pinecone/{data/features => db_control/resources}/__init__.py (100%) create mode 100644 pinecone/db_control/resources/asyncio/__init__.py rename pinecone/{control/resources_asyncio => db_control/resources/asyncio}/collection.py (85%) rename pinecone/{control/resources_asyncio => db_control/resources/asyncio}/index.py (93%) rename pinecone/{control/resources => db_control/resources/sync}/__init__.py (100%) rename pinecone/{control/resources => db_control/resources/sync}/collection.py (84%) rename pinecone/{control/resources => db_control/resources/sync}/index.py (92%) rename pinecone/{control => db_control}/types/__init__.py (100%) rename pinecone/{control => db_control}/types/create_index_for_model_embed.py (72%) create mode 100644 pinecone/db_data/__init__.py rename pinecone/{data => db_data}/dataclasses/__init__.py (100%) rename pinecone/{data => db_data}/dataclasses/fetch_response.py (100%) rename pinecone/{data => db_data}/dataclasses/search_query.py (100%) rename pinecone/{data => db_data}/dataclasses/search_query_vector.py (100%) rename pinecone/{data => db_data}/dataclasses/search_rerank.py (100%) 
rename pinecone/{data => db_data}/dataclasses/sparse_values.py (100%) rename pinecone/{data => db_data}/dataclasses/utils.py (100%) rename pinecone/{data => db_data}/dataclasses/vector.py (100%) rename pinecone/{data => db_data}/errors.py (100%) create mode 100644 pinecone/db_data/features/__init__.py rename pinecone/{data => db_data}/features/bulk_import/__init__.py (100%) rename pinecone/{data => db_data}/features/bulk_import/bulk_import.py (100%) rename pinecone/{data => db_data}/features/bulk_import/bulk_import_asyncio.py (100%) rename pinecone/{data => db_data}/features/bulk_import/bulk_import_request_factory.py (100%) create mode 100644 pinecone/db_data/features/inference/__init__.py rename pinecone/{data => db_data}/import_error.py (100%) rename pinecone/{data => db_data}/index.py (100%) rename pinecone/{data => db_data}/index_asyncio.py (100%) rename pinecone/{data => db_data}/index_asyncio_interface.py (100%) rename pinecone/{data => db_data}/interfaces.py (100%) rename pinecone/{data => db_data}/query_results_aggregator.py (100%) rename pinecone/{data => db_data}/request_factory.py (100%) rename pinecone/{data => db_data}/sparse_values_factory.py (100%) rename pinecone/{data => db_data}/types/__init__.py (100%) rename pinecone/{data => db_data}/types/query_filter.py (100%) rename pinecone/{data => db_data}/types/search_query_typed_dict.py (100%) rename pinecone/{data => db_data}/types/search_query_vector_typed_dict.py (100%) rename pinecone/{data => db_data}/types/search_rerank_typed_dict.py (100%) rename pinecone/{data => db_data}/types/sparse_vector_typed_dict.py (100%) rename pinecone/{data => db_data}/types/vector_metadata_dict.py (100%) rename pinecone/{data => db_data}/types/vector_tuple.py (100%) rename pinecone/{data => db_data}/types/vector_typed_dict.py (100%) rename pinecone/{data => db_data}/vector_factory.py (100%) rename pinecone/{data/features => }/inference/__init__.py (100%) rename pinecone/{data/features => }/inference/inference.py 
(100%) rename pinecone/{data/features => }/inference/inference_asyncio.py (100%) rename pinecone/{data/features => }/inference/inference_request_builder.py (100%) rename pinecone/{data/features => }/inference/models/__init__.py (100%) rename pinecone/{data/features => }/inference/models/embedding_list.py (100%) rename pinecone/{ => inference}/models/index_embed.py (94%) rename pinecone/{data/features => }/inference/models/rerank_result.py (100%) rename pinecone/{data/features => }/inference/repl_overrides.py (100%) rename pinecone/{control => }/langchain_import_warnings.py (100%) rename pinecone/{control => }/legacy_pinecone_interface.py (99%) create mode 100644 pinecone/openapi_support/configuration_lazy.py rename pinecone/{control => }/pinecone.py (93%) rename pinecone/{control => }/pinecone_asyncio.py (91%) rename pinecone/{control => }/pinecone_interface_asyncio.py (99%) create mode 100644 tests/upgrade/test_v6_upgrade.py diff --git a/pinecone/__init__.py b/pinecone/__init__.py index 13a65bd1..4af444d7 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -4,12 +4,12 @@ from .deprecated_plugins import check_for_deprecated_plugins from .deprecation_warnings import * -from .config import * +from .pinecone import Pinecone +from .pinecone_asyncio import PineconeAsyncio from .exceptions import * -from .control import * -from .data import * -from .models import * -from .enums import * +# from .config import * +# from .db_control import * +# from .db_data import * from .utils import __version__ diff --git a/pinecone/config/__init__.py b/pinecone/config/__init__.py index 7abb7278..f292622f 100644 --- a/pinecone/config/__init__.py +++ b/pinecone/config/__init__.py @@ -2,6 +2,7 @@ import os from .config import ConfigBuilder, Config +from .openapi_configuration import Configuration as OpenApiConfiguration from .pinecone_config import PineconeConfig if os.getenv("PINECONE_DEBUG") is not None: diff --git a/pinecone/config/config.py b/pinecone/config/config.py 
index 01a703e0..86c03649 100644 --- a/pinecone/config/config.py +++ b/pinecone/config/config.py @@ -1,9 +1,11 @@ -from typing import NamedTuple, Optional, Dict +from typing import NamedTuple, Optional, Dict, TYPE_CHECKING import os -from pinecone.exceptions.exceptions import PineconeConfigurationError -from pinecone.config.openapi import OpenApiConfigFactory -from pinecone.openapi_support.configuration import Configuration as OpenApiConfiguration +from pinecone.exceptions import PineconeConfigurationError +from pinecone.config.openapi_config_factory import OpenApiConfigFactory + +if TYPE_CHECKING: + from pinecone.openapi_support.configuration import Configuration as OpenApiConfiguration # Duplicated this util to help resolve circular imports @@ -81,8 +83,8 @@ def build( @staticmethod def build_openapi_config( - config: Config, openapi_config: Optional[OpenApiConfiguration] = None, **kwargs - ) -> OpenApiConfiguration: + config: Config, openapi_config: Optional["OpenApiConfiguration"] = None, **kwargs + ) -> "OpenApiConfiguration": if openapi_config: openapi_config = OpenApiConfigFactory.copy( openapi_config=openapi_config, api_key=config.api_key, host=config.host diff --git a/pinecone/config/openapi.py b/pinecone/config/openapi_config_factory.py similarity index 92% rename from pinecone/config/openapi.py rename to pinecone/config/openapi_config_factory.py index d6bdf702..d7730458 100644 --- a/pinecone/config/openapi.py +++ b/pinecone/config/openapi_config_factory.py @@ -1,13 +1,11 @@ import sys -from typing import List, Optional +from typing import List, Optional, Tuple import certifi import socket import copy -from urllib3.connection import HTTPConnection - -from pinecone.openapi_support.configuration import Configuration as OpenApiConfiguration +from pinecone.config.openapi_configuration import Configuration as OpenApiConfiguration TCP_KEEPINTVL = 60 # Sec TCP_KEEPIDLE = 300 # Sec @@ -58,7 +56,7 @@ def _get_socket_options( keep_alive_idle_sec: int = TCP_KEEPIDLE, 
keep_alive_interval_sec: int = TCP_KEEPINTVL, keep_alive_tries: int = TCP_KEEPCNT, - ) -> List[tuple]: + ) -> List[Tuple[int, int, int]]: """ Returns the socket options to pass to OpenAPI's Rest client Args: @@ -72,7 +70,10 @@ def _get_socket_options( """ # Source: https://www.finbourne.com/blog/the-mysterious-hanging-client-tcp-keep-alives - socket_params = HTTPConnection.default_socket_options + # urllib3.connection.HTTPConnection.default_socket_options + socket_params = [ + (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + ] if not do_keep_alive: return socket_params diff --git a/pinecone/config/openapi_configuration.py b/pinecone/config/openapi_configuration.py new file mode 100644 index 00000000..9be701be --- /dev/null +++ b/pinecone/config/openapi_configuration.py @@ -0,0 +1,441 @@ +import copy +import logging +import multiprocessing + +from http import client as http_client +from pinecone.exceptions import PineconeApiValueError +from typing import TypedDict + + +class HostSetting(TypedDict): + url: str + description: str + + +JSON_SCHEMA_VALIDATION_KEYWORDS = { + "multipleOf", + "maximum", + "exclusiveMaximum", + "minimum", + "exclusiveMinimum", + "maxLength", + "minLength", + "pattern", + "maxItems", + "minItems", +} + + +class Configuration: + """Class to hold the configuration of the API client. + + :param host: Base url + :param api_key: Dict to store API key(s). + Each entry in the dict specifies an API key. + The dict key is the name of the security scheme in the OAS specification. + The dict value is the API key secret. + :param api_key_prefix: Dict to store API prefix (e.g. Bearer) + The dict key is the name of the security scheme in the OAS specification. + The dict value is an API key prefix when generating the auth data. + :param discard_unknown_keys: Boolean value indicating whether to discard + unknown properties. A server may send a response that includes additional + properties that are not known by the client in the following scenarios: + 1. 
The OpenAPI document is incomplete, i.e. it does not match the server + implementation. + 2. The client was generated using an older version of the OpenAPI document + and the server has been upgraded since then. + If a schema in the OpenAPI document defines the additionalProperties attribute, + then all undeclared properties received by the server are injected into the + additional properties map. In that case, there are undeclared properties, and + nothing to discard. + :param disabled_client_side_validations (string): Comma-separated list of + JSON schema validation keywords to disable JSON schema structural validation + rules. The following keywords may be specified: multipleOf, maximum, + exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern, + maxItems, minItems. + By default, the validation is performed for data generated locally by the client + and data received from the server, independent of any validation performed by + the server side. If the input data does not satisfy the JSON schema validation + rules specified in the OpenAPI document, an exception is raised. + If disabled_client_side_validations is set, structural validation is + disabled. This can be useful to troubleshoot data validation problem, such as + when the OpenAPI document validation rules do not match the actual API data + received by the server. + :param server_operation_index: Mapping from operation ID to an index to server + configuration. + :param server_operation_variables: Mapping from operation ID to a mapping with + string values to replace variables in templated server configuration. + The validation of enums is performed for variables with defined enum values before. + :param ssl_ca_cert: str - the path to a file of concatenated CA certificates + in PEM format + + :Example: + + API Key Authentication Example. 
+ Given the following security scheme in the OpenAPI specification: + components: + securitySchemes: + cookieAuth: # name for the security scheme + type: apiKey + in: cookie + name: JSESSIONID # cookie name + + You can programmatically set the cookie: + + conf = pinecone.openapi_support.Configuration( + api_key={'cookieAuth': 'abc123'} + api_key_prefix={'cookieAuth': 'JSESSIONID'} + ) + + The following cookie will be added to the HTTP request: + Cookie: JSESSIONID abc123 + """ + + _default = None + + def __init__( + self, + host=None, + api_key=None, + api_key_prefix=None, + discard_unknown_keys=False, + disabled_client_side_validations="", + server_index=None, + server_variables=None, + server_operation_index=None, + server_operation_variables=None, + ssl_ca_cert=None, + ): + """Constructor""" + self._base_path = "https://api.pinecone.io" if host is None else host + """Default Base url + """ + self.server_index = 0 if server_index is None and host is None else server_index + self.server_operation_index = server_operation_index or {} + """Default server index + """ + self.server_variables = server_variables or {} + self.server_operation_variables = server_operation_variables or {} + """Default server variables + """ + self.temp_folder_path = None + """Temp file folder for downloading files + """ + # Authentication Settings + self.api_key = {} + if api_key: + self.api_key = api_key + """dict to store API key(s) + """ + self.api_key_prefix = {} + if api_key_prefix: + self.api_key_prefix = api_key_prefix + """dict to store API prefix (e.g. 
Bearer) + """ + self.refresh_api_key_hook = None + """function hook to refresh API key if expired + """ + self.discard_unknown_keys = discard_unknown_keys + self.disabled_client_side_validations = disabled_client_side_validations + self.logger = {} + """Logging Settings + """ + self.logger["package_logger"] = logging.getLogger("pinecone.openapi_support") + self.logger["urllib3_logger"] = logging.getLogger("urllib3") + self.logger_format = "%(asctime)s %(levelname)s %(message)s" + """Log format + """ + self.logger_stream_handler = None + """Log stream handler + """ + self.logger_file_handler = None + """Log file handler + """ + self.logger_file = None + """Debug file location + """ + self.debug = False + """Debug switch + """ + + self.verify_ssl = True + """SSL/TLS verification + Set this to false to skip verifying SSL certificate when calling API + from https server. + """ + self.ssl_ca_cert = ssl_ca_cert + """Set this to customize the certificate file to verify the peer. + """ + self.cert_file = None + """client certificate file + """ + self.key_file = None + """client key file + """ + self.assert_hostname = None + """Set this to True/False to enable/disable SSL hostname verification. + """ + + self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 + """urllib3 connection pool's maximum number of connections saved + per pool. urllib3 uses 1 connection as default value, but this is + not the best value when you are making a lot of possibly parallel + requests to the same host, which is often the case here. + cpu_count * 5 is used as default value to increase performance. 
+ """ + + self.proxy = None + """Proxy URL + """ + self.proxy_headers = None + """Proxy headers + """ + self.safe_chars_for_path_param = "" + """Safe chars for path_param + """ + self.retries = None + """Adding retries to override urllib3 default value 3 + """ + # Enable client side validation + self.client_side_validation = True + + # Options to pass down to the underlying urllib3 socket + self.socket_options = None + + def __deepcopy__(self, memo): + cls = self.__class__ + result = cls.__new__(cls) + memo[id(self)] = result + for k, v in self.__dict__.items(): + if k not in ("logger", "logger_file_handler"): + setattr(result, k, copy.deepcopy(v, memo)) + # shallow copy of loggers + result.logger = copy.copy(self.logger) + # use setters to configure loggers + result.logger_file = self.logger_file + result.debug = self.debug + return result + + def __setattr__(self, name, value): + object.__setattr__(self, name, value) + if name == "disabled_client_side_validations": + s = set(filter(None, value.split(","))) + for v in s: + if v not in JSON_SCHEMA_VALIDATION_KEYWORDS: + raise PineconeApiValueError("Invalid keyword: '{0}''".format(v)) + self._disabled_client_side_validations = s + + @classmethod + def set_default(cls, default): + """Set default instance of configuration. + + It stores default configuration, which can be + returned by get_default_copy method. + + :param default: object of Configuration + """ + cls._default = copy.deepcopy(default) + + @classmethod + def get_default_copy(cls): + """Return new instance of configuration. + + This method returns newly created, based on default constructor, + object of Configuration class or returns a copy of default + configuration passed by the set_default method. + + :return: The configuration object. + """ + if cls._default is not None: + return copy.deepcopy(cls._default) + return Configuration() + + @property + def logger_file(self): + """The logger file. 
+ + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in self.logger.items(): + logger.addHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + return self.__debug + + @debug.setter + def debug(self, value): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + self.__debug = value + if self.__debug: + # if debug status is True, turn on debug logging + for _, logger in self.logger.items(): + logger.setLevel(logging.DEBUG) + # turn on http_client debug + http_client.HTTPConnection.debuglevel = 1 + else: + # if debug status is False, turn off debug logging, + # setting log level to default `logging.WARNING` + for _, logger in self.logger.items(): + logger.setLevel(logging.WARNING) + # turn off http_client debug + http_client.HTTPConnection.debuglevel = 0 + + @property + def logger_format(self): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """The logger format. + + The logger_formatter will be updated when sets logger_format. 
+ + :param value: The format string. + :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier, alias=None): + """Gets API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :param alias: The alternative identifier of apiKey. + :return: The token for api key authentication. + """ + if self.refresh_api_key_hook is not None: + self.refresh_api_key_hook(self) + key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None) + if key: + prefix = self.api_key_prefix.get(identifier) + if prefix: + return "%s %s" % (prefix, key) + else: + return key + + def auth_settings(self): + """Gets Auth Settings dict for api client. + + :return: The Auth Settings information dict. + """ + auth = {} + if "ApiKeyAuth" in self.api_key: + auth["ApiKeyAuth"] = { + "type": "api_key", + "in": "header", + "key": "Api-Key", + "value": self.get_api_key_with_prefix("ApiKeyAuth"), + } + return auth + + def get_host_settings(self): + """Gets an array of host settings + + :return: An array of host settings + """ + return [{"url": "https://api.pinecone.io", "description": "Production API endpoints"}] + + def get_host_from_settings(self, index, variables=None, servers=None): + """Gets host URL based on the index and variables + :param index: array index of the host settings + :param variables: hash of variable and the corresponding value + :param servers: an array of host settings or None + :return: URL based on host settings + """ + if index is None: + return self._base_path + + variables = {} if variables is None else variables + servers = self.get_host_settings() if servers is None else servers + + try: + server = servers[index] + except IndexError: + raise ValueError( + "Invalid index {0} when selecting the host settings. 
Must be less than {1}".format( + index, len(servers) + ) + ) + + url = server["url"] + + # go through variables and replace placeholders + for variable_name, variable in server.get("variables", {}).items(): + used_value = variables.get(variable_name, variable["default_value"]) + + if "enum_values" in variable and used_value not in variable["enum_values"]: + raise ValueError( + "The variable `{0}` in the host URL has invalid value {1}. Must be {2}.".format( + variable_name, variables[variable_name], variable["enum_values"] + ) + ) + + url = url.replace("{" + variable_name + "}", used_value) + + return url + + @property + def host(self): + """Return generated host.""" + return self.get_host_from_settings(self.server_index, variables=self.server_variables) + + @host.setter + def host(self, value): + """Fix base path.""" + self._base_path = value + self.server_index = None + + def __repr__(self): + attrs = [ + f"host={self.host}", + "api_key=***", + f"api_key_prefix={self.api_key_prefix}", + f"connection_pool_maxsize={self.connection_pool_maxsize}", + f"discard_unknown_keys={self.discard_unknown_keys}", + f"disabled_client_side_validations={self.disabled_client_side_validations}", + f"server_index={self.server_index}", + f"server_variables={self.server_variables}", + f"server_operation_index={self.server_operation_index}", + f"server_operation_variables={self.server_operation_variables}", + f"ssl_ca_cert={self.ssl_ca_cert}", + ] + return f"Configuration({', '.join(attrs)})" diff --git a/pinecone/control/__init__.py b/pinecone/control/__init__.py deleted file mode 100644 index a26e352a..00000000 --- a/pinecone/control/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .pinecone import Pinecone -from .pinecone_asyncio import PineconeAsyncio - -from .repr_overrides import install_repr_overrides - -install_repr_overrides() diff --git a/pinecone/core/openapi/db_control/__init__.py b/pinecone/core/openapi/db_control/__init__.py index 1a6949bb..31408552 100644 --- 
a/pinecone/core/openapi/db_control/__init__.py +++ b/pinecone/core/openapi/db_control/__init__.py @@ -17,7 +17,7 @@ from pinecone.openapi_support.api_client import ApiClient # import Configuration -from pinecone.openapi_support.configuration import Configuration +from pinecone.config.openapi_configuration import Configuration # import exceptions from pinecone.openapi_support.exceptions import PineconeException diff --git a/pinecone/core/openapi/db_data/__init__.py b/pinecone/core/openapi/db_data/__init__.py index e8cbbfe1..76701561 100644 --- a/pinecone/core/openapi/db_data/__init__.py +++ b/pinecone/core/openapi/db_data/__init__.py @@ -17,7 +17,7 @@ from pinecone.openapi_support.api_client import ApiClient # import Configuration -from pinecone.openapi_support.configuration import Configuration +from pinecone.config.openapi_configuration import Configuration # import exceptions from pinecone.openapi_support.exceptions import PineconeException diff --git a/pinecone/core/openapi/inference/__init__.py b/pinecone/core/openapi/inference/__init__.py index d878080c..9bf0fcdb 100644 --- a/pinecone/core/openapi/inference/__init__.py +++ b/pinecone/core/openapi/inference/__init__.py @@ -17,7 +17,7 @@ from pinecone.openapi_support.api_client import ApiClient # import Configuration -from pinecone.openapi_support.configuration import Configuration +from pinecone.config.openapi_configuration import Configuration # import exceptions from pinecone.openapi_support.exceptions import PineconeException diff --git a/pinecone/data/__init__.py b/pinecone/data/__init__.py deleted file mode 100644 index 8e040056..00000000 --- a/pinecone/data/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -from .index import ( - Index as _Index, - FetchResponse, - QueryResponse, - DescribeIndexStatsResponse, - UpsertResponse, - SparseValues, - Vector, -) -from .dataclasses import * -from .import_error import ( - Index, - IndexClientInstantiationError, - Inference, - InferenceInstantiationError, -) -from 
.index_asyncio import * -from .errors import ( - VectorDictionaryMissingKeysError, - VectorDictionaryExcessKeysError, - VectorTupleLengthError, - SparseValuesTypeError, - SparseValuesMissingKeysError, - SparseValuesDictionaryExpectedError, - MetadataDictionaryExpectedError, -) - -from .features.bulk_import import ImportErrorMode -from .features.inference import ( - Inference as _Inference, - AsyncioInference as _AsyncioInference, - RerankModel, - EmbedModel, -) diff --git a/pinecone/db_control/__init__.py b/pinecone/db_control/__init__.py new file mode 100644 index 00000000..73d82468 --- /dev/null +++ b/pinecone/db_control/__init__.py @@ -0,0 +1,7 @@ +from .enums import * +from .models import * +from .db_control import DBControl +from .db_control_asyncio import DBControlAsyncio +from .repr_overrides import install_repr_overrides + +install_repr_overrides() diff --git a/pinecone/control/db_control.py b/pinecone/db_control/db_control.py similarity index 79% rename from pinecone/control/db_control.py rename to pinecone/db_control/db_control.py index ca9f54d5..f03f349d 100644 --- a/pinecone/control/db_control.py +++ b/pinecone/db_control/db_control.py @@ -12,8 +12,8 @@ """ @private """ if TYPE_CHECKING: - from .resources.index import IndexResource - from .resources.collection import CollectionResource + from .resources.sync.index import IndexResource + from .resources.sync.collection import CollectionResource class DBControl: @@ -21,6 +21,12 @@ def __init__(self, config, openapi_config, pool_threads): self.config = config """ @private """ + self.openapi_config = openapi_config + """ @private """ + + self.pool_threads = pool_threads + """ @private """ + self.index_api = setup_openapi_client( api_client_klass=ApiClient, api_klass=ManageIndexesApi, @@ -40,7 +46,7 @@ def __init__(self, config, openapi_config, pool_threads): @property def index(self) -> "IndexResource": if self._index_resource is None: - from .resources.index import IndexResource + from 
.resources.sync.index import IndexResource self._index_resource = IndexResource(index_api=self.index_api, config=self.config) return self._index_resource @@ -48,7 +54,7 @@ def index(self) -> "IndexResource": @property def collection(self) -> "CollectionResource": if self._collection_resource is None: - from .resources.collection import CollectionResource + from .resources.sync.collection import CollectionResource self._collection_resource = CollectionResource(self.index_api) return self._collection_resource diff --git a/pinecone/control/db_control_asyncio.py b/pinecone/db_control/db_control_asyncio.py similarity index 86% rename from pinecone/control/db_control_asyncio.py rename to pinecone/db_control/db_control_asyncio.py index 3f10df6b..2fce306e 100644 --- a/pinecone/control/db_control_asyncio.py +++ b/pinecone/db_control/db_control_asyncio.py @@ -12,8 +12,8 @@ if TYPE_CHECKING: - from .resources_asyncio.index import IndexResourceAsyncio - from .resources_asyncio.collection import CollectionResourceAsyncio + from .resources.asyncio.index import IndexResourceAsyncio + from .resources.asyncio.collection import CollectionResourceAsyncio class DBControlAsyncio: @@ -39,7 +39,7 @@ def __init__(self, config, openapi_config, pool_threads): @property def index(self) -> "IndexResourceAsyncio": if self._index_resource is None: - from .resources_asyncio.index import IndexResourceAsyncio + from .resources.asyncio.index import IndexResourceAsyncio self._index_resource = IndexResourceAsyncio( index_api=self.index_api, config=self.config @@ -49,7 +49,7 @@ def index(self) -> "IndexResourceAsyncio": @property def collection(self) -> "CollectionResourceAsyncio": if self._collection_resource is None: - from .resources_asyncio.collection import CollectionResourceAsyncio + from .resources.asyncio.collection import CollectionResourceAsyncio self._collection_resource = CollectionResourceAsyncio(self.index_api) return self._collection_resource diff --git a/pinecone/enums/__init__.py 
b/pinecone/db_control/enums/__init__.py similarity index 100% rename from pinecone/enums/__init__.py rename to pinecone/db_control/enums/__init__.py diff --git a/pinecone/enums/clouds.py b/pinecone/db_control/enums/clouds.py similarity index 100% rename from pinecone/enums/clouds.py rename to pinecone/db_control/enums/clouds.py diff --git a/pinecone/enums/deletion_protection.py b/pinecone/db_control/enums/deletion_protection.py similarity index 100% rename from pinecone/enums/deletion_protection.py rename to pinecone/db_control/enums/deletion_protection.py diff --git a/pinecone/enums/metric.py b/pinecone/db_control/enums/metric.py similarity index 100% rename from pinecone/enums/metric.py rename to pinecone/db_control/enums/metric.py diff --git a/pinecone/enums/pod_index_environment.py b/pinecone/db_control/enums/pod_index_environment.py similarity index 100% rename from pinecone/enums/pod_index_environment.py rename to pinecone/db_control/enums/pod_index_environment.py diff --git a/pinecone/enums/pod_type.py b/pinecone/db_control/enums/pod_type.py similarity index 100% rename from pinecone/enums/pod_type.py rename to pinecone/db_control/enums/pod_type.py diff --git a/pinecone/enums/vector_type.py b/pinecone/db_control/enums/vector_type.py similarity index 100% rename from pinecone/enums/vector_type.py rename to pinecone/db_control/enums/vector_type.py diff --git a/pinecone/control/index_host_store.py b/pinecone/db_control/index_host_store.py similarity index 100% rename from pinecone/control/index_host_store.py rename to pinecone/db_control/index_host_store.py diff --git a/pinecone/models/__init__.py b/pinecone/db_control/models/__init__.py similarity index 90% rename from pinecone/models/__init__.py rename to pinecone/db_control/models/__init__.py index 86306c1e..34003bfe 100644 --- a/pinecone/models/__init__.py +++ b/pinecone/db_control/models/__init__.py @@ -5,7 +5,7 @@ from .index_list import IndexList from .collection_list import CollectionList from 
.index_model import IndexModel -from .index_embed import IndexEmbed +from ...inference.models.index_embed import IndexEmbed __all__ = [ "CollectionDescription", diff --git a/pinecone/models/collection_description.py b/pinecone/db_control/models/collection_description.py similarity index 100% rename from pinecone/models/collection_description.py rename to pinecone/db_control/models/collection_description.py diff --git a/pinecone/models/collection_list.py b/pinecone/db_control/models/collection_list.py similarity index 100% rename from pinecone/models/collection_list.py rename to pinecone/db_control/models/collection_list.py diff --git a/pinecone/models/index_description.py b/pinecone/db_control/models/index_description.py similarity index 100% rename from pinecone/models/index_description.py rename to pinecone/db_control/models/index_description.py diff --git a/pinecone/models/index_list.py b/pinecone/db_control/models/index_list.py similarity index 100% rename from pinecone/models/index_list.py rename to pinecone/db_control/models/index_list.py diff --git a/pinecone/models/index_model.py b/pinecone/db_control/models/index_model.py similarity index 81% rename from pinecone/models/index_model.py rename to pinecone/db_control/models/index_model.py index 7deb2d7d..75ba1f30 100644 --- a/pinecone/models/index_model.py +++ b/pinecone/db_control/models/index_model.py @@ -1,4 +1,4 @@ -from pinecone.core.openapi.db_control.models import IndexModel as OpenAPIIndexModel +from pinecone.core.openapi.db_control.model.index_model import IndexModel as OpenAPIIndexModel class IndexModel: diff --git a/pinecone/models/list_response.py b/pinecone/db_control/models/list_response.py similarity index 100% rename from pinecone/models/list_response.py rename to pinecone/db_control/models/list_response.py diff --git a/pinecone/models/pod_spec.py b/pinecone/db_control/models/pod_spec.py similarity index 100% rename from pinecone/models/pod_spec.py rename to 
pinecone/db_control/models/pod_spec.py diff --git a/pinecone/models/serverless_spec.py b/pinecone/db_control/models/serverless_spec.py similarity index 100% rename from pinecone/models/serverless_spec.py rename to pinecone/db_control/models/serverless_spec.py diff --git a/pinecone/control/repr_overrides.py b/pinecone/db_control/repr_overrides.py similarity index 79% rename from pinecone/control/repr_overrides.py rename to pinecone/db_control/repr_overrides.py index 98e4b4d4..714b8dfb 100644 --- a/pinecone/control/repr_overrides.py +++ b/pinecone/db_control/repr_overrides.py @@ -1,6 +1,6 @@ from pinecone.utils import install_json_repr_override -from pinecone.models.index_model import IndexModel -from pinecone.core.openapi.db_control.models import CollectionModel +from pinecone.db_control.models.index_model import IndexModel +from pinecone.core.openapi.db_control.model.collection_model import CollectionModel def install_repr_overrides(): diff --git a/pinecone/control/request_factory.py b/pinecone/db_control/request_factory.py similarity index 98% rename from pinecone/control/request_factory.py rename to pinecone/db_control/request_factory.py index d4d0ce63..2e796745 100644 --- a/pinecone/control/request_factory.py +++ b/pinecone/db_control/request_factory.py @@ -19,10 +19,10 @@ PodSpec as PodSpecModel, PodSpecMetadataConfig, ) -from pinecone.models import ServerlessSpec, PodSpec, IndexModel, IndexEmbed +from pinecone.db_control.models import ServerlessSpec, PodSpec, IndexModel, IndexEmbed from pinecone.utils import parse_non_empty_args -from pinecone.enums import ( +from pinecone.db_control.enums import ( Metric, VectorType, DeletionProtection, diff --git a/pinecone/data/features/__init__.py b/pinecone/db_control/resources/__init__.py similarity index 100% rename from pinecone/data/features/__init__.py rename to pinecone/db_control/resources/__init__.py diff --git a/pinecone/db_control/resources/asyncio/__init__.py b/pinecone/db_control/resources/asyncio/__init__.py 
new file mode 100644 index 00000000..9a4841d3 --- /dev/null +++ b/pinecone/db_control/resources/asyncio/__init__.py @@ -0,0 +1,2 @@ +from .index import IndexResourceAsyncio +from .collection import CollectionResourceAsyncio diff --git a/pinecone/control/resources_asyncio/collection.py b/pinecone/db_control/resources/asyncio/collection.py similarity index 85% rename from pinecone/control/resources_asyncio/collection.py rename to pinecone/db_control/resources/asyncio/collection.py index 42d430eb..33c1f3d0 100644 --- a/pinecone/control/resources_asyncio/collection.py +++ b/pinecone/db_control/resources/asyncio/collection.py @@ -2,9 +2,9 @@ from typing import TYPE_CHECKING -from pinecone.models import CollectionList +from pinecone.db_control.models import CollectionList -from ..request_factory import PineconeDBControlRequestFactory +from pinecone.db_control.request_factory import PineconeDBControlRequestFactory logger = logging.getLogger(__name__) """ @private """ diff --git a/pinecone/control/resources_asyncio/index.py b/pinecone/db_control/resources/asyncio/index.py similarity index 93% rename from pinecone/control/resources_asyncio/index.py rename to pinecone/db_control/resources/asyncio/index.py index d1bc037f..2d93ae01 100644 --- a/pinecone/control/resources_asyncio/index.py +++ b/pinecone/db_control/resources/asyncio/index.py @@ -3,10 +3,10 @@ from typing import Optional, Dict, Union -from pinecone.models import ServerlessSpec, PodSpec, IndexModel, IndexList, IndexEmbed +from pinecone.db_control.models import ServerlessSpec, PodSpec, IndexModel, IndexList, IndexEmbed from pinecone.utils import docslinks -from pinecone.enums import ( +from pinecone.db_control.enums import ( Metric, VectorType, DeletionProtection, @@ -16,8 +16,10 @@ GcpRegion, AzureRegion, ) -from pinecone.control.types import CreateIndexForModelEmbedTypedDict -from pinecone.control.request_factory import PineconeDBControlRequestFactory +from pinecone.db_control.types import 
CreateIndexForModelEmbedTypedDict +from pinecone.db_control.request_factory import PineconeDBControlRequestFactory +from pinecone.core.openapi.db_control import API_VERSION + logger = logging.getLogger(__name__) """ @private """ @@ -101,7 +103,7 @@ async def is_ready() -> bool: while not await is_ready(): if timeout < 0: logger.error(f"Index {name} is not ready. Timeout reached.") - link = docslinks["API_DESCRIBE_INDEX"] + link = docslinks["API_DESCRIBE_INDEX"](API_VERSION) timeout_msg = ( f"Please call describe_index() to confirm index status. See docs at {link}" ) diff --git a/pinecone/control/resources/__init__.py b/pinecone/db_control/resources/sync/__init__.py similarity index 100% rename from pinecone/control/resources/__init__.py rename to pinecone/db_control/resources/sync/__init__.py diff --git a/pinecone/control/resources/collection.py b/pinecone/db_control/resources/sync/collection.py similarity index 84% rename from pinecone/control/resources/collection.py rename to pinecone/db_control/resources/sync/collection.py index dba438b1..1d8d11d8 100644 --- a/pinecone/control/resources/collection.py +++ b/pinecone/db_control/resources/sync/collection.py @@ -1,7 +1,7 @@ import logging -from pinecone.models import CollectionList -from ..request_factory import PineconeDBControlRequestFactory +from pinecone.db_control.models import CollectionList +from pinecone.db_control.request_factory import PineconeDBControlRequestFactory logger = logging.getLogger(__name__) """ @private """ diff --git a/pinecone/control/resources/index.py b/pinecone/db_control/resources/sync/index.py similarity index 92% rename from pinecone/control/resources/index.py rename to pinecone/db_control/resources/sync/index.py index df6fd203..85876d6c 100644 --- a/pinecone/control/resources/index.py +++ b/pinecone/db_control/resources/sync/index.py @@ -2,12 +2,12 @@ import logging from typing import Optional, Dict, Union -from ..index_host_store import IndexHostStore +from 
pinecone.db_control.index_host_store import IndexHostStore -from pinecone.models import ServerlessSpec, PodSpec, IndexModel, IndexList, IndexEmbed +from pinecone.db_control.models import ServerlessSpec, PodSpec, IndexModel, IndexList, IndexEmbed from pinecone.utils import docslinks -from pinecone.enums import ( +from pinecone.db_control.enums import ( Metric, VectorType, DeletionProtection, @@ -17,8 +17,9 @@ GcpRegion, AzureRegion, ) -from ..types import CreateIndexForModelEmbedTypedDict -from ..request_factory import PineconeDBControlRequestFactory +from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict +from pinecone.db_control.request_factory import PineconeDBControlRequestFactory +from pinecone.core.openapi.db_control import API_VERSION logger = logging.getLogger(__name__) """ @private """ @@ -108,7 +109,7 @@ def is_ready() -> bool: while not is_ready(): if timeout < 0: logger.error(f"Index {name} is not ready. Timeout reached.") - link = docslinks["API_DESCRIBE_INDEX"] + link = docslinks["API_DESCRIBE_INDEX"](API_VERSION) timeout_msg = ( f"Please call describe_index() to confirm index status. 
See docs at {link}" ) diff --git a/pinecone/control/types/__init__.py b/pinecone/db_control/types/__init__.py similarity index 100% rename from pinecone/control/types/__init__.py rename to pinecone/db_control/types/__init__.py diff --git a/pinecone/control/types/create_index_for_model_embed.py b/pinecone/db_control/types/create_index_for_model_embed.py similarity index 72% rename from pinecone/control/types/create_index_for_model_embed.py rename to pinecone/db_control/types/create_index_for_model_embed.py index 123474a0..ab7e43ac 100644 --- a/pinecone/control/types/create_index_for_model_embed.py +++ b/pinecone/db_control/types/create_index_for_model_embed.py @@ -1,6 +1,6 @@ from typing import TypedDict, Dict, Union -from ...enums import Metric -from ...data.features.inference import EmbedModel +from pinecone.db_control.enums import Metric +from pinecone.inference import EmbedModel class CreateIndexForModelEmbedTypedDict(TypedDict): diff --git a/pinecone/db_data/__init__.py b/pinecone/db_data/__init__.py new file mode 100644 index 00000000..7c76f04f --- /dev/null +++ b/pinecone/db_data/__init__.py @@ -0,0 +1,53 @@ +from .index import ( + Index as _Index, + FetchResponse, + QueryResponse, + DescribeIndexStatsResponse, + UpsertResponse, + SparseValues, + Vector, +) +from .dataclasses import * +from .import_error import ( + Index, + IndexClientInstantiationError, + Inference, + InferenceInstantiationError, +) +from .index_asyncio import * +from .errors import ( + VectorDictionaryMissingKeysError, + VectorDictionaryExcessKeysError, + VectorTupleLengthError, + SparseValuesTypeError, + SparseValuesMissingKeysError, + SparseValuesDictionaryExpectedError, + MetadataDictionaryExpectedError, +) + +from .features.bulk_import import ImportErrorMode + + +import warnings + +def _get_deprecated_import(name, from_module, to_module): + warnings.warn( + f"The import of `{name}` from `{from_module}` has moved to `{to_module}`. 
" + f"Please update your imports from `from {from_module} import {name}` " + f"to `from {to_module} import {name}`. " + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + ) + # Import from the new location + from pinecone.inference import Inference as _Inference, AsyncioInference as _AsyncioInference, RerankModel, EmbedModel + return locals()[name] + +moved = ["_Inference", "_AsyncioInference", "RerankModel", "EmbedModel"] + +def __getattr__(name): + if name in locals(): + return locals()[name] + elif name in moved: + return _get_deprecated_import(name, "pinecone.data", "pinecone.inference") + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") + diff --git a/pinecone/data/dataclasses/__init__.py b/pinecone/db_data/dataclasses/__init__.py similarity index 100% rename from pinecone/data/dataclasses/__init__.py rename to pinecone/db_data/dataclasses/__init__.py diff --git a/pinecone/data/dataclasses/fetch_response.py b/pinecone/db_data/dataclasses/fetch_response.py similarity index 100% rename from pinecone/data/dataclasses/fetch_response.py rename to pinecone/db_data/dataclasses/fetch_response.py diff --git a/pinecone/data/dataclasses/search_query.py b/pinecone/db_data/dataclasses/search_query.py similarity index 100% rename from pinecone/data/dataclasses/search_query.py rename to pinecone/db_data/dataclasses/search_query.py diff --git a/pinecone/data/dataclasses/search_query_vector.py b/pinecone/db_data/dataclasses/search_query_vector.py similarity index 100% rename from pinecone/data/dataclasses/search_query_vector.py rename to pinecone/db_data/dataclasses/search_query_vector.py diff --git a/pinecone/data/dataclasses/search_rerank.py b/pinecone/db_data/dataclasses/search_rerank.py similarity index 100% rename from pinecone/data/dataclasses/search_rerank.py rename to pinecone/db_data/dataclasses/search_rerank.py diff --git a/pinecone/data/dataclasses/sparse_values.py 
b/pinecone/db_data/dataclasses/sparse_values.py similarity index 100% rename from pinecone/data/dataclasses/sparse_values.py rename to pinecone/db_data/dataclasses/sparse_values.py diff --git a/pinecone/data/dataclasses/utils.py b/pinecone/db_data/dataclasses/utils.py similarity index 100% rename from pinecone/data/dataclasses/utils.py rename to pinecone/db_data/dataclasses/utils.py diff --git a/pinecone/data/dataclasses/vector.py b/pinecone/db_data/dataclasses/vector.py similarity index 100% rename from pinecone/data/dataclasses/vector.py rename to pinecone/db_data/dataclasses/vector.py diff --git a/pinecone/data/errors.py b/pinecone/db_data/errors.py similarity index 100% rename from pinecone/data/errors.py rename to pinecone/db_data/errors.py diff --git a/pinecone/db_data/features/__init__.py b/pinecone/db_data/features/__init__.py new file mode 100644 index 00000000..b8f2fddb --- /dev/null +++ b/pinecone/db_data/features/__init__.py @@ -0,0 +1,13 @@ +import warnings + +# Display a warning for old imports +warnings.warn( + "The module at `pinecone.data.features.inference` has moved to `pinecone.inference`. " + "Please update your imports from `from pinecone.data.features.inference import Inference, AsyncioInference, RerankModel, EmbedModel` " + "to `from pinecone.inference import Inference, AsyncioInference, RerankModel, EmbedModel`. 
" + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, +) + +# Import from the new location to maintain backward compatibility +from pinecone.inference import Inference, AsyncioInference, RerankModel, EmbedModel diff --git a/pinecone/data/features/bulk_import/__init__.py b/pinecone/db_data/features/bulk_import/__init__.py similarity index 100% rename from pinecone/data/features/bulk_import/__init__.py rename to pinecone/db_data/features/bulk_import/__init__.py diff --git a/pinecone/data/features/bulk_import/bulk_import.py b/pinecone/db_data/features/bulk_import/bulk_import.py similarity index 100% rename from pinecone/data/features/bulk_import/bulk_import.py rename to pinecone/db_data/features/bulk_import/bulk_import.py diff --git a/pinecone/data/features/bulk_import/bulk_import_asyncio.py b/pinecone/db_data/features/bulk_import/bulk_import_asyncio.py similarity index 100% rename from pinecone/data/features/bulk_import/bulk_import_asyncio.py rename to pinecone/db_data/features/bulk_import/bulk_import_asyncio.py diff --git a/pinecone/data/features/bulk_import/bulk_import_request_factory.py b/pinecone/db_data/features/bulk_import/bulk_import_request_factory.py similarity index 100% rename from pinecone/data/features/bulk_import/bulk_import_request_factory.py rename to pinecone/db_data/features/bulk_import/bulk_import_request_factory.py diff --git a/pinecone/db_data/features/inference/__init__.py b/pinecone/db_data/features/inference/__init__.py new file mode 100644 index 00000000..897b4f4f --- /dev/null +++ b/pinecone/db_data/features/inference/__init__.py @@ -0,0 +1,11 @@ +import warnings + +warnings.warn( + "The module at `pinecone.data.features.inference` has moved to `pinecone.inference`. 
" + "Please update your imports from `from pinecone.data.features.inference import Inference, AsyncioInference, RerankModel, EmbedModel` " + "to `from pinecone.inference import Inference, AsyncioInference, RerankModel, EmbedModel`. " + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, +) + +from pinecone.inference import * diff --git a/pinecone/data/import_error.py b/pinecone/db_data/import_error.py similarity index 100% rename from pinecone/data/import_error.py rename to pinecone/db_data/import_error.py diff --git a/pinecone/data/index.py b/pinecone/db_data/index.py similarity index 100% rename from pinecone/data/index.py rename to pinecone/db_data/index.py diff --git a/pinecone/data/index_asyncio.py b/pinecone/db_data/index_asyncio.py similarity index 100% rename from pinecone/data/index_asyncio.py rename to pinecone/db_data/index_asyncio.py diff --git a/pinecone/data/index_asyncio_interface.py b/pinecone/db_data/index_asyncio_interface.py similarity index 100% rename from pinecone/data/index_asyncio_interface.py rename to pinecone/db_data/index_asyncio_interface.py diff --git a/pinecone/data/interfaces.py b/pinecone/db_data/interfaces.py similarity index 100% rename from pinecone/data/interfaces.py rename to pinecone/db_data/interfaces.py diff --git a/pinecone/data/query_results_aggregator.py b/pinecone/db_data/query_results_aggregator.py similarity index 100% rename from pinecone/data/query_results_aggregator.py rename to pinecone/db_data/query_results_aggregator.py diff --git a/pinecone/data/request_factory.py b/pinecone/db_data/request_factory.py similarity index 100% rename from pinecone/data/request_factory.py rename to pinecone/db_data/request_factory.py diff --git a/pinecone/data/sparse_values_factory.py b/pinecone/db_data/sparse_values_factory.py similarity index 100% rename from pinecone/data/sparse_values_factory.py rename to pinecone/db_data/sparse_values_factory.py diff --git 
a/pinecone/data/types/__init__.py b/pinecone/db_data/types/__init__.py similarity index 100% rename from pinecone/data/types/__init__.py rename to pinecone/db_data/types/__init__.py diff --git a/pinecone/data/types/query_filter.py b/pinecone/db_data/types/query_filter.py similarity index 100% rename from pinecone/data/types/query_filter.py rename to pinecone/db_data/types/query_filter.py diff --git a/pinecone/data/types/search_query_typed_dict.py b/pinecone/db_data/types/search_query_typed_dict.py similarity index 100% rename from pinecone/data/types/search_query_typed_dict.py rename to pinecone/db_data/types/search_query_typed_dict.py diff --git a/pinecone/data/types/search_query_vector_typed_dict.py b/pinecone/db_data/types/search_query_vector_typed_dict.py similarity index 100% rename from pinecone/data/types/search_query_vector_typed_dict.py rename to pinecone/db_data/types/search_query_vector_typed_dict.py diff --git a/pinecone/data/types/search_rerank_typed_dict.py b/pinecone/db_data/types/search_rerank_typed_dict.py similarity index 100% rename from pinecone/data/types/search_rerank_typed_dict.py rename to pinecone/db_data/types/search_rerank_typed_dict.py diff --git a/pinecone/data/types/sparse_vector_typed_dict.py b/pinecone/db_data/types/sparse_vector_typed_dict.py similarity index 100% rename from pinecone/data/types/sparse_vector_typed_dict.py rename to pinecone/db_data/types/sparse_vector_typed_dict.py diff --git a/pinecone/data/types/vector_metadata_dict.py b/pinecone/db_data/types/vector_metadata_dict.py similarity index 100% rename from pinecone/data/types/vector_metadata_dict.py rename to pinecone/db_data/types/vector_metadata_dict.py diff --git a/pinecone/data/types/vector_tuple.py b/pinecone/db_data/types/vector_tuple.py similarity index 100% rename from pinecone/data/types/vector_tuple.py rename to pinecone/db_data/types/vector_tuple.py diff --git a/pinecone/data/types/vector_typed_dict.py b/pinecone/db_data/types/vector_typed_dict.py similarity 
index 100% rename from pinecone/data/types/vector_typed_dict.py rename to pinecone/db_data/types/vector_typed_dict.py diff --git a/pinecone/data/vector_factory.py b/pinecone/db_data/vector_factory.py similarity index 100% rename from pinecone/data/vector_factory.py rename to pinecone/db_data/vector_factory.py diff --git a/pinecone/exceptions/__init__.py b/pinecone/exceptions/__init__.py index 92b05fd7..f437e90b 100644 --- a/pinecone/exceptions/__init__.py +++ b/pinecone/exceptions/__init__.py @@ -1,4 +1,7 @@ -from pinecone.openapi_support.exceptions import ( +from .exceptions import ( + PineconeConfigurationError, + PineconeProtocolError, + ListConversionException, PineconeException, PineconeApiAttributeError, PineconeApiTypeError, @@ -10,7 +13,6 @@ ForbiddenException, ServiceException, ) -from .exceptions import PineconeConfigurationError, PineconeProtocolError, ListConversionException __all__ = [ "PineconeConfigurationError", diff --git a/pinecone/exceptions/exceptions.py b/pinecone/exceptions/exceptions.py index 3860dc8b..32eed99f 100644 --- a/pinecone/exceptions/exceptions.py +++ b/pinecone/exceptions/exceptions.py @@ -1,4 +1,143 @@ -from pinecone.openapi_support.exceptions import PineconeException +class PineconeException(Exception): + """The base exception class for all exceptions in the Pinecone Python SDK""" + + +class PineconeApiTypeError(PineconeException, TypeError): + def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None) -> None: + """Raises an exception for TypeErrors + + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list): a list of keys an indices to get to the + current_item + None if unset + valid_classes (tuple): the primitive classes that current item + should be an instance of + None if unset + key_type (bool): False if our value is a value in a dict + True if it is a key in a dict + False if our item is an item in a list + None if unset + """ + self.path_to_item = path_to_item + 
self.valid_classes = valid_classes + self.key_type = key_type + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(PineconeApiTypeError, self).__init__(full_msg) + + +class PineconeApiValueError(PineconeException, ValueError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list) the path to the exception in the + received_data dict. None if unset + """ + + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(PineconeApiValueError, self).__init__(full_msg) + + +class PineconeApiAttributeError(PineconeException, AttributeError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Raised when an attribute reference or assignment fails. + + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(PineconeApiAttributeError, self).__init__(full_msg) + + +class PineconeApiKeyError(PineconeException, KeyError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(PineconeApiKeyError, self).__init__(full_msg) + + +class PineconeApiException(PineconeException): + def __init__(self, status=None, reason=None, http_resp=None) -> None: + if http_resp: + self.status = http_resp.status + self.reason = http_resp.reason + self.body = http_resp.data + self.headers = http_resp.getheaders() + else: + self.status = status + 
self.reason = reason + self.body = None + self.headers = None + + def __str__(self): + """Custom error messages for exception""" + error_message = "({0})\nReason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format(self.headers) + + if self.body: + error_message += "HTTP response body: {0}\n".format(self.body) + + return error_message + + +class NotFoundException(PineconeApiException): + def __init__(self, status=None, reason=None, http_resp=None) -> None: + super(NotFoundException, self).__init__(status, reason, http_resp) + + +class UnauthorizedException(PineconeApiException): + def __init__(self, status=None, reason=None, http_resp=None) -> None: + super(UnauthorizedException, self).__init__(status, reason, http_resp) + + +class ForbiddenException(PineconeApiException): + def __init__(self, status=None, reason=None, http_resp=None) -> None: + super(ForbiddenException, self).__init__(status, reason, http_resp) + + +class ServiceException(PineconeApiException): + def __init__(self, status=None, reason=None, http_resp=None) -> None: + super(ServiceException, self).__init__(status, reason, http_resp) + + +def render_path(path_to_item): + """Returns a string representation of a path""" + result = "" + for pth in path_to_item: + if isinstance(pth, int): + result += "[{0}]".format(pth) + else: + result += "['{0}']".format(pth) + return result class PineconeProtocolError(PineconeException): diff --git a/pinecone/grpc/__init__.py b/pinecone/grpc/__init__.py index 350047ca..66adb916 100644 --- a/pinecone/grpc/__init__.py +++ b/pinecone/grpc/__init__.py @@ -49,7 +49,7 @@ from .config import GRPCClientConfig from .future import PineconeGrpcFuture -from pinecone.data.dataclasses import Vector, SparseValues +from pinecone.db_data.dataclasses import Vector, SparseValues from pinecone.core.grpc.protos.db_data_2025_01_pb2 import ( Vector as GRPCVector, diff --git a/pinecone/grpc/index_grpc.py 
b/pinecone/grpc/index_grpc.py index 9b68c0b6..bfaf8fff 100644 --- a/pinecone/grpc/index_grpc.py +++ b/pinecone/grpc/index_grpc.py @@ -21,7 +21,7 @@ QueryResponse, IndexDescription as DescribeIndexStatsResponse, ) -from pinecone.models.list_response import ListResponse as SimpleListResponse, Pagination +from pinecone.db_control.models.list_response import ListResponse as SimpleListResponse, Pagination from pinecone.core.grpc.protos.db_data_2025_01_pb2 import ( Vector as GRPCVector, QueryVector as GRPCQueryVector, @@ -38,11 +38,11 @@ SparseValues as GRPCSparseValues, ) from pinecone import Vector, SparseValues -from pinecone.data.query_results_aggregator import QueryNamespacesResults, QueryResultsAggregator +from pinecone.db_data.query_results_aggregator import QueryNamespacesResults, QueryResultsAggregator from pinecone.core.grpc.protos.db_data_2025_01_pb2_grpc import VectorServiceStub from .base import GRPCIndexBase from .future import PineconeGrpcFuture -from ..data.types import ( +from ..db_data.types import ( SparseVectorTypedDict, VectorTypedDict, VectorTuple, diff --git a/pinecone/grpc/pinecone.py b/pinecone/grpc/pinecone.py index b03b139a..28a13622 100644 --- a/pinecone/grpc/pinecone.py +++ b/pinecone/grpc/pinecone.py @@ -1,5 +1,5 @@ -from ..control.pinecone import Pinecone -from ..config.config import ConfigBuilder +from pinecone import Pinecone +from pinecone.config import ConfigBuilder from .index_grpc import GRPCIndex diff --git a/pinecone/grpc/sparse_values_factory.py b/pinecone/grpc/sparse_values_factory.py index 240cd8e1..5bb14685 100644 --- a/pinecone/grpc/sparse_values_factory.py +++ b/pinecone/grpc/sparse_values_factory.py @@ -3,8 +3,8 @@ from ..utils import convert_to_list -from ..data import SparseValuesTypeError, SparseValuesMissingKeysError -from ..data.types import SparseVectorTypedDict +from ..db_data import SparseValuesTypeError, SparseValuesMissingKeysError +from ..db_data.types import SparseVectorTypedDict from 
pinecone.core.grpc.protos.db_data_2025_01_pb2 import SparseValues as GRPCSparseValues from pinecone.core.openapi.db_data.models import SparseValues as OpenApiSparseValues diff --git a/pinecone/grpc/utils.py b/pinecone/grpc/utils.py index dcd19710..c2869e73 100644 --- a/pinecone/grpc/utils.py +++ b/pinecone/grpc/utils.py @@ -13,7 +13,7 @@ IndexDescription as DescribeIndexStatsResponse, NamespaceSummary, ) -from pinecone.data.dataclasses import FetchResponse +from pinecone.db_data.dataclasses import FetchResponse from google.protobuf.struct_pb2 import Struct diff --git a/pinecone/grpc/vector_factory_grpc.py b/pinecone/grpc/vector_factory_grpc.py index 1fe9572b..22efd269 100644 --- a/pinecone/grpc/vector_factory_grpc.py +++ b/pinecone/grpc/vector_factory_grpc.py @@ -8,13 +8,13 @@ from .utils import dict_to_proto_struct from ..utils import fix_tuple_length, convert_to_list from ..utils.constants import REQUIRED_VECTOR_FIELDS, OPTIONAL_VECTOR_FIELDS -from ..data import ( +from ..db_data import ( VectorDictionaryMissingKeysError, VectorDictionaryExcessKeysError, VectorTupleLengthError, MetadataDictionaryExpectedError, ) -from ..data.types import VectorTuple, VectorTypedDict +from ..db_data.types import VectorTuple, VectorTypedDict from .sparse_values_factory import SparseValuesFactory from pinecone.core.grpc.protos.db_data_2025_01_pb2 import ( diff --git a/pinecone/data/features/inference/__init__.py b/pinecone/inference/__init__.py similarity index 100% rename from pinecone/data/features/inference/__init__.py rename to pinecone/inference/__init__.py diff --git a/pinecone/data/features/inference/inference.py b/pinecone/inference/inference.py similarity index 100% rename from pinecone/data/features/inference/inference.py rename to pinecone/inference/inference.py diff --git a/pinecone/data/features/inference/inference_asyncio.py b/pinecone/inference/inference_asyncio.py similarity index 100% rename from pinecone/data/features/inference/inference_asyncio.py rename to 
pinecone/inference/inference_asyncio.py diff --git a/pinecone/data/features/inference/inference_request_builder.py b/pinecone/inference/inference_request_builder.py similarity index 100% rename from pinecone/data/features/inference/inference_request_builder.py rename to pinecone/inference/inference_request_builder.py diff --git a/pinecone/data/features/inference/models/__init__.py b/pinecone/inference/models/__init__.py similarity index 100% rename from pinecone/data/features/inference/models/__init__.py rename to pinecone/inference/models/__init__.py diff --git a/pinecone/data/features/inference/models/embedding_list.py b/pinecone/inference/models/embedding_list.py similarity index 100% rename from pinecone/data/features/inference/models/embedding_list.py rename to pinecone/inference/models/embedding_list.py diff --git a/pinecone/models/index_embed.py b/pinecone/inference/models/index_embed.py similarity index 94% rename from pinecone/models/index_embed.py rename to pinecone/inference/models/index_embed.py index 4d1ccfe3..4c3306d0 100644 --- a/pinecone/models/index_embed.py +++ b/pinecone/inference/models/index_embed.py @@ -1,8 +1,8 @@ from dataclasses import dataclass from typing import Optional, Dict, Any, Union -from ..enums import Metric -from ..data.features.inference import EmbedModel +from pinecone.db_control.enums import Metric +from pinecone.inference.inference_request_builder import EmbedModel @dataclass(frozen=True) diff --git a/pinecone/data/features/inference/models/rerank_result.py b/pinecone/inference/models/rerank_result.py similarity index 100% rename from pinecone/data/features/inference/models/rerank_result.py rename to pinecone/inference/models/rerank_result.py diff --git a/pinecone/data/features/inference/repl_overrides.py b/pinecone/inference/repl_overrides.py similarity index 100% rename from pinecone/data/features/inference/repl_overrides.py rename to pinecone/inference/repl_overrides.py diff --git 
a/pinecone/control/langchain_import_warnings.py b/pinecone/langchain_import_warnings.py similarity index 100% rename from pinecone/control/langchain_import_warnings.py rename to pinecone/langchain_import_warnings.py diff --git a/pinecone/control/legacy_pinecone_interface.py b/pinecone/legacy_pinecone_interface.py similarity index 99% rename from pinecone/control/legacy_pinecone_interface.py rename to pinecone/legacy_pinecone_interface.py index a6a7779a..0b097261 100644 --- a/pinecone/control/legacy_pinecone_interface.py +++ b/pinecone/legacy_pinecone_interface.py @@ -3,7 +3,7 @@ from typing import Optional, Dict, Union, TYPE_CHECKING if TYPE_CHECKING: - from pinecone.models import ( + from pinecone.db_control.models import ( ServerlessSpec, PodSpec, IndexList, @@ -11,7 +11,7 @@ IndexModel, IndexEmbed, ) - from pinecone.enums import ( + from pinecone.db_control.enums import ( Metric, VectorType, DeletionProtection, @@ -21,7 +21,7 @@ GcpRegion, AzureRegion, ) - from .types import CreateIndexForModelEmbedTypedDict + from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict class LegacyPineconeDBControlInterface(ABC): diff --git a/pinecone/openapi_support/__init__.py b/pinecone/openapi_support/__init__.py index 63e3fb0a..890c3007 100644 --- a/pinecone/openapi_support/__init__.py +++ b/pinecone/openapi_support/__init__.py @@ -8,7 +8,7 @@ from .endpoint_utils import ExtraOpenApiKwargsTypedDict, KwargsWithOpenApiKwargDefaultsTypedDict from .asyncio_api_client import AsyncioApiClient from .asyncio_endpoint import AsyncioEndpoint -from .configuration import Configuration +from .configuration_lazy import Configuration from .exceptions import ( PineconeException, PineconeApiAttributeError, diff --git a/pinecone/openapi_support/api_client.py b/pinecone/openapi_support/api_client.py index 421d56cc..9e1f1f7e 100644 --- a/pinecone/openapi_support/api_client.py +++ b/pinecone/openapi_support/api_client.py @@ -1,14 +1,14 @@ import atexit -from multiprocessing.pool 
import ThreadPool -from concurrent.futures import ThreadPoolExecutor import io -from typing import Optional, List, Tuple, Dict, Any, Union -from .deserializer import Deserializer +from typing import Optional, List, Tuple, Dict, Any, Union, TYPE_CHECKING +if TYPE_CHECKING: + from multiprocessing.pool import ThreadPool + from concurrent.futures import ThreadPoolExecutor from .rest_urllib3 import Urllib3RestClient -from .configuration import Configuration +from ..config.openapi_configuration import Configuration from .exceptions import PineconeApiValueError, PineconeApiException from .api_client_utils import ( parameters_to_tuples, @@ -30,8 +30,8 @@ class ApiClient(object): to the API. More threads means more concurrent API requests. """ - _pool: Optional[ThreadPool] = None - _threadpool_executor: Optional[ThreadPoolExecutor] = None + _pool: Optional["ThreadPool"] = None + _threadpool_executor: Optional["ThreadPoolExecutor"] = None def __init__( self, configuration: Optional[Configuration] = None, pool_threads: Optional[int] = 1 @@ -64,18 +64,20 @@ def close(self): atexit.unregister(self.close) @property - def pool(self): + def pool(self) -> "ThreadPool": """Create thread pool on first request avoids instantiating unused threadpool for blocking clients. 
""" if self._pool is None: + from multiprocessing.pool import ThreadPool atexit.register(self.close) self._pool = ThreadPool(self.pool_threads) return self._pool @property - def threadpool_executor(self): + def threadpool_executor(self) -> "ThreadPoolExecutor": if self._threadpool_executor is None: + from concurrent.futures import ThreadPoolExecutor self._threadpool_executor = ThreadPoolExecutor(max_workers=self.pool_threads) return self._threadpool_executor @@ -186,6 +188,7 @@ def __call_api( # deserialize response data if response_type: + from .deserializer import Deserializer Deserializer.decode_response(response_type=response_type, response=response_data) return_data = Deserializer.deserialize( response=response_data, diff --git a/pinecone/openapi_support/asyncio_api_client.py b/pinecone/openapi_support/asyncio_api_client.py index 51f2e0ce..43c8e17b 100644 --- a/pinecone/openapi_support/asyncio_api_client.py +++ b/pinecone/openapi_support/asyncio_api_client.py @@ -7,7 +7,7 @@ from .rest_aiohttp import AiohttpRestClient -from .configuration import Configuration +from ..config.openapi_configuration import Configuration from .exceptions import PineconeApiValueError, PineconeApiException from .api_client_utils import ( parameters_to_tuples, diff --git a/pinecone/openapi_support/configuration.py b/pinecone/openapi_support/configuration.py index fb6d7d19..cd31b0b9 100644 --- a/pinecone/openapi_support/configuration.py +++ b/pinecone/openapi_support/configuration.py @@ -1,441 +1 @@ -import copy -import logging -import multiprocessing - -from http import client as http_client -from .exceptions import PineconeApiValueError -from typing import TypedDict - - -class HostSetting(TypedDict): - url: str - description: str - - -JSON_SCHEMA_VALIDATION_KEYWORDS = { - "multipleOf", - "maximum", - "exclusiveMaximum", - "minimum", - "exclusiveMinimum", - "maxLength", - "minLength", - "pattern", - "maxItems", - "minItems", -} - - -class Configuration: - """Class to hold the 
configuration of the API client. - - :param host: Base url - :param api_key: Dict to store API key(s). - Each entry in the dict specifies an API key. - The dict key is the name of the security scheme in the OAS specification. - The dict value is the API key secret. - :param api_key_prefix: Dict to store API prefix (e.g. Bearer) - The dict key is the name of the security scheme in the OAS specification. - The dict value is an API key prefix when generating the auth data. - :param discard_unknown_keys: Boolean value indicating whether to discard - unknown properties. A server may send a response that includes additional - properties that are not known by the client in the following scenarios: - 1. The OpenAPI document is incomplete, i.e. it does not match the server - implementation. - 2. The client was generated using an older version of the OpenAPI document - and the server has been upgraded since then. - If a schema in the OpenAPI document defines the additionalProperties attribute, - then all undeclared properties received by the server are injected into the - additional properties map. In that case, there are undeclared properties, and - nothing to discard. - :param disabled_client_side_validations (string): Comma-separated list of - JSON schema validation keywords to disable JSON schema structural validation - rules. The following keywords may be specified: multipleOf, maximum, - exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern, - maxItems, minItems. - By default, the validation is performed for data generated locally by the client - and data received from the server, independent of any validation performed by - the server side. If the input data does not satisfy the JSON schema validation - rules specified in the OpenAPI document, an exception is raised. - If disabled_client_side_validations is set, structural validation is - disabled. 
This can be useful to troubleshoot data validation problem, such as - when the OpenAPI document validation rules do not match the actual API data - received by the server. - :param server_operation_index: Mapping from operation ID to an index to server - configuration. - :param server_operation_variables: Mapping from operation ID to a mapping with - string values to replace variables in templated server configuration. - The validation of enums is performed for variables with defined enum values before. - :param ssl_ca_cert: str - the path to a file of concatenated CA certificates - in PEM format - - :Example: - - API Key Authentication Example. - Given the following security scheme in the OpenAPI specification: - components: - securitySchemes: - cookieAuth: # name for the security scheme - type: apiKey - in: cookie - name: JSESSIONID # cookie name - - You can programmatically set the cookie: - - conf = pinecone.openapi_support.Configuration( - api_key={'cookieAuth': 'abc123'} - api_key_prefix={'cookieAuth': 'JSESSIONID'} - ) - - The following cookie will be added to the HTTP request: - Cookie: JSESSIONID abc123 - """ - - _default = None - - def __init__( - self, - host=None, - api_key=None, - api_key_prefix=None, - discard_unknown_keys=False, - disabled_client_side_validations="", - server_index=None, - server_variables=None, - server_operation_index=None, - server_operation_variables=None, - ssl_ca_cert=None, - ): - """Constructor""" - self._base_path = "https://api.pinecone.io" if host is None else host - """Default Base url - """ - self.server_index = 0 if server_index is None and host is None else server_index - self.server_operation_index = server_operation_index or {} - """Default server index - """ - self.server_variables = server_variables or {} - self.server_operation_variables = server_operation_variables or {} - """Default server variables - """ - self.temp_folder_path = None - """Temp file folder for downloading files - """ - # Authentication Settings 
- self.api_key = {} - if api_key: - self.api_key = api_key - """dict to store API key(s) - """ - self.api_key_prefix = {} - if api_key_prefix: - self.api_key_prefix = api_key_prefix - """dict to store API prefix (e.g. Bearer) - """ - self.refresh_api_key_hook = None - """function hook to refresh API key if expired - """ - self.discard_unknown_keys = discard_unknown_keys - self.disabled_client_side_validations = disabled_client_side_validations - self.logger = {} - """Logging Settings - """ - self.logger["package_logger"] = logging.getLogger("pinecone.openapi_support") - self.logger["urllib3_logger"] = logging.getLogger("urllib3") - self.logger_format = "%(asctime)s %(levelname)s %(message)s" - """Log format - """ - self.logger_stream_handler = None - """Log stream handler - """ - self.logger_file_handler = None - """Log file handler - """ - self.logger_file = None - """Debug file location - """ - self.debug = False - """Debug switch - """ - - self.verify_ssl = True - """SSL/TLS verification - Set this to false to skip verifying SSL certificate when calling API - from https server. - """ - self.ssl_ca_cert = ssl_ca_cert - """Set this to customize the certificate file to verify the peer. - """ - self.cert_file = None - """client certificate file - """ - self.key_file = None - """client key file - """ - self.assert_hostname = None - """Set this to True/False to enable/disable SSL hostname verification. - """ - - self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 - """urllib3 connection pool's maximum number of connections saved - per pool. urllib3 uses 1 connection as default value, but this is - not the best value when you are making a lot of possibly parallel - requests to the same host, which is often the case here. - cpu_count * 5 is used as default value to increase performance. 
- """ - - self.proxy = None - """Proxy URL - """ - self.proxy_headers = None - """Proxy headers - """ - self.safe_chars_for_path_param = "" - """Safe chars for path_param - """ - self.retries = None - """Adding retries to override urllib3 default value 3 - """ - # Enable client side validation - self.client_side_validation = True - - # Options to pass down to the underlying urllib3 socket - self.socket_options = None - - def __deepcopy__(self, memo): - cls = self.__class__ - result = cls.__new__(cls) - memo[id(self)] = result - for k, v in self.__dict__.items(): - if k not in ("logger", "logger_file_handler"): - setattr(result, k, copy.deepcopy(v, memo)) - # shallow copy of loggers - result.logger = copy.copy(self.logger) - # use setters to configure loggers - result.logger_file = self.logger_file - result.debug = self.debug - return result - - def __setattr__(self, name, value): - object.__setattr__(self, name, value) - if name == "disabled_client_side_validations": - s = set(filter(None, value.split(","))) - for v in s: - if v not in JSON_SCHEMA_VALIDATION_KEYWORDS: - raise PineconeApiValueError("Invalid keyword: '{0}''".format(v)) - self._disabled_client_side_validations = s - - @classmethod - def set_default(cls, default): - """Set default instance of configuration. - - It stores default configuration, which can be - returned by get_default_copy method. - - :param default: object of Configuration - """ - cls._default = copy.deepcopy(default) - - @classmethod - def get_default_copy(cls): - """Return new instance of configuration. - - This method returns newly created, based on default constructor, - object of Configuration class or returns a copy of default - configuration passed by the set_default method. - - :return: The configuration object. - """ - if cls._default is not None: - return copy.deepcopy(cls._default) - return Configuration() - - @property - def logger_file(self): - """The logger file. 
- - If the logger_file is None, then add stream handler and remove file - handler. Otherwise, add file handler and remove stream handler. - - :param value: The logger_file path. - :type: str - """ - return self.__logger_file - - @logger_file.setter - def logger_file(self, value): - """The logger file. - - If the logger_file is None, then add stream handler and remove file - handler. Otherwise, add file handler and remove stream handler. - - :param value: The logger_file path. - :type: str - """ - self.__logger_file = value - if self.__logger_file: - # If set logging file, - # then add file handler and remove stream handler. - self.logger_file_handler = logging.FileHandler(self.__logger_file) - self.logger_file_handler.setFormatter(self.logger_formatter) - for _, logger in self.logger.items(): - logger.addHandler(self.logger_file_handler) - - @property - def debug(self): - """Debug status - - :param value: The debug status, True or False. - :type: bool - """ - return self.__debug - - @debug.setter - def debug(self, value): - """Debug status - - :param value: The debug status, True or False. - :type: bool - """ - self.__debug = value - if self.__debug: - # if debug status is True, turn on debug logging - for _, logger in self.logger.items(): - logger.setLevel(logging.DEBUG) - # turn on http_client debug - http_client.HTTPConnection.debuglevel = 1 - else: - # if debug status is False, turn off debug logging, - # setting log level to default `logging.WARNING` - for _, logger in self.logger.items(): - logger.setLevel(logging.WARNING) - # turn off http_client debug - http_client.HTTPConnection.debuglevel = 0 - - @property - def logger_format(self): - """The logger format. - - The logger_formatter will be updated when sets logger_format. - - :param value: The format string. - :type: str - """ - return self.__logger_format - - @logger_format.setter - def logger_format(self, value): - """The logger format. - - The logger_formatter will be updated when sets logger_format. 
- - :param value: The format string. - :type: str - """ - self.__logger_format = value - self.logger_formatter = logging.Formatter(self.__logger_format) - - def get_api_key_with_prefix(self, identifier, alias=None): - """Gets API key (with prefix if set). - - :param identifier: The identifier of apiKey. - :param alias: The alternative identifier of apiKey. - :return: The token for api key authentication. - """ - if self.refresh_api_key_hook is not None: - self.refresh_api_key_hook(self) - key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None) - if key: - prefix = self.api_key_prefix.get(identifier) - if prefix: - return "%s %s" % (prefix, key) - else: - return key - - def auth_settings(self): - """Gets Auth Settings dict for api client. - - :return: The Auth Settings information dict. - """ - auth = {} - if "ApiKeyAuth" in self.api_key: - auth["ApiKeyAuth"] = { - "type": "api_key", - "in": "header", - "key": "Api-Key", - "value": self.get_api_key_with_prefix("ApiKeyAuth"), - } - return auth - - def get_host_settings(self): - """Gets an array of host settings - - :return: An array of host settings - """ - return [{"url": "https://api.pinecone.io", "description": "Production API endpoints"}] - - def get_host_from_settings(self, index, variables=None, servers=None): - """Gets host URL based on the index and variables - :param index: array index of the host settings - :param variables: hash of variable and the corresponding value - :param servers: an array of host settings or None - :return: URL based on host settings - """ - if index is None: - return self._base_path - - variables = {} if variables is None else variables - servers = self.get_host_settings() if servers is None else servers - - try: - server = servers[index] - except IndexError: - raise ValueError( - "Invalid index {0} when selecting the host settings. 
Must be less than {1}".format( - index, len(servers) - ) - ) - - url = server["url"] - - # go through variables and replace placeholders - for variable_name, variable in server.get("variables", {}).items(): - used_value = variables.get(variable_name, variable["default_value"]) - - if "enum_values" in variable and used_value not in variable["enum_values"]: - raise ValueError( - "The variable `{0}` in the host URL has invalid value {1}. Must be {2}.".format( - variable_name, variables[variable_name], variable["enum_values"] - ) - ) - - url = url.replace("{" + variable_name + "}", used_value) - - return url - - @property - def host(self): - """Return generated host.""" - return self.get_host_from_settings(self.server_index, variables=self.server_variables) - - @host.setter - def host(self, value): - """Fix base path.""" - self._base_path = value - self.server_index = None - - def __repr__(self): - attrs = [ - f"host={self.host}", - "api_key=***", - f"api_key_prefix={self.api_key_prefix}", - f"connection_pool_maxsize={self.connection_pool_maxsize}", - f"discard_unknown_keys={self.discard_unknown_keys}", - f"disabled_client_side_validations={self.disabled_client_side_validations}", - f"server_index={self.server_index}", - f"server_variables={self.server_variables}", - f"server_operation_index={self.server_operation_index}", - f"server_operation_variables={self.server_operation_variables}", - f"ssl_ca_cert={self.ssl_ca_cert}", - ] - return f"Configuration({', '.join(attrs)})" +from pinecone.config import OpenApiConfiguration as Configuration \ No newline at end of file diff --git a/pinecone/openapi_support/configuration_lazy.py b/pinecone/openapi_support/configuration_lazy.py new file mode 100644 index 00000000..7d278f9f --- /dev/null +++ b/pinecone/openapi_support/configuration_lazy.py @@ -0,0 +1,7 @@ +""" +Lazy import for the Configuration class to avoid loading the entire openapi_support package. 
+""" + +from ..config.openapi_configuration import Configuration + +__all__ = ["Configuration"] \ No newline at end of file diff --git a/pinecone/openapi_support/endpoint_utils.py b/pinecone/openapi_support/endpoint_utils.py index 13522e85..867232b6 100644 --- a/pinecone/openapi_support/endpoint_utils.py +++ b/pinecone/openapi_support/endpoint_utils.py @@ -2,7 +2,7 @@ from .exceptions import PineconeApiTypeError, PineconeApiValueError from typing import Optional, Dict, Tuple, TypedDict, List, Literal, Any from .types import PropertyValidationTypedDict -from .configuration import Configuration +from ..config.openapi_configuration import Configuration from .model_utils import validate_and_convert_types, check_allowed_values, check_validations diff --git a/pinecone/openapi_support/exceptions.py b/pinecone/openapi_support/exceptions.py index fcc37da3..987403f0 100644 --- a/pinecone/openapi_support/exceptions.py +++ b/pinecone/openapi_support/exceptions.py @@ -1,140 +1 @@ -class PineconeException(Exception): - """The base exception class for all exceptions in the Pinecone Python SDK""" - - -class PineconeApiTypeError(PineconeException, TypeError): - def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None) -> None: - """Raises an exception for TypeErrors - - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (list): a list of keys an indices to get to the - current_item - None if unset - valid_classes (tuple): the primitive classes that current item - should be an instance of - None if unset - key_type (bool): False if our value is a value in a dict - True if it is a key in a dict - False if our item is an item in a list - None if unset - """ - self.path_to_item = path_to_item - self.valid_classes = valid_classes - self.key_type = key_type - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(PineconeApiTypeError, self).__init__(full_msg) - - -class 
PineconeApiValueError(PineconeException, ValueError): - def __init__(self, msg, path_to_item=None) -> None: - """ - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (list) the path to the exception in the - received_data dict. None if unset - """ - - self.path_to_item = path_to_item - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(PineconeApiValueError, self).__init__(full_msg) - - -class PineconeApiAttributeError(PineconeException, AttributeError): - def __init__(self, msg, path_to_item=None) -> None: - """ - Raised when an attribute reference or assignment fails. - - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (None/list) the path to the exception in the - received_data dict - """ - self.path_to_item = path_to_item - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(PineconeApiAttributeError, self).__init__(full_msg) - - -class PineconeApiKeyError(PineconeException, KeyError): - def __init__(self, msg, path_to_item=None) -> None: - """ - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (None/list) the path to the exception in the - received_data dict - """ - self.path_to_item = path_to_item - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(PineconeApiKeyError, self).__init__(full_msg) - - -class PineconeApiException(PineconeException): - def __init__(self, status=None, reason=None, http_resp=None) -> None: - if http_resp: - self.status = http_resp.status - self.reason = http_resp.reason - self.body = http_resp.data - self.headers = http_resp.getheaders() - else: - self.status = status - self.reason = reason - self.body = None - self.headers = None - - def __str__(self): - """Custom error messages for exception""" - error_message = "({0})\nReason: {1}\n".format(self.status, self.reason) - if self.headers: - 
error_message += "HTTP response headers: {0}\n".format(self.headers) - - if self.body: - error_message += "HTTP response body: {0}\n".format(self.body) - - return error_message - - -class NotFoundException(PineconeApiException): - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(NotFoundException, self).__init__(status, reason, http_resp) - - -class UnauthorizedException(PineconeApiException): - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(UnauthorizedException, self).__init__(status, reason, http_resp) - - -class ForbiddenException(PineconeApiException): - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(ForbiddenException, self).__init__(status, reason, http_resp) - - -class ServiceException(PineconeApiException): - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(ServiceException, self).__init__(status, reason, http_resp) - - -def render_path(path_to_item): - """Returns a string representation of a path""" - result = "" - for pth in path_to_item: - if isinstance(pth, int): - result += "[{0}]".format(pth) - else: - result += "['{0}']".format(pth) - return result +from pinecone.exceptions import * \ No newline at end of file diff --git a/pinecone/openapi_support/model_utils.py b/pinecone/openapi_support/model_utils.py index 4fc4cf0f..163f94b4 100644 --- a/pinecone/openapi_support/model_utils.py +++ b/pinecone/openapi_support/model_utils.py @@ -1,5 +1,4 @@ from datetime import date, datetime # noqa: F401 -from dateutil.parser import parse import inspect import io @@ -1149,6 +1148,8 @@ def deserialize_primitive(data, klass, path_to_item): additional_message = "" try: if klass in {datetime, date}: + from dateutil.parser import parse + additional_message = ( "If you need your parameter to have a fallback " "string value, please set its type as `type: {}` in your " diff --git a/pinecone/openapi_support/rest_aiohttp.py 
b/pinecone/openapi_support/rest_aiohttp.py index c7121a11..3cab099a 100644 --- a/pinecone/openapi_support/rest_aiohttp.py +++ b/pinecone/openapi_support/rest_aiohttp.py @@ -2,7 +2,7 @@ import certifi import json from .rest_utils import RestClientInterface, RESTResponse, raise_exceptions_or_return -from .configuration import Configuration +from ..config.openapi_configuration import Configuration class AiohttpRestClient(RestClientInterface): diff --git a/pinecone/openapi_support/rest_urllib3.py b/pinecone/openapi_support/rest_urllib3.py index 85d008da..0c1a1c5a 100644 --- a/pinecone/openapi_support/rest_urllib3.py +++ b/pinecone/openapi_support/rest_urllib3.py @@ -4,7 +4,7 @@ import os from typing import Optional from urllib.parse import urlencode, quote -from .configuration import Configuration +from ..config.openapi_configuration import Configuration from .rest_utils import raise_exceptions_or_return, RESTResponse, RestClientInterface import urllib3 diff --git a/pinecone/control/pinecone.py b/pinecone/pinecone.py similarity index 93% rename from pinecone/control/pinecone.py rename to pinecone/pinecone.py index 87ae5b9d..a694dcb0 100644 --- a/pinecone/control/pinecone.py +++ b/pinecone/pinecone.py @@ -6,21 +6,21 @@ from pinecone.config import PineconeConfig, ConfigBuilder -from pinecone.utils import normalize_host, PluginAware +from pinecone.utils import normalize_host, PluginAware, docslinks from .langchain_import_warnings import _build_langchain_attribute_error_message -from pinecone.utils import docslinks logger = logging.getLogger(__name__) """ @private """ if TYPE_CHECKING: - from .db_control import DBControl - from pinecone.data import ( + from pinecone.db_data import ( _Index as Index, _Inference as Inference, _IndexAsyncio as IndexAsyncio, ) - from pinecone.enums import ( + from pinecone.db_control import DBControl + from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict + from pinecone.db_control.enums import ( Metric, VectorType, 
DeletionProtection, @@ -30,7 +30,7 @@ GcpRegion, AzureRegion, ) - from pinecone.models import ( + from pinecone.db_control.models import ( ServerlessSpec, PodSpec, IndexModel, @@ -38,7 +38,6 @@ CollectionList, IndexEmbed, ) - from .types import CreateIndexForModelEmbedTypedDict class Pinecone(PluginAware, LegacyPineconeDBControlInterface): @@ -95,8 +94,7 @@ def __init__( self._db_control: Optional["DBControl"] = None # Lazy initialization """ @private """ - # Initialize PluginAware first, which will then call PineconeDBControlInterface.__init__ - super().__init__() + super().__init__() # Initialize PluginAware @property def inference(self) -> "Inference": @@ -104,7 +102,7 @@ def inference(self) -> "Inference": Inference is a namespace where an instance of the `pinecone.data.features.inference.inference.Inference` class is lazily created and cached. """ if self._inference is None: - from pinecone.data import _Inference + from pinecone.db_data import _Inference self._inference = _Inference(config=self.config, openapi_config=self.openapi_config) return self._inference @@ -115,7 +113,7 @@ def db(self) -> "DBControl": DBControl is a namespace where an instance of the `pinecone.control.db_control.DBControl` class is lazily created and cached. 
""" if self._db_control is None: - from .db_control import DBControl + from pinecone.db_control import DBControl self._db_control = DBControl( config=self.config, @@ -221,7 +219,7 @@ def from_documents(*args, **kwargs): raise AttributeError(_build_langchain_attribute_error_message("from_documents")) def Index(self, name: str = "", host: str = "", **kwargs) -> "Index": - from pinecone.data import _Index + from pinecone.db_data import _Index if name == "" and host == "": raise ValueError("Either name or host must be specified") @@ -249,7 +247,7 @@ def Index(self, name: str = "", host: str = "", **kwargs) -> "Index": ) def IndexAsyncio(self, host: str, **kwargs) -> "IndexAsyncio": - from pinecone.data import _IndexAsyncio + from pinecone.db_data import _IndexAsyncio api_key = self.config.api_key openapi_config = self.openapi_config diff --git a/pinecone/control/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py similarity index 91% rename from pinecone/control/pinecone_asyncio.py rename to pinecone/pinecone_asyncio.py index efd5ca5e..c0b552a4 100644 --- a/pinecone/control/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -6,14 +6,13 @@ from pinecone.utils import normalize_host from pinecone.utils import docslinks -from .request_factory import PineconeDBControlRequestFactory from .pinecone_interface_asyncio import PineconeAsyncioDBControlInterface from .pinecone import check_realistic_host if TYPE_CHECKING: - from .types import CreateIndexForModelEmbedTypedDict - from pinecone.data import _IndexAsyncio - from pinecone.enums import ( + from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict + from pinecone.db_data import _IndexAsyncio + from pinecone.db_control.enums import ( Metric, VectorType, DeletionProtection, @@ -23,7 +22,7 @@ GcpRegion, AzureRegion, ) - from pinecone.models import ( + from pinecone.db_control.models import ( ServerlessSpec, PodSpec, IndexModel, @@ -151,7 +150,7 @@ async def main(): def inference(self): """Dynamically 
create and cache the AsyncioInference instance.""" if self._inference is None: - from pinecone.data import _AsyncioInference + from pinecone.db_data import _AsyncioInference self._inference = _AsyncioInference(api_client=self.index_api.api_client) return self._inference @@ -159,7 +158,7 @@ def inference(self): @property def db(self): if self._db_control is None: - from .db_control_asyncio import DBControlAsyncio + from .db_control.db_control_asyncio import DBControlAsyncio self._db_control = DBControlAsyncio( config=self.config, @@ -181,16 +180,16 @@ async def create_index( vector_type: Optional[Union["VectorType", str]] = "VectorType.DENSE", tags: Optional[Dict[str, str]] = None, ) -> "IndexModel": - req = PineconeDBControlRequestFactory.create_index_request( - name=name, - spec=spec, - dimension=dimension, - metric=metric, - deletion_protection=deletion_protection, - vector_type=vector_type, + resp = await self.db.index.create( + name=name, + spec=spec, + dimension=dimension, + metric=metric, + deletion_protection=deletion_protection, + vector_type=vector_type, tags=tags, + timeout=timeout ) - resp = await self.db.index.create(create_index_request=req) return resp async def create_index_for_model( @@ -256,7 +255,7 @@ async def describe_collection(self, name: str): return await self.db.collection.describe(name=name) def IndexAsyncio(self, host: str, **kwargs) -> "_IndexAsyncio": - from pinecone.data import _IndexAsyncio + from pinecone.db_data import _IndexAsyncio api_key = self.config.api_key openapi_config = self.openapi_config diff --git a/pinecone/control/pinecone_interface_asyncio.py b/pinecone/pinecone_interface_asyncio.py similarity index 99% rename from pinecone/control/pinecone_interface_asyncio.py rename to pinecone/pinecone_interface_asyncio.py index 139af7a5..31d1feba 100644 --- a/pinecone/control/pinecone_interface_asyncio.py +++ b/pinecone/pinecone_interface_asyncio.py @@ -7,7 +7,7 @@ from pinecone.core.openapi.db_control.api.manage_indexes_api 
import ManageIndexesApi - from pinecone.models import ( + from pinecone.db_control.models import ( ServerlessSpec, PodSpec, IndexList, @@ -15,7 +15,7 @@ IndexModel, IndexEmbed, ) - from pinecone.enums import ( + from pinecone.db_control.enums import ( Metric, VectorType, DeletionProtection, @@ -25,7 +25,7 @@ GcpRegion, AzureRegion, ) - from .types import CreateIndexForModelEmbedTypedDict + from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict class PineconeAsyncioDBControlInterface(ABC): diff --git a/pinecone/utils/docslinks.py b/pinecone/utils/docslinks.py index a86dd1da..bab6a151 100644 --- a/pinecone/utils/docslinks.py +++ b/pinecone/utils/docslinks.py @@ -1,10 +1,11 @@ -from pinecone.core.openapi.db_control import API_VERSION +def versioned_url(template: str): + return lambda version: template.format(version) docslinks = { "README": "https://github.com/pinecone-io/pinecone-python-client/blob/main/README.md", "GITHUB_REPO": "https://github.com/pinecone-io/pinecone-python-client", "LANGCHAIN_IMPORT_KB_ARTICLE": "https://docs.pinecone.io/troubleshooting/pinecone-attribute-errors-with-langchain", - "API_DESCRIBE_INDEX": "https://docs.pinecone.io/reference/api/{}/control-plane/describe_index".format( - API_VERSION + "API_DESCRIBE_INDEX": versioned_url( + "https://docs.pinecone.io/reference/api/{}/control-plane/describe_index", ), } diff --git a/pinecone/utils/plugin_aware.py b/pinecone/utils/plugin_aware.py index 8410397a..92093fcb 100644 --- a/pinecone/utils/plugin_aware.py +++ b/pinecone/utils/plugin_aware.py @@ -1,7 +1,7 @@ from typing import Any from .setup_openapi_client import build_plugin_setup_client from pinecone.config import Config -from pinecone.openapi_support.configuration import Configuration as OpenApiConfig +from pinecone.config.openapi_configuration import Configuration as OpenApiConfig from pinecone_plugin_interface import load_and_install as install_plugins import logging diff --git 
a/tests/integration/data/test_query_namespaces_sparse.py b/tests/integration/data/test_query_namespaces_sparse.py index 607798ea..958368b5 100644 --- a/tests/integration/data/test_query_namespaces_sparse.py +++ b/tests/integration/data/test_query_namespaces_sparse.py @@ -1,6 +1,6 @@ import pytest from ..helpers import random_string, poll_stats_for_namespace -from pinecone.data.query_results_aggregator import QueryResultsAggregatorInvalidTopKError +from pinecone.db_data.query_results_aggregator import QueryResultsAggregatorInvalidTopKError from pinecone import Vector, SparseValues diff --git a/tests/integration/data/test_search_and_upsert_records.py b/tests/integration/data/test_search_and_upsert_records.py index e83a5cd8..0a269a49 100644 --- a/tests/integration/data/test_search_and_upsert_records.py +++ b/tests/integration/data/test_search_and_upsert_records.py @@ -6,7 +6,7 @@ import os from pinecone import RerankModel, PineconeApiException -from pinecone.data import _Index +from pinecone.db_data import _Index logger = logging.getLogger(__name__) diff --git a/tests/integration/data/test_upsert_from_dataframe.py b/tests/integration/data/test_upsert_from_dataframe.py index 49bc9abc..4534bc4f 100644 --- a/tests/integration/data/test_upsert_from_dataframe.py +++ b/tests/integration/data/test_upsert_from_dataframe.py @@ -1,5 +1,5 @@ import pandas as pd -from pinecone.data import _Index +from pinecone.db_data import _Index from ..helpers import embedding_values, random_string diff --git a/tests/integration/data_asyncio/conftest.py b/tests/integration/data_asyncio/conftest.py index 6401e073..9769a5e9 100644 --- a/tests/integration/data_asyncio/conftest.py +++ b/tests/integration/data_asyncio/conftest.py @@ -2,7 +2,7 @@ import json import asyncio from ..helpers import get_environment_var, generate_index_name -from pinecone.data import _IndexAsyncio +from pinecone.db_data import _IndexAsyncio import logging from typing import Callable, Optional, Awaitable, Union diff --git 
a/tests/integration/helpers/helpers.py b/tests/integration/helpers/helpers.py index 480585e5..6688f288 100644 --- a/tests/integration/helpers/helpers.py +++ b/tests/integration/helpers/helpers.py @@ -7,7 +7,7 @@ from typing import Any from datetime import datetime import json -from pinecone.data import _Index +from pinecone.db_data import _Index from typing import List logger = logging.getLogger(__name__) diff --git a/tests/perf/test_query_results_aggregator.py b/tests/perf/test_query_results_aggregator.py index 29ac4c35..9f33c149 100644 --- a/tests/perf/test_query_results_aggregator.py +++ b/tests/perf/test_query_results_aggregator.py @@ -1,5 +1,5 @@ import random -from pinecone.data.query_results_aggregator import QueryResultsAggregator +from pinecone.db_data.query_results_aggregator import QueryResultsAggregator def fake_results(i): diff --git a/tests/unit/data/test_bulk_import.py b/tests/unit/data/test_bulk_import.py index b1bcd4cc..c7ad5a14 100644 --- a/tests/unit/data/test_bulk_import.py +++ b/tests/unit/data/test_bulk_import.py @@ -6,7 +6,7 @@ ImportErrorMode as ImportErrorModeGeneratedClass, ) -from pinecone.data.features.bulk_import import ImportFeatureMixin, ImportErrorMode +from pinecone.db_data.features.bulk_import import ImportFeatureMixin, ImportErrorMode def build_client_w_faked_response(mocker, body: str, status: int = 200): diff --git a/tests/unit/data/test_request_factory.py b/tests/unit/data/test_request_factory.py index 087436c9..ea04acdf 100644 --- a/tests/unit/data/test_request_factory.py +++ b/tests/unit/data/test_request_factory.py @@ -1,5 +1,5 @@ import pytest -from pinecone.data.request_factory import ( +from pinecone.db_data.request_factory import ( IndexRequestFactory, SearchQuery, SearchQueryVector, diff --git a/tests/unit/data/test_vector_factory.py b/tests/unit/data/test_vector_factory.py index 52fd1eac..adeeaf9c 100644 --- a/tests/unit/data/test_vector_factory.py +++ b/tests/unit/data/test_vector_factory.py @@ -2,7 +2,7 @@ import 
pandas as pd import pytest -from pinecone.data.vector_factory import VectorFactory +from pinecone.db_data.vector_factory import VectorFactory from pinecone import Vector, SparseValues, ListConversionException from pinecone.core.openapi.db_data.models import ( Vector as OpenApiVector, diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index f33519b6..f1a00508 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -1,7 +1,7 @@ from pinecone import Pinecone from pinecone.exceptions.exceptions import PineconeConfigurationError from pinecone.config import PineconeConfig -from pinecone.openapi_support.configuration import Configuration as OpenApiConfiguration +from pinecone.config.openapi_configuration import Configuration as OpenApiConfiguration import pytest import os diff --git a/tests/unit/test_config_builder.py b/tests/unit/test_config_builder.py index 3122c080..7307f153 100644 --- a/tests/unit/test_config_builder.py +++ b/tests/unit/test_config_builder.py @@ -1,6 +1,6 @@ import pytest -from pinecone.openapi_support.configuration import Configuration as OpenApiConfiguration +from pinecone.config.openapi_configuration import Configuration as OpenApiConfiguration from pinecone.config import ConfigBuilder from pinecone import PineconeConfigurationError diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py index 72ed7422..6e880016 100644 --- a/tests/unit/test_index.py +++ b/tests/unit/test_index.py @@ -1,7 +1,7 @@ import pandas as pd import pytest -from pinecone.data import _Index +from pinecone.db_data import _Index import pinecone.core.openapi.db_data.models as oai from pinecone import QueryResponse, UpsertResponse, Vector diff --git a/tests/unit/test_query_results_aggregator.py b/tests/unit/test_query_results_aggregator.py index b40a11d2..d3c97f87 100644 --- a/tests/unit/test_query_results_aggregator.py +++ b/tests/unit/test_query_results_aggregator.py @@ -1,4 +1,4 @@ -from pinecone.data.query_results_aggregator import ( 
+from pinecone.db_data.query_results_aggregator import ( QueryResultsAggregator, QueryResultsAggregatorInvalidTopKError, ) diff --git a/tests/upgrade/test_v6_upgrade.py b/tests/upgrade/test_v6_upgrade.py new file mode 100644 index 00000000..3806576c --- /dev/null +++ b/tests/upgrade/test_v6_upgrade.py @@ -0,0 +1,174 @@ +import pinecone + +class TestV6Upgrade: + def test_v6_upgrade_root_imports(self): + v6_dir_items = [ + 'CollectionDescription', + 'CollectionList', + 'Config', + 'ConfigBuilder', + 'DeleteRequest', + 'DescribeIndexStatsRequest', + 'DescribeIndexStatsResponse', + 'FetchResponse', + 'ForbiddenException', + 'ImportErrorMode', + 'Index', + 'IndexList', + 'IndexModel', + 'ListConversionException', + 'MetadataDictionaryExpectedError', + 'NotFoundException', + 'Pinecone', + 'PineconeApiAttributeError', + 'PineconeApiException', + 'PineconeApiKeyError', + 'PineconeApiTypeError', + 'PineconeApiValueError', + 'PineconeConfig', + 'PineconeConfigurationError', + 'PineconeException', + 'PineconeProtocolError', + 'PodSpec', + 'PodSpecDefinition', + 'QueryRequest', + 'QueryResponse', + 'RpcStatus', + 'ScoredVector', + 'ServerlessSpec', + 'ServerlessSpecDefinition', + 'ServiceException', + 'SingleQueryResults', + 'SparseValues', + 'SparseValuesDictionaryExpectedError', + 'SparseValuesMissingKeysError', + 'SparseValuesTypeError', + 'TqdmExperimentalWarning', + 'UnauthorizedException', + 'UpdateRequest', + 'UpsertRequest', + 'UpsertResponse', + 'Vector', + 'VectorDictionaryExcessKeysError', + 'VectorDictionaryMissingKeysError', + 'VectorTupleLengthError', + '__builtins__', + '__cached__', + '__doc__', + '__file__', + '__loader__', + '__name__', + '__package__', + '__path__', + '__spec__', + '__version__', + 'config', + 'configure_index', + 'control', + 'core', + 'core_ea', + 'create_collection', + 'create_index', + 'data', + 'delete_collection', + 'delete_index', + 'deprecation_warnings', + 'describe_collection', + 'describe_index', + 'errors', + 'exceptions', + 
'features', + 'index', + 'index_host_store', + 'init', + 'install_repr_overrides', + 'langchain_import_warnings', + 'list_collections', + 'list_indexes', + 'logging', + 'models', + 'openapi', + 'os', + 'pinecone', + 'pinecone_config', + 'repr_overrides', + 'scale_index', + 'sparse_vector_factory', + 'utils', + 'vector_factory', + 'warnings' + ] + + missing_items = [] + for item in v6_dir_items: + if item not in dir(pinecone): + missing_items.append(item) + + assert len(missing_items) == 0, f"Missing items: {missing_items}" + + def test_v6_upgrade_data_imports(self): + v6_data_dir_items = [ + 'DescribeIndexStatsResponse', + 'EmbedModel', + 'FetchResponse', + 'ImportErrorMode', + 'Index', + 'IndexClientInstantiationError', + 'Inference', + 'InferenceInstantiationError', + 'MetadataDictionaryExpectedError', + 'QueryResponse', + 'RerankModel', + 'SearchQuery', + 'SearchQueryVector', + 'SearchRerank', + 'SparseValues', + 'SparseValuesDictionaryExpectedError', + 'SparseValuesMissingKeysError', + 'SparseValuesTypeError', + 'UpsertResponse', + 'Vector', + 'VectorDictionaryExcessKeysError', + 'VectorDictionaryMissingKeysError', + 'VectorTupleLengthError', + '_AsyncioInference', + '_Index', + '_IndexAsyncio', + '_Inference', + '__builtins__', + '__cached__', + '__doc__', + '__file__', + '__loader__', + '__name__', + '__package__', + '__path__', + '__spec__', + 'dataclasses', + 'errors', + 'features', + 'fetch_response', + 'import_error', + 'index', + 'index_asyncio', + 'index_asyncio_interface', + 'interfaces', + 'query_results_aggregator', + 'request_factory', + 'search_query', + 'search_query_vector', + 'search_rerank', + 'sparse_values', + 'sparse_values_factory', + 'types', + 'utils', + 'vector', + 'vector_factory' + ] + + missing_items = [] + for item in v6_data_dir_items: + if item not in dir(pinecone.db_data): + missing_items.append(item) + + assert len(missing_items) == 0, f"Missing items: {missing_items}" From 67323cb7866e22082cd680603ec826a35f4e652f Mon Sep 17 
00:00:00 2001 From: Jen Hamon Date: Wed, 9 Apr 2025 13:07:25 -0400 Subject: [PATCH 14/48] WIP --- pinecone/config/openapi_config_factory.py | 4 +- pinecone/db_control/enums/clouds.py | 24 +- pinecone/db_data/__init__.py | 12 +- pinecone/openapi_support/api_client.py | 3 + pinecone/openapi_support/configuration.py | 1 - .../openapi_support/configuration_lazy.py | 2 +- pinecone/openapi_support/exceptions.py | 2 +- pinecone/pinecone_asyncio.py | 14 +- pinecone/utils/docslinks.py | 3 +- tests/upgrade/test_v6_upgrade.py | 301 +++++++++--------- 10 files changed, 188 insertions(+), 178 deletions(-) diff --git a/pinecone/config/openapi_config_factory.py b/pinecone/config/openapi_config_factory.py index d7730458..56a1de64 100644 --- a/pinecone/config/openapi_config_factory.py +++ b/pinecone/config/openapi_config_factory.py @@ -71,9 +71,7 @@ def _get_socket_options( # Source: https://www.finbourne.com/blog/the-mysterious-hanging-client-tcp-keep-alives # urllib3.connection.HTTPConnection.default_socket_options - socket_params = [ - (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - ] + socket_params = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] if not do_keep_alive: return socket_params diff --git a/pinecone/db_control/enums/clouds.py b/pinecone/db_control/enums/clouds.py index 192b3da5..8903f40a 100644 --- a/pinecone/db_control/enums/clouds.py +++ b/pinecone/db_control/enums/clouds.py @@ -3,10 +3,10 @@ class CloudProvider(Enum): """Cloud providers available for use with Pinecone serverless indexes - + This list could expand or change over time as more cloud providers are supported. - Check the Pinecone documentation for the most up-to-date list of supported cloud - providers. If you want to use a cloud provider that is not listed here, you can + Check the Pinecone documentation for the most up-to-date list of supported cloud + providers. If you want to use a cloud provider that is not listed here, you can pass a string value directly without using this enum. 
""" @@ -17,10 +17,10 @@ class CloudProvider(Enum): class AwsRegion(Enum): """AWS (Amazon Web Services) regions available for use with Pinecone serverless indexes - + This list could expand or change over time as more regions are supported. - Check the Pinecone documentation for the most up-to-date list of supported - regions. If you want to use a region that is not listed here, you can + Check the Pinecone documentation for the most up-to-date list of supported + regions. If you want to use a region that is not listed here, you can pass a string value directly without using this enum. """ @@ -31,10 +31,10 @@ class AwsRegion(Enum): class GcpRegion(Enum): """GCP (Google Cloud Platform) regions available for use with Pinecone serverless indexes - + This list could expand or change over time as more regions are supported. - Check the Pinecone documentation for the most up-to-date list of supported - regions. If you want to use a region that is not listed here, you can + Check the Pinecone documentation for the most up-to-date list of supported + regions. If you want to use a region that is not listed here, you can pass a string value directly without using this enum. """ @@ -44,10 +44,10 @@ class GcpRegion(Enum): class AzureRegion(Enum): """Azure regions available for use with Pinecone serverless indexes - + This list could expand or change over time as more regions are supported. - Check the Pinecone documentation for the most up-to-date list of supported - regions. If you want to use a region that is not listed here, you can + Check the Pinecone documentation for the most up-to-date list of supported + regions. If you want to use a region that is not listed here, you can pass a string value directly without using this enum. 
""" diff --git a/pinecone/db_data/__init__.py b/pinecone/db_data/__init__.py index 7c76f04f..f2db9a63 100644 --- a/pinecone/db_data/__init__.py +++ b/pinecone/db_data/__init__.py @@ -30,6 +30,7 @@ import warnings + def _get_deprecated_import(name, from_module, to_module): warnings.warn( f"The import of `{name}` from `{from_module}` has moved to `{to_module}`. " @@ -39,15 +40,22 @@ def _get_deprecated_import(name, from_module, to_module): DeprecationWarning, ) # Import from the new location - from pinecone.inference import Inference as _Inference, AsyncioInference as _AsyncioInference, RerankModel, EmbedModel + from pinecone.inference import ( + Inference as _Inference, + AsyncioInference as _AsyncioInference, + RerankModel, + EmbedModel, + ) + return locals()[name] + moved = ["_Inference", "_AsyncioInference", "RerankModel", "EmbedModel"] + def __getattr__(name): if name in locals(): return locals()[name] elif name in moved: return _get_deprecated_import(name, "pinecone.data", "pinecone.inference") raise AttributeError(f"module '{__name__}' has no attribute '{name}'") - diff --git a/pinecone/openapi_support/api_client.py b/pinecone/openapi_support/api_client.py index 9e1f1f7e..ee1e4649 100644 --- a/pinecone/openapi_support/api_client.py +++ b/pinecone/openapi_support/api_client.py @@ -70,6 +70,7 @@ def pool(self) -> "ThreadPool": """ if self._pool is None: from multiprocessing.pool import ThreadPool + atexit.register(self.close) self._pool = ThreadPool(self.pool_threads) return self._pool @@ -78,6 +79,7 @@ def pool(self) -> "ThreadPool": def threadpool_executor(self) -> "ThreadPoolExecutor": if self._threadpool_executor is None: from concurrent.futures import ThreadPoolExecutor + self._threadpool_executor = ThreadPoolExecutor(max_workers=self.pool_threads) return self._threadpool_executor @@ -189,6 +191,7 @@ def __call_api( # deserialize response data if response_type: from .deserializer import Deserializer + 
Deserializer.decode_response(response_type=response_type, response=response_data) return_data = Deserializer.deserialize( response=response_data, diff --git a/pinecone/openapi_support/configuration.py b/pinecone/openapi_support/configuration.py index cd31b0b9..e69de29b 100644 --- a/pinecone/openapi_support/configuration.py +++ b/pinecone/openapi_support/configuration.py @@ -1 +0,0 @@ -from pinecone.config import OpenApiConfiguration as Configuration \ No newline at end of file diff --git a/pinecone/openapi_support/configuration_lazy.py b/pinecone/openapi_support/configuration_lazy.py index 7d278f9f..27e90a34 100644 --- a/pinecone/openapi_support/configuration_lazy.py +++ b/pinecone/openapi_support/configuration_lazy.py @@ -4,4 +4,4 @@ from ..config.openapi_configuration import Configuration -__all__ = ["Configuration"] \ No newline at end of file +__all__ = ["Configuration"] diff --git a/pinecone/openapi_support/exceptions.py b/pinecone/openapi_support/exceptions.py index 987403f0..5729b13e 100644 --- a/pinecone/openapi_support/exceptions.py +++ b/pinecone/openapi_support/exceptions.py @@ -1 +1 @@ -from pinecone.exceptions import * \ No newline at end of file +from pinecone.exceptions import * diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index c0b552a4..3da739f7 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -181,14 +181,14 @@ async def create_index( tags: Optional[Dict[str, str]] = None, ) -> "IndexModel": resp = await self.db.index.create( - name=name, - spec=spec, - dimension=dimension, - metric=metric, - deletion_protection=deletion_protection, - vector_type=vector_type, + name=name, + spec=spec, + dimension=dimension, + metric=metric, + deletion_protection=deletion_protection, + vector_type=vector_type, tags=tags, - timeout=timeout + timeout=timeout, ) return resp diff --git a/pinecone/utils/docslinks.py b/pinecone/utils/docslinks.py index bab6a151..cdfe66cd 100644 --- a/pinecone/utils/docslinks.py 
+++ b/pinecone/utils/docslinks.py @@ -1,11 +1,12 @@ def versioned_url(template: str): return lambda version: template.format(version) + docslinks = { "README": "https://github.com/pinecone-io/pinecone-python-client/blob/main/README.md", "GITHUB_REPO": "https://github.com/pinecone-io/pinecone-python-client", "LANGCHAIN_IMPORT_KB_ARTICLE": "https://docs.pinecone.io/troubleshooting/pinecone-attribute-errors-with-langchain", "API_DESCRIBE_INDEX": versioned_url( - "https://docs.pinecone.io/reference/api/{}/control-plane/describe_index", + "https://docs.pinecone.io/reference/api/{}/control-plane/describe_index" ), } diff --git a/tests/upgrade/test_v6_upgrade.py b/tests/upgrade/test_v6_upgrade.py index 3806576c..358ce0e5 100644 --- a/tests/upgrade/test_v6_upgrade.py +++ b/tests/upgrade/test_v6_upgrade.py @@ -1,102 +1,103 @@ import pinecone + class TestV6Upgrade: def test_v6_upgrade_root_imports(self): v6_dir_items = [ - 'CollectionDescription', - 'CollectionList', - 'Config', - 'ConfigBuilder', - 'DeleteRequest', - 'DescribeIndexStatsRequest', - 'DescribeIndexStatsResponse', - 'FetchResponse', - 'ForbiddenException', - 'ImportErrorMode', - 'Index', - 'IndexList', - 'IndexModel', - 'ListConversionException', - 'MetadataDictionaryExpectedError', - 'NotFoundException', - 'Pinecone', - 'PineconeApiAttributeError', - 'PineconeApiException', - 'PineconeApiKeyError', - 'PineconeApiTypeError', - 'PineconeApiValueError', - 'PineconeConfig', - 'PineconeConfigurationError', - 'PineconeException', - 'PineconeProtocolError', - 'PodSpec', - 'PodSpecDefinition', - 'QueryRequest', - 'QueryResponse', - 'RpcStatus', - 'ScoredVector', - 'ServerlessSpec', - 'ServerlessSpecDefinition', - 'ServiceException', - 'SingleQueryResults', - 'SparseValues', - 'SparseValuesDictionaryExpectedError', - 'SparseValuesMissingKeysError', - 'SparseValuesTypeError', - 'TqdmExperimentalWarning', - 'UnauthorizedException', - 'UpdateRequest', - 'UpsertRequest', - 'UpsertResponse', - 'Vector', - 
'VectorDictionaryExcessKeysError', - 'VectorDictionaryMissingKeysError', - 'VectorTupleLengthError', - '__builtins__', - '__cached__', - '__doc__', - '__file__', - '__loader__', - '__name__', - '__package__', - '__path__', - '__spec__', - '__version__', - 'config', - 'configure_index', - 'control', - 'core', - 'core_ea', - 'create_collection', - 'create_index', - 'data', - 'delete_collection', - 'delete_index', - 'deprecation_warnings', - 'describe_collection', - 'describe_index', - 'errors', - 'exceptions', - 'features', - 'index', - 'index_host_store', - 'init', - 'install_repr_overrides', - 'langchain_import_warnings', - 'list_collections', - 'list_indexes', - 'logging', - 'models', - 'openapi', - 'os', - 'pinecone', - 'pinecone_config', - 'repr_overrides', - 'scale_index', - 'sparse_vector_factory', - 'utils', - 'vector_factory', - 'warnings' + "CollectionDescription", + "CollectionList", + "Config", + "ConfigBuilder", + "DeleteRequest", + "DescribeIndexStatsRequest", + "DescribeIndexStatsResponse", + "FetchResponse", + "ForbiddenException", + "ImportErrorMode", + "Index", + "IndexList", + "IndexModel", + "ListConversionException", + "MetadataDictionaryExpectedError", + "NotFoundException", + "Pinecone", + "PineconeApiAttributeError", + "PineconeApiException", + "PineconeApiKeyError", + "PineconeApiTypeError", + "PineconeApiValueError", + "PineconeConfig", + "PineconeConfigurationError", + "PineconeException", + "PineconeProtocolError", + "PodSpec", + "PodSpecDefinition", + "QueryRequest", + "QueryResponse", + "RpcStatus", + "ScoredVector", + "ServerlessSpec", + "ServerlessSpecDefinition", + "ServiceException", + "SingleQueryResults", + "SparseValues", + "SparseValuesDictionaryExpectedError", + "SparseValuesMissingKeysError", + "SparseValuesTypeError", + "TqdmExperimentalWarning", + "UnauthorizedException", + "UpdateRequest", + "UpsertRequest", + "UpsertResponse", + "Vector", + "VectorDictionaryExcessKeysError", + "VectorDictionaryMissingKeysError", + 
"VectorTupleLengthError", + "__builtins__", + "__cached__", + "__doc__", + "__file__", + "__loader__", + "__name__", + "__package__", + "__path__", + "__spec__", + "__version__", + "config", + "configure_index", + "control", + "core", + "core_ea", + "create_collection", + "create_index", + "data", + "delete_collection", + "delete_index", + "deprecation_warnings", + "describe_collection", + "describe_index", + "errors", + "exceptions", + "features", + "index", + "index_host_store", + "init", + "install_repr_overrides", + "langchain_import_warnings", + "list_collections", + "list_indexes", + "logging", + "models", + "openapi", + "os", + "pinecone", + "pinecone_config", + "repr_overrides", + "scale_index", + "sparse_vector_factory", + "utils", + "vector_factory", + "warnings", ] missing_items = [] @@ -108,62 +109,62 @@ def test_v6_upgrade_root_imports(self): def test_v6_upgrade_data_imports(self): v6_data_dir_items = [ - 'DescribeIndexStatsResponse', - 'EmbedModel', - 'FetchResponse', - 'ImportErrorMode', - 'Index', - 'IndexClientInstantiationError', - 'Inference', - 'InferenceInstantiationError', - 'MetadataDictionaryExpectedError', - 'QueryResponse', - 'RerankModel', - 'SearchQuery', - 'SearchQueryVector', - 'SearchRerank', - 'SparseValues', - 'SparseValuesDictionaryExpectedError', - 'SparseValuesMissingKeysError', - 'SparseValuesTypeError', - 'UpsertResponse', - 'Vector', - 'VectorDictionaryExcessKeysError', - 'VectorDictionaryMissingKeysError', - 'VectorTupleLengthError', - '_AsyncioInference', - '_Index', - '_IndexAsyncio', - '_Inference', - '__builtins__', - '__cached__', - '__doc__', - '__file__', - '__loader__', - '__name__', - '__package__', - '__path__', - '__spec__', - 'dataclasses', - 'errors', - 'features', - 'fetch_response', - 'import_error', - 'index', - 'index_asyncio', - 'index_asyncio_interface', - 'interfaces', - 'query_results_aggregator', - 'request_factory', - 'search_query', - 'search_query_vector', - 'search_rerank', - 'sparse_values', - 
'sparse_values_factory', - 'types', - 'utils', - 'vector', - 'vector_factory' + "DescribeIndexStatsResponse", + "EmbedModel", + "FetchResponse", + "ImportErrorMode", + "Index", + "IndexClientInstantiationError", + "Inference", + "InferenceInstantiationError", + "MetadataDictionaryExpectedError", + "QueryResponse", + "RerankModel", + "SearchQuery", + "SearchQueryVector", + "SearchRerank", + "SparseValues", + "SparseValuesDictionaryExpectedError", + "SparseValuesMissingKeysError", + "SparseValuesTypeError", + "UpsertResponse", + "Vector", + "VectorDictionaryExcessKeysError", + "VectorDictionaryMissingKeysError", + "VectorTupleLengthError", + "_AsyncioInference", + "_Index", + "_IndexAsyncio", + "_Inference", + "__builtins__", + "__cached__", + "__doc__", + "__file__", + "__loader__", + "__name__", + "__package__", + "__path__", + "__spec__", + "dataclasses", + "errors", + "features", + "fetch_response", + "import_error", + "index", + "index_asyncio", + "index_asyncio_interface", + "interfaces", + "query_results_aggregator", + "request_factory", + "search_query", + "search_query_vector", + "search_rerank", + "sparse_values", + "sparse_values_factory", + "types", + "utils", + "vector", + "vector_factory", ] missing_items = [] From b7bdd4f471b25f249382c87326996cf2e09fe881 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Thu, 17 Apr 2025 15:45:44 -0400 Subject: [PATCH 15/48] WIP --- pinecone/__init__.py | 148 +++++++++++- pinecone/config/config.py | 2 +- pinecone/config/openapi_configuration.py | 2 +- pinecone/control/__init__.py | 9 + pinecone/data/__init__.py | 10 + pinecone/data/features/__init__.py | 10 + .../data/features/bulk_imports/__init__.py | 10 + .../features/inference/__init__.py | 3 +- pinecone/db_control/models/collection_list.py | 4 +- pinecone/db_control/models/index_list.py | 2 +- pinecone/db_control/request_factory.py | 31 ++- pinecone/db_data/dataclasses/search_rerank.py | 2 +- pinecone/db_data/features/__init__.py | 13 - 
pinecone/db_data/models/__init__.py | 1 + .../db_data/types/search_rerank_typed_dict.py | 2 +- pinecone/models/__init__.py | 9 + pinecone/pinecone.py | 13 +- pinecone/utils/find_legacy_imports.py | 143 +++++++++++ pinecone/utils/lazy_imports.py | 76 ++++++ pinecone/utils/legacy_imports.py | 112 +++++++++ tests/unit/test_control.py | 51 ++-- tests/unit/test_plugin_aware.py | 7 +- tests/upgrade/test_all.py | 28 +++ tests/upgrade/test_reorganization.py | 19 ++ tests/upgrade/test_v6_upgrade.py | 222 ++++++++++++------ 25 files changed, 789 insertions(+), 140 deletions(-) create mode 100644 pinecone/control/__init__.py create mode 100644 pinecone/data/__init__.py create mode 100644 pinecone/data/features/__init__.py create mode 100644 pinecone/data/features/bulk_imports/__init__.py rename pinecone/{db_data => data}/features/inference/__init__.py (53%) delete mode 100644 pinecone/db_data/features/__init__.py create mode 100644 pinecone/db_data/models/__init__.py create mode 100644 pinecone/models/__init__.py create mode 100755 pinecone/utils/find_legacy_imports.py create mode 100644 pinecone/utils/lazy_imports.py create mode 100644 pinecone/utils/legacy_imports.py create mode 100644 tests/upgrade/test_all.py create mode 100644 tests/upgrade/test_reorganization.py diff --git a/pinecone/__init__.py b/pinecone/__init__.py index 4af444d7..f7d8fce9 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -2,22 +2,160 @@ .. 
include:: ../pdoc/README.md """ -from .deprecated_plugins import check_for_deprecated_plugins +from .deprecated_plugins import check_for_deprecated_plugins as _check_for_deprecated_plugins from .deprecation_warnings import * from .pinecone import Pinecone from .pinecone_asyncio import PineconeAsyncio from .exceptions import * -# from .config import * -# from .db_control import * -# from .db_data import * from .utils import __version__ import logging +# Set up lazy import handling +from .utils.lazy_imports import setup_lazy_imports as _setup_lazy_imports + +_inference_lazy_imports = { + "RerankModel": ("pinecone.inference", "RerankModel"), + "EmbedModel": ("pinecone.inference", "EmbedModel"), +} + +_db_data_lazy_imports = { + "Vector": ("pinecone.db_data.models", "Vector"), + "FetchResponse": ("pinecone.db_data.models", "FetchResponse"), + "DeleteRequest": ("pinecone.db_data.models", "DeleteRequest"), + "DescribeIndexStatsRequest": ("pinecone.db_data.models", "DescribeIndexStatsRequest"), + "DescribeIndexStatsResponse": ("pinecone.db_data.models", "IndexDescription"), + "RpcStatus": ("pinecone.db_data.models", "RpcStatus"), + "ScoredVector": ("pinecone.db_data.models", "ScoredVector"), + "SingleQueryResults": ("pinecone.db_data.models", "SingleQueryResults"), + "QueryRequest": ("pinecone.db_data.models", "QueryRequest"), + "QueryResponse": ("pinecone.db_data.models", "QueryResponse"), + "SearchQuery": ("pinecone.db_data.dataclasses", "SearchQuery"), + "SearchQueryVector": ("pinecone.db_data.dataclasses", "SearchQueryVector"), + "SearchRerank": ("pinecone.db_data.dataclasses", "SearchRerank"), + "UpsertResponse": ("pinecone.db_data.models", "UpsertResponse"), + "UpdateRequest": ("pinecone.db_data.models", "UpdateRequest"), + "SparseValues": ("pinecone.db_data.models", "SparseValues"), +} + +_db_control_lazy_imports = { + "CloudProvider": ("pinecone.db_control.enums", "CloudProvider"), + "AwsRegion": ("pinecone.db_control.enums", "AwsRegion"), + "GcpRegion": 
("pinecone.db_control.enums", "GcpRegion"), + "AzureRegion": ("pinecone.db_control.enums", "AzureRegion"), + "PodIndexEnvironment": ("pinecone.db_control.enums", "PodIndexEnvironment"), + "Metric": ("pinecone.db_control.enums", "Metric"), + "VectorType": ("pinecone.db_control.enums", "VectorType"), + "DeletionProtection": ("pinecone.db_control.enums", "DeletionProtection"), + "CollectionDescription": ("pinecone.db_control.models", "CollectionDescription"), + "CollectionList": ("pinecone.db_control.models", "CollectionList"), + "IndexList": ("pinecone.db_control.models", "IndexList"), + "IndexModel": ("pinecone.db_control.models", "IndexModel"), + "IndexEmbed": ("pinecone.db_control.models", "IndexEmbed"), + "ServerlessSpec": ("pinecone.db_control.models", "ServerlessSpec"), + "ServerlessSpecDefinition": ("pinecone.db_control.models", "ServerlessSpecDefinition"), + "PodSpec": ("pinecone.db_control.models", "PodSpec"), + "PodSpecDefinition": ("pinecone.db_control.models", "PodSpecDefinition"), + "PodType": ("pinecone.db_control.enums", "PodType"), +} + +_config_lazy_imports = { + "Config": ("pinecone.config", "Config"), + "ConfigBuilder": ("pinecone.config", "ConfigBuilder"), + "PineconeConfig": ("pinecone.config", "PineconeConfig"), +} + +# Define imports to be lazily loaded +_LAZY_IMPORTS = { + **_inference_lazy_imports, + **_db_data_lazy_imports, + **_db_control_lazy_imports, + **_config_lazy_imports, +} + +# Set up the lazy import handler +_setup_lazy_imports(_LAZY_IMPORTS) + # Raise an exception if the user is attempting to use the SDK with # deprecated plugins installed in their project. 
-check_for_deprecated_plugins() +_check_for_deprecated_plugins() # Silence annoying log messages from the plugin interface logging.getLogger("pinecone_plugin_interface").setLevel(logging.CRITICAL) + +__all__ = [ + "__version__", + # Deprecated top-levelfunctions + "init", + "create_index", + "delete_index", + "list_indexes", + "describe_index", + "configure_index", + "scale_index", + "create_collection", + "delete_collection", + "describe_collection", + "list_collections", + # Primary client classes + "Pinecone", + "PineconeAsyncio", + # Config classes + "Config", + "ConfigBuilder", + "PineconeConfig", + # DB control classes + "CloudProvider", + "AwsRegion", + "GcpRegion", + "AzureRegion", + "PodIndexEnvironment", + "Metric", + "VectorType", + "DeletionProtection", + "CollectionDescription", + "CollectionList", + "IndexList", + "IndexModel", + "IndexEmbed", + "ServerlessSpec", + "ServerlessSpecDefinition", + "PodSpec", + "PodSpecDefinition", + "PodType", + "Vector", + "FetchResponse", + "DeleteRequest", + "DescribeIndexStatsRequest", + "DescribeIndexStatsResponse", + "RpcStatus", + "ScoredVector", + "SingleQueryResults", + "QueryRequest", + "QueryResponse", + "SearchQuery", + "SearchQueryVector", + "SearchRerank", + "UpsertResponse", + "UpdateRequest", + "SparseValues", + # Inference classes + "RerankModel", + "EmbedModel", + # Exception classes + "PineconeException", + "PineconeApiException", + "PineconeConfigurationError", + "PineconeProtocolError", + "PineconeApiAttributeError", + "PineconeApiTypeError", + "PineconeApiValueError", + "PineconeApiKeyError", + "PineconeApiException", + "NotFoundException", + "UnauthorizedException", + "ForbiddenException", + "ServiceException", + "ListConversionException", +] diff --git a/pinecone/config/config.py b/pinecone/config/config.py index 86c03649..9029c45a 100644 --- a/pinecone/config/config.py +++ b/pinecone/config/config.py @@ -5,7 +5,7 @@ from pinecone.config.openapi_config_factory import OpenApiConfigFactory if 
TYPE_CHECKING: - from pinecone.openapi_support.configuration import Configuration as OpenApiConfiguration + from pinecone.config.openapi_configuration import Configuration as OpenApiConfiguration # Duplicated this util to help resolve circular imports diff --git a/pinecone/config/openapi_configuration.py b/pinecone/config/openapi_configuration.py index 9be701be..fce6defc 100644 --- a/pinecone/config/openapi_configuration.py +++ b/pinecone/config/openapi_configuration.py @@ -82,7 +82,7 @@ class Configuration: You can programmatically set the cookie: - conf = pinecone.openapi_support.Configuration( + conf = pinecone.config.openapi_configuration.Configuration( api_key={'cookieAuth': 'abc123'} api_key_prefix={'cookieAuth': 'JSESSIONID'} ) diff --git a/pinecone/control/__init__.py b/pinecone/control/__init__.py new file mode 100644 index 00000000..4f04e477 --- /dev/null +++ b/pinecone/control/__init__.py @@ -0,0 +1,9 @@ +import warnings + +warnings.warn( + "The module at `pinecone.control` has moved to `pinecone.db_control`. " + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, +) + +from pinecone.db_control import * diff --git a/pinecone/data/__init__.py b/pinecone/data/__init__.py new file mode 100644 index 00000000..0268ac16 --- /dev/null +++ b/pinecone/data/__init__.py @@ -0,0 +1,10 @@ +import warnings + +warnings.warn( + "The module at `pinecone.data` has moved to `pinecone.db_data`. " + "Please update your imports. " + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, +) + +from pinecone.db_data import * diff --git a/pinecone/data/features/__init__.py b/pinecone/data/features/__init__.py new file mode 100644 index 00000000..fd64a554 --- /dev/null +++ b/pinecone/data/features/__init__.py @@ -0,0 +1,10 @@ +import warnings + +warnings.warn( + "The module at `pinecone.data.features` has moved to `pinecone.db_data.features`. " + "Please update your imports. 
" + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, +) + +from pinecone.db_data.features import * diff --git a/pinecone/data/features/bulk_imports/__init__.py b/pinecone/data/features/bulk_imports/__init__.py new file mode 100644 index 00000000..740d503b --- /dev/null +++ b/pinecone/data/features/bulk_imports/__init__.py @@ -0,0 +1,10 @@ +import warnings + +warnings.warn( + "The module at `pinecone.data.features.bulk_import` has moved to `pinecone.db_data.features.bulk_import`. " + "Please update your imports. " + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, +) + +from pinecone.db_data.features.bulk_import import * diff --git a/pinecone/db_data/features/inference/__init__.py b/pinecone/data/features/inference/__init__.py similarity index 53% rename from pinecone/db_data/features/inference/__init__.py rename to pinecone/data/features/inference/__init__.py index 897b4f4f..b0918dd5 100644 --- a/pinecone/db_data/features/inference/__init__.py +++ b/pinecone/data/features/inference/__init__.py @@ -2,8 +2,7 @@ warnings.warn( "The module at `pinecone.data.features.inference` has moved to `pinecone.inference`. " - "Please update your imports from `from pinecone.data.features.inference import Inference, AsyncioInference, RerankModel, EmbedModel` " - "to `from pinecone.inference import Inference, AsyncioInference, RerankModel, EmbedModel`. " + "Please update your imports. 
" "This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, ) diff --git a/pinecone/db_control/models/collection_list.py b/pinecone/db_control/models/collection_list.py index 508ec685..f36a9708 100644 --- a/pinecone/db_control/models/collection_list.py +++ b/pinecone/db_control/models/collection_list.py @@ -1,5 +1,7 @@ import json -from pinecone.core.openapi.db_control.models import CollectionList as OpenAPICollectionList +from pinecone.core.openapi.db_control.model.collection_list import ( + CollectionList as OpenAPICollectionList, +) class CollectionList: diff --git a/pinecone/db_control/models/index_list.py b/pinecone/db_control/models/index_list.py index 71242e24..e918b4f5 100644 --- a/pinecone/db_control/models/index_list.py +++ b/pinecone/db_control/models/index_list.py @@ -1,5 +1,5 @@ import json -from pinecone.core.openapi.db_control.models import IndexList as OpenAPIIndexList +from pinecone.core.openapi.db_control.model.index_list import IndexList as OpenAPIIndexList from .index_model import IndexModel from typing import List diff --git a/pinecone/db_control/request_factory.py b/pinecone/db_control/request_factory.py index 2e796745..c2ecc905 100644 --- a/pinecone/db_control/request_factory.py +++ b/pinecone/db_control/request_factory.py @@ -2,25 +2,35 @@ from typing import Optional, Dict, Any, Union from enum import Enum +from pinecone.utils import parse_non_empty_args, convert_enum_to_string -from pinecone.utils import convert_enum_to_string -from pinecone.core.openapi.db_control.models import ( - CreateCollectionRequest, +from pinecone.core.openapi.db_control.model.create_collection_request import CreateCollectionRequest +from pinecone.core.openapi.db_control.model.create_index_for_model_request import ( CreateIndexForModelRequest, +) +from pinecone.core.openapi.db_control.model.create_index_for_model_request_embed import ( CreateIndexForModelRequestEmbed, - CreateIndexRequest, - ConfigureIndexRequest, +) 
+from pinecone.core.openapi.db_control.model.create_index_request import CreateIndexRequest +from pinecone.core.openapi.db_control.model.configure_index_request import ConfigureIndexRequest +from pinecone.core.openapi.db_control.model.configure_index_request_spec import ( ConfigureIndexRequestSpec, +) +from pinecone.core.openapi.db_control.model.configure_index_request_spec_pod import ( ConfigureIndexRequestSpecPod, +) +from pinecone.core.openapi.db_control.model.deletion_protection import ( DeletionProtection as DeletionProtectionModel, - IndexSpec, - IndexTags, +) +from pinecone.core.openapi.db_control.model.index_spec import IndexSpec +from pinecone.core.openapi.db_control.model.index_tags import IndexTags +from pinecone.core.openapi.db_control.model.serverless_spec import ( ServerlessSpec as ServerlessSpecModel, - PodSpec as PodSpecModel, - PodSpecMetadataConfig, ) +from pinecone.core.openapi.db_control.model.pod_spec import PodSpec as PodSpecModel +from pinecone.core.openapi.db_control.model.pod_spec_metadata_config import PodSpecMetadataConfig + from pinecone.db_control.models import ServerlessSpec, PodSpec, IndexModel, IndexEmbed -from pinecone.utils import parse_non_empty_args from pinecone.db_control.enums import ( Metric, @@ -58,6 +68,7 @@ def __parse_deletion_protection( deletion_protection: Union[DeletionProtection, str], ) -> DeletionProtectionModel: deletion_protection = convert_enum_to_string(deletion_protection) + print(deletion_protection) if deletion_protection in ["enabled", "disabled"]: return DeletionProtectionModel(deletion_protection) else: diff --git a/pinecone/db_data/dataclasses/search_rerank.py b/pinecone/db_data/dataclasses/search_rerank.py index 1b9534ba..0ac4ca4e 100644 --- a/pinecone/db_data/dataclasses/search_rerank.py +++ b/pinecone/db_data/dataclasses/search_rerank.py @@ -1,6 +1,6 @@ from dataclasses import dataclass from typing import Optional, Dict, Any, List -from ..features.inference import RerankModel +from pinecone.inference 
import RerankModel @dataclass diff --git a/pinecone/db_data/features/__init__.py b/pinecone/db_data/features/__init__.py deleted file mode 100644 index b8f2fddb..00000000 --- a/pinecone/db_data/features/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -import warnings - -# Display a warning for old imports -warnings.warn( - "The module at `pinecone.data.features.inference` has moved to `pinecone.inference`. " - "Please update your imports from `from pinecone.data.features.inference import Inference, AsyncioInference, RerankModel, EmbedModel` " - "to `from pinecone.inference import Inference, AsyncioInference, RerankModel, EmbedModel`. " - "This warning will become an error in a future version of the Pinecone Python SDK.", - DeprecationWarning, -) - -# Import from the new location to maintain backward compatibility -from pinecone.inference import Inference, AsyncioInference, RerankModel, EmbedModel diff --git a/pinecone/db_data/models/__init__.py b/pinecone/db_data/models/__init__.py new file mode 100644 index 00000000..a14d3600 --- /dev/null +++ b/pinecone/db_data/models/__init__.py @@ -0,0 +1 @@ +from pinecone.core.openapi.db_data.models import * diff --git a/pinecone/db_data/types/search_rerank_typed_dict.py b/pinecone/db_data/types/search_rerank_typed_dict.py index 89c4f8d8..2d04fe82 100644 --- a/pinecone/db_data/types/search_rerank_typed_dict.py +++ b/pinecone/db_data/types/search_rerank_typed_dict.py @@ -1,5 +1,5 @@ from typing import TypedDict, Optional, Union, Dict, Any -from ..features.inference import RerankModel +from pinecone.inference import RerankModel class SearchRerankTypedDict(TypedDict): diff --git a/pinecone/models/__init__.py b/pinecone/models/__init__.py new file mode 100644 index 00000000..74a1658c --- /dev/null +++ b/pinecone/models/__init__.py @@ -0,0 +1,9 @@ +import warnings + +warnings.warn( + "The module at `pinecone.models` has moved to `pinecone.db_control.models`. 
" + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, +) + +from pinecone.db_control.models import * diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py index a694dcb0..38462390 100644 --- a/pinecone/pinecone.py +++ b/pinecone/pinecone.py @@ -42,10 +42,7 @@ class Pinecone(PluginAware, LegacyPineconeDBControlInterface): """ - A client for interacting with Pinecone's vector database. - - This class implements methods for managing and interacting with Pinecone resources - such as collections and indexes. + A client for interacting with Pinecone APIs. """ def __init__( @@ -127,12 +124,10 @@ def create_index( name: str, spec: Union[Dict, "ServerlessSpec", "PodSpec"], dimension: Optional[int] = None, - metric: Optional[Union["Metric", str]] = "Metric.COSINE", + metric: Optional[Union["Metric", str]] = "cosine", timeout: Optional[int] = None, - deletion_protection: Optional[ - Union["DeletionProtection", str] - ] = "DeletionProtection.DISABLED", - vector_type: Optional[Union["VectorType", str]] = "VectorType.DENSE", + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + vector_type: Optional[Union["VectorType", str]] = "dense", tags: Optional[Dict[str, str]] = None, ) -> "IndexModel": return self.db.index.create( diff --git a/pinecone/utils/find_legacy_imports.py b/pinecone/utils/find_legacy_imports.py new file mode 100755 index 00000000..5421de28 --- /dev/null +++ b/pinecone/utils/find_legacy_imports.py @@ -0,0 +1,143 @@ +#!/usr/bin/env python3 +""" +Script to identify legacy imports that were previously available via star imports. + +This script analyzes the codebase to find all imports that were previously available +via star imports but are no longer imported at the top level. +""" + +import ast +import os +from typing import Set + + +def find_star_imports(file_path: str) -> Set[str]: + """ + Find all star imports in a file. + + Args: + file_path: Path to the file to analyze. 
+ + Returns: + Set of module names that are imported with star imports. + """ + with open(file_path, "r") as f: + content = f.read() + + try: + tree = ast.parse(content) + except SyntaxError: + print(f"Warning: Could not parse {file_path}") + return set() + + star_imports = set() + + for node in ast.walk(tree): + if isinstance(node, ast.ImportFrom) and node.names[0].name == "*": + module_name = node.module + if module_name: + star_imports.add(module_name) + + return star_imports + + +def find_imported_names(file_path: str) -> Set[str]: + """ + Find all names that are imported in a file. + + Args: + file_path: Path to the file to analyze. + + Returns: + Set of imported names. + """ + with open(file_path, "r") as f: + content = f.read() + + try: + tree = ast.parse(content) + except SyntaxError: + print(f"Warning: Could not parse {file_path}") + return set() + + imported_names = set() + + for node in ast.walk(tree): + if isinstance(node, ast.Import): + for name in node.names: + imported_names.add(name.name) + elif isinstance(node, ast.ImportFrom): + for name in node.names: + if name.name != "*": + imported_names.add(name.name) + + return imported_names + + +def find_module_exports(module_path: str) -> Set[str]: + """ + Find all names that are exported by a module. + + Args: + module_path: Path to the module to analyze. + + Returns: + Set of exported names. + """ + try: + module = __import__(module_path, fromlist=["*"]) + return set(dir(module)) + except ImportError: + print(f"Warning: Could not import {module_path}") + return set() + + +def main(): + """ + Main function to find legacy imports. 
+ """ + # Get the package root directory + package_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + + # Find the __init__.py file + init_file = os.path.join(package_root, "__init__.py") + + # Find star imports in the __init__.py file + star_imports = find_star_imports(init_file) + + # Find all imported names in the __init__.py file + imported_names = find_imported_names(init_file) + + # Find all module exports + module_exports = {} + for module_name in star_imports: + module_exports[module_name] = find_module_exports(module_name) + + # Find all files in the package + package_files = [] + for root, _, files in os.walk(package_root): + for file in files: + if file.endswith(".py") and not file.startswith("__"): + package_files.append(os.path.join(root, file)) + + # Find all imports in the package + package_imports = set() + for file in package_files: + package_imports.update(find_imported_names(file)) + + # Find legacy imports + legacy_imports = {} + for module_name, exports in module_exports.items(): + for export in exports: + if export in package_imports and export not in imported_names: + legacy_imports[f"pinecone.{export}"] = (module_name, export) + + # Print the legacy imports + print("LEGACY_IMPORTS = {") + for legacy_name, (module_path, actual_name) in sorted(legacy_imports.items()): + print(f" '{legacy_name}': ('{module_path}', '{actual_name}'),") + print("}") + + +if __name__ == "__main__": + main() diff --git a/pinecone/utils/lazy_imports.py b/pinecone/utils/lazy_imports.py new file mode 100644 index 00000000..0a55c8f4 --- /dev/null +++ b/pinecone/utils/lazy_imports.py @@ -0,0 +1,76 @@ +""" +Lazy import handler for Pinecone. + +This module provides a way to lazily load imports that were previously +available via star imports but are no longer imported at the top level. 
+""" + +import importlib +import sys +from types import ModuleType +from typing import Dict, Optional, Tuple, cast + +# Dictionary mapping import names to their actual module paths +# Format: 'name': ('module_path', 'actual_name') +LAZY_IMPORTS: Dict[str, Tuple[str, str]] = { + # Example: 'Vector': ('pinecone.db_data.models', 'Vector') + # Add all your lazy imports here +} + + +class LazyModule: + def __init__(self, original_module, lazy_imports): + self._original_module = original_module + self._lazy_imports = lazy_imports + self._loaded_attrs = {} + + def __dir__(self): + # Get the base directory listing from the original module + base_dir = dir(self._original_module) + + # Add lazy-loaded items + lazy_dir = list(self._lazy_imports.keys()) + + # Return combined list + return sorted(set(base_dir + lazy_dir)) + + def __getattr__(self, name): + # First try the original module + try: + return getattr(self._original_module, name) + except AttributeError: + pass + + # Then try lazy imports + if name in self._lazy_imports: + if name not in self._loaded_attrs: + module_path, item_name = self._lazy_imports[name] + module = importlib.import_module(module_path) + self._loaded_attrs[name] = getattr(module, item_name) + return self._loaded_attrs[name] + + raise AttributeError(f"module '{self._original_module.__name__}' has no attribute '{name}'") + + +def setup_lazy_imports(lazy_imports: Optional[Dict[str, Tuple[str, str]]] = None) -> None: + """ + Set up the lazy import handler. + + Args: + lazy_imports: Optional dictionary of imports to handle lazily. + If None, uses the default LAZY_IMPORTS dictionary. 
+ """ + if lazy_imports is None: + lazy_imports = LAZY_IMPORTS + + # Only proceed if the pinecone module is already loaded + if "pinecone" not in sys.modules: + return + + # Create a proxy for the pinecone module + original_module = sys.modules["pinecone"] + proxy = LazyModule(original_module, lazy_imports) + + # Replace the pinecone module with our proxy + # Use a type cast to satisfy the type checker + sys.modules["pinecone"] = cast(ModuleType, proxy) diff --git a/pinecone/utils/legacy_imports.py b/pinecone/utils/legacy_imports.py new file mode 100644 index 00000000..9013acdd --- /dev/null +++ b/pinecone/utils/legacy_imports.py @@ -0,0 +1,112 @@ +""" +Legacy import handler for Pinecone. + +This module provides a simple way to handle legacy imports that were previously +available via star imports but are no longer imported at the top level. +""" + +import importlib +import sys +from types import ModuleType +from typing import Dict, Optional, Set, Any, Tuple, cast + +# Dictionary mapping legacy import names to their actual module paths +# Format: 'name': ('module_path', 'actual_name') +LEGACY_IMPORTS: Dict[str, Tuple[str, str]] = { + # Example: 'Vector': ('pinecone.db_data.models', 'Vector') + # Add all your legacy imports here +} + + +class LegacyImportProxy: + """ + A proxy module that handles legacy imports with warnings. + + This class is used to replace the pinecone module in sys.modules + to handle legacy imports that were previously available via star imports. + """ + + def __init__(self, original_module: Any, legacy_imports: Dict[str, Tuple[str, str]]): + """ + Initialize the proxy module. + + Args: + original_module: The original module to proxy. + legacy_imports: Dictionary of legacy imports to handle. 
+ """ + self._original_module = original_module + self._legacy_imports = legacy_imports + self._warned_imports: Set[str] = set() + self._loaded_modules: Dict[str, Any] = {} + + def __getattr__(self, name: str) -> Any: + """ + Handle attribute access for legacy imports. + + Args: + name: The name of the attribute being accessed. + + Returns: + The requested attribute. + + Raises: + AttributeError: If the attribute cannot be found. + """ + # First, try to get the attribute from the original module + try: + return getattr(self._original_module, name) + except AttributeError: + pass + + # Check if this is a legacy import + if name in self._legacy_imports: + module_path, actual_name = self._legacy_imports[name] + + # Only warn once per import + # if name not in self._warned_imports: + # warnings.warn( + # f"Importing '{name}' directly from 'pinecone' is deprecated. " + # f"Please import it from '{module_path}' instead. " + # f"This import will be removed in a future version.", + # DeprecationWarning, + # stacklevel=2 + # ) + # self._warned_imports.add(name) + + # Load the module if not already loaded + if module_path not in self._loaded_modules: + try: + self._loaded_modules[module_path] = importlib.import_module(module_path) + except ImportError: + raise AttributeError(f"module 'pinecone' has no attribute '{name}'") + + # Get the actual object + module = self._loaded_modules[module_path] + if hasattr(module, actual_name): + return getattr(module, actual_name) + + raise AttributeError(f"module 'pinecone' has no attribute '{name}'") + + +def setup_legacy_imports(legacy_imports: Optional[Dict[str, Tuple[str, str]]] = None) -> None: + """ + Set up the legacy import handler. + + Args: + legacy_imports: Optional dictionary of legacy imports to handle. + If None, uses the default LEGACY_IMPORTS dictionary. 
+ """ + if legacy_imports is None: + legacy_imports = LEGACY_IMPORTS + + # Only proceed if the pinecone module is already loaded + if "pinecone" not in sys.modules: + return + + # Create a proxy for the pinecone module + original_module = sys.modules["pinecone"] + proxy = LegacyImportProxy(original_module, legacy_imports) + + # Replace the pinecone module with our proxy + # Use a type cast to satisfy the type checker + sys.modules["pinecone"] = cast(ModuleType, proxy) diff --git a/tests/unit/test_control.py b/tests/unit/test_control.py index ad3b2872..da252063 100644 --- a/tests/unit/test_control.py +++ b/tests/unit/test_control.py @@ -87,37 +87,38 @@ def test_plugins_are_lazily_loaded(self): def test_default_host(self): p = Pinecone(api_key="123-456-789") - assert p.index_api.api_client.configuration.host == "https://api.pinecone.io" + assert p.db.index_api.api_client.configuration.host == "https://api.pinecone.io" def test_passing_host(self): p = Pinecone(api_key="123-456-789", host="my-host.pinecone.io") - assert p.index_api.api_client.configuration.host == "https://my-host.pinecone.io" + assert p.db.index_api.api_client.configuration.host == "https://my-host.pinecone.io" def test_passing_additional_headers(self): extras = {"header1": "my-value", "header2": "my-value2"} p = Pinecone(api_key="123-456-789", additional_headers=extras) for key, value in extras.items(): - assert p.index_api.api_client.default_headers[key] == value - assert "User-Agent" in p.index_api.api_client.default_headers - assert "X-Pinecone-API-Version" in p.index_api.api_client.default_headers - assert "header1" in p.index_api.api_client.default_headers - assert "header2" in p.index_api.api_client.default_headers - assert len(p.index_api.api_client.default_headers) == 4 + assert p.db.index_api.api_client.default_headers[key] == value + assert "User-Agent" in p.db.index_api.api_client.default_headers + assert "X-Pinecone-API-Version" in p.db.index_api.api_client.default_headers + assert 
"header1" in p.db.index_api.api_client.default_headers + assert "header2" in p.db.index_api.api_client.default_headers + assert len(p.db.index_api.api_client.default_headers) == 4 def test_overwrite_useragent(self): # This doesn't seem like a common use case, but we may want to allow this # when embedding the client in other pinecone tools such as canopy. extras = {"User-Agent": "test-user-agent"} p = Pinecone(api_key="123-456-789", additional_headers=extras) - assert "X-Pinecone-API-Version" in p.index_api.api_client.default_headers - assert p.index_api.api_client.default_headers["User-Agent"] == "test-user-agent" - assert len(p.index_api.api_client.default_headers) == 2 + assert "X-Pinecone-API-Version" in p.db.index_api.api_client.default_headers + assert p.db.index_api.api_client.default_headers["User-Agent"] == "test-user-agent" + assert len(p.db.index_api.api_client.default_headers) == 2 def test_set_source_tag_in_useragent(self): p = Pinecone(api_key="123-456-789", source_tag="test_source_tag") assert ( - re.search(r"source_tag=test_source_tag", p.index_api.api_client.user_agent) is not None + re.search(r"source_tag=test_source_tag", p.db.index_api.api_client.user_agent) + is not None ) @pytest.mark.parametrize( @@ -149,8 +150,8 @@ def test_create_index_with_timeout( expected_sleep_calls, ): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.index_api, "describe_index", side_effect=describe_index_responses) - mocker.patch.object(p.index_api, "create_index") + mocker.patch.object(p.db.index_api, "describe_index", side_effect=describe_index_responses) + mocker.patch.object(p.db.index_api, "create_index") mocker.patch("time.sleep") p.create_index( @@ -160,8 +161,8 @@ def test_create_index_with_timeout( timeout=timeout_value, ) - assert p.index_api.create_index.call_count == 1 - assert p.index_api.describe_index.call_count == expected_describe_index_calls + assert p.db.index_api.create_index.call_count == 1 + assert 
p.db.index_api.describe_index.call_count == expected_describe_index_calls assert time.sleep.call_count == expected_sleep_calls @pytest.mark.parametrize( @@ -210,7 +211,7 @@ def test_create_index_with_spec_dictionary(self, mocker, index_spec): p = Pinecone(api_key="123-456-789") mock_api = MagicMock() - mocker.patch.object(p, "index_api", mock_api) + mocker.patch.object(p.db, "index_api", mock_api) p.create_index(name="my-index", dimension=10, spec=index_spec) @@ -245,8 +246,8 @@ def test_create_index_from_source_collection( expected_sleep_calls, ): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.index_api, "describe_index", side_effect=describe_index_responses) - mocker.patch.object(p.index_api, "create_index") + mocker.patch.object(p.db.index_api, "describe_index", side_effect=describe_index_responses) + mocker.patch.object(p.db.index_api, "create_index") mocker.patch("time.sleep") p.create_index( @@ -256,17 +257,19 @@ def test_create_index_from_source_collection( timeout=timeout_value, ) - assert p.index_api.create_index.call_count == 1 - assert p.index_api.describe_index.call_count == expected_describe_index_calls + assert p.db.index_api.create_index.call_count == 1 + assert p.db.index_api.describe_index.call_count == expected_describe_index_calls assert time.sleep.call_count == expected_sleep_calls def test_create_index_when_timeout_exceeded(self, mocker): with pytest.raises(TimeoutError): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.index_api, "create_index") + mocker.patch.object(p.db.index_api, "create_index") describe_index_response = [description_with_status(False)] * 5 - mocker.patch.object(p.index_api, "describe_index", side_effect=describe_index_response) + mocker.patch.object( + p.db.index_api, "describe_index", side_effect=describe_index_response + ) mocker.patch("time.sleep") p.create_index( @@ -276,7 +279,7 @@ def test_create_index_when_timeout_exceeded(self, mocker): def test_list_indexes_returns_iterable(self, mocker, 
index_list_response): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.index_api, "list_indexes", side_effect=[index_list_response]) + mocker.patch.object(p.db.index_api, "list_indexes", side_effect=[index_list_response]) response = p.list_indexes() assert [i.name for i in response] == ["index1", "index2", "index3"] diff --git a/tests/unit/test_plugin_aware.py b/tests/unit/test_plugin_aware.py index 7f4329d1..315bd225 100644 --- a/tests/unit/test_plugin_aware.py +++ b/tests/unit/test_plugin_aware.py @@ -1,7 +1,6 @@ import pytest from pinecone.utils.plugin_aware import PluginAware -from pinecone.config import Config -from pinecone.openapi_support.configuration import Configuration as OpenApiConfig +from pinecone.config import Config, OpenApiConfiguration class TestPluginAware: @@ -22,7 +21,7 @@ def test_correctly_raise_attribute_errors(self): class Foo(PluginAware): def __init__(self): self.config = Config() - self.openapi_config = OpenApiConfig() + self.openapi_config = OpenApiConfiguration() self.pool_threads = 1 super().__init__() @@ -38,7 +37,7 @@ def test_plugins_are_lazily_loaded(self): class Pinecone(PluginAware): def __init__(self): self.config = Config() - self.openapi_config = OpenApiConfig() + self.openapi_config = OpenApiConfiguration() self.pool_threads = 10 super().__init__() diff --git a/tests/upgrade/test_all.py b/tests/upgrade/test_all.py new file mode 100644 index 00000000..acabf620 --- /dev/null +++ b/tests/upgrade/test_all.py @@ -0,0 +1,28 @@ +class TestAll: + def test_all_is_complete(self): + """Test that __all__ is complete and accurate.""" + # Import the module + import pinecone + + # Get all public names (those that don't start with _) + public_names = {name for name in dir(pinecone) if not name.startswith("_")} + + # Get __all__ if it exists, otherwise empty set + all_names = set(getattr(pinecone, "__all__", [])) + + # Check that __all__ exists + assert hasattr(pinecone, "__all__"), "Module should have __all__ defined" + + # Check 
that all names in __all__ are actually importable + for name in all_names: + assert getattr(pinecone, name) is not None, f"Name {name} in __all__ is not importable" + + # Check that all public names are in __all__ + missing_from_all = public_names - all_names + for name in missing_from_all: + print(f"Public name {name} is not in __all__") + assert not missing_from_all, f"Public names not in __all__: {missing_from_all}" + + # Check that __all__ doesn't contain any private names + private_in_all = {name for name in all_names if name.startswith("_")} + assert not private_in_all, f"Private names in __all__: {private_in_all}" diff --git a/tests/upgrade/test_reorganization.py b/tests/upgrade/test_reorganization.py new file mode 100644 index 00000000..331681b7 --- /dev/null +++ b/tests/upgrade/test_reorganization.py @@ -0,0 +1,19 @@ +import pytest + + +class TestReorganization: + def test_data(self): + with pytest.warns(DeprecationWarning) as warning_info: + from pinecone.data import Index + + assert Index is not None + assert len(warning_info) > 0 + assert "has moved to" in str(warning_info[0].message) + + def test_config(self): + with pytest.warns(DeprecationWarning) as warning_info: + from pinecone.config import PineconeConfig + + assert PineconeConfig is not None + assert len(warning_info) > 0 + assert "has moved to" in str(warning_info[0].message) diff --git a/tests/upgrade/test_v6_upgrade.py b/tests/upgrade/test_v6_upgrade.py index 358ce0e5..6532f65f 100644 --- a/tests/upgrade/test_v6_upgrade.py +++ b/tests/upgrade/test_v6_upgrade.py @@ -1,7 +1,79 @@ import pinecone +import logging +logger = logging.getLogger(__name__) + + +class TestExpectedImports_UpgradeFromV6: + def test_mapped_data_imports(self): + data_imports = [ + "Vector", + "QueryRequest", + "FetchResponse", + "DeleteRequest", + "DescribeIndexStatsRequest", + "DescribeIndexStatsResponse", + "RpcStatus", + "ScoredVector", + "ServiceException", + "SingleQueryResults", + "QueryResponse", + "RerankModel", + 
"SearchQuery", + "SearchQueryVector", + "SearchRerank", + "UpsertResponse", + "UpdateRequest", + ] + + control_imports = [ + "CollectionDescription", + "CollectionList", + "ServerlessSpec", + "ServerlessSpecDefinition", + "PodSpec", + "PodSpecDefinition", + # 'ForbiddenException', + # 'ImportErrorMode', + # 'Index', + "IndexList", + "IndexModel", + # 'ListConversionException', + # 'MetadataDictionaryExpectedError', + # 'NotFoundException', + ] + + config_imports = [ + "Config", + "ConfigBuilder", + "PineconeConfig", + "PineconeConfigurationError", + "PineconeException", + "PineconeProtocolError", + "PineconeApiAttributeError", + "PineconeApiException", + ] + + exception_imports = [ + "PineconeConfigurationError", + "PineconeProtocolError", + "PineconeException", + "PineconeApiAttributeError", + "PineconeApiTypeError", + "PineconeApiValueError", + "PineconeApiKeyError", + "PineconeApiException", + "NotFoundException", + "UnauthorizedException", + "ForbiddenException", + "ServiceException", + "ListConversionException", + ] + mapped_imports = data_imports + control_imports + config_imports + exception_imports + + for import_name in mapped_imports: + assert hasattr(pinecone, import_name), f"Import {import_name} not found in pinecone" -class TestV6Upgrade: def test_v6_upgrade_root_imports(self): v6_dir_items = [ "CollectionDescription", @@ -100,76 +172,92 @@ def test_v6_upgrade_root_imports(self): "warnings", ] + intentionally_removed_items = ["os"] + + expected_items = [item for item in v6_dir_items if item not in intentionally_removed_items] + missing_items = [] - for item in v6_dir_items: - if item not in dir(pinecone): + for item in expected_items: + if not hasattr(pinecone, item): missing_items.append(item) + logger.debug(f"Exported: ❌ {item}") + else: + logger.debug(f"Exported: ✅ {item}") + + extra_items = [] + for item in intentionally_removed_items: + if hasattr(pinecone, item): + extra_items.append(item) + logger.debug(f"Removed: ❌ {item}") + else: + 
logger.debug(f"Removed: ✅ {item}") assert len(missing_items) == 0, f"Missing items: {missing_items}" + assert len(extra_items) == 0, f"Extra items: {extra_items}" - def test_v6_upgrade_data_imports(self): - v6_data_dir_items = [ - "DescribeIndexStatsResponse", - "EmbedModel", - "FetchResponse", - "ImportErrorMode", - "Index", - "IndexClientInstantiationError", - "Inference", - "InferenceInstantiationError", - "MetadataDictionaryExpectedError", - "QueryResponse", - "RerankModel", - "SearchQuery", - "SearchQueryVector", - "SearchRerank", - "SparseValues", - "SparseValuesDictionaryExpectedError", - "SparseValuesMissingKeysError", - "SparseValuesTypeError", - "UpsertResponse", - "Vector", - "VectorDictionaryExcessKeysError", - "VectorDictionaryMissingKeysError", - "VectorTupleLengthError", - "_AsyncioInference", - "_Index", - "_IndexAsyncio", - "_Inference", - "__builtins__", - "__cached__", - "__doc__", - "__file__", - "__loader__", - "__name__", - "__package__", - "__path__", - "__spec__", - "dataclasses", - "errors", - "features", - "fetch_response", - "import_error", - "index", - "index_asyncio", - "index_asyncio_interface", - "interfaces", - "query_results_aggregator", - "request_factory", - "search_query", - "search_query_vector", - "search_rerank", - "sparse_values", - "sparse_values_factory", - "types", - "utils", - "vector", - "vector_factory", - ] + # def test_v6_upgrade_data_imports(self): + # v6_data_dir_items = [ + # "DescribeIndexStatsResponse", + # "EmbedModel", + # "FetchResponse", + # "ImportErrorMode", + # "Index", + # "IndexClientInstantiationError", + # "Inference", + # "InferenceInstantiationError", + # "MetadataDictionaryExpectedError", + # "QueryResponse", + # "RerankModel", + # "SearchQuery", + # "SearchQueryVector", + # "SearchRerank", + # "SparseValues", + # "SparseValuesDictionaryExpectedError", + # "SparseValuesMissingKeysError", + # "SparseValuesTypeError", + # "UpsertResponse", + # "Vector", + # "VectorDictionaryExcessKeysError", + # 
"VectorDictionaryMissingKeysError", + # "VectorTupleLengthError", + # "_AsyncioInference", + # "_Index", + # "_IndexAsyncio", + # "_Inference", + # "__builtins__", + # "__cached__", + # "__doc__", + # "__file__", + # "__loader__", + # "__name__", + # "__package__", + # "__path__", + # "__spec__", + # "dataclasses", + # "errors", + # "features", + # "fetch_response", + # "import_error", + # "index", + # "index_asyncio", + # "index_asyncio_interface", + # "interfaces", + # "query_results_aggregator", + # "request_factory", + # "search_query", + # "search_query_vector", + # "search_rerank", + # "sparse_values", + # "sparse_values_factory", + # "types", + # "utils", + # "vector", + # "vector_factory", + # ] - missing_items = [] - for item in v6_data_dir_items: - if item not in dir(pinecone.db_data): - missing_items.append(item) + # missing_items = [] + # for item in v6_data_dir_items: + # if item not in dir(pinecone.db_data): + # missing_items.append(item) - assert len(missing_items) == 0, f"Missing items: {missing_items}" + # assert len(missing_items) == 0, f"Missing items: {missing_items}" From 0584c6320f4aca69ad9bf73ca01a0961c8b5e61f Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Wed, 23 Apr 2025 15:32:55 -0400 Subject: [PATCH 16/48] Add missing exports --- pinecone/__init__.py | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/pinecone/__init__.py b/pinecone/__init__.py index f7d8fce9..2714bb6c 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -37,6 +37,22 @@ "UpsertResponse": ("pinecone.db_data.models", "UpsertResponse"), "UpdateRequest": ("pinecone.db_data.models", "UpdateRequest"), "SparseValues": ("pinecone.db_data.models", "SparseValues"), + "ImportErrorMode": ("pinecone.core.openapi.db_data.model", "ImportErrorMode"), + "VectorDictionaryMissingKeysError": ( + "pinecone.db_data.errors", + "VectorDictionaryMissingKeysError", + ), + "VectorDictionaryExcessKeysError": ( + "pinecone.db_data.errors", + 
"VectorDictionaryExcessKeysError", + ), + "VectorTupleLengthError": ("pinecone.db_data.errors", "VectorTupleLengthError"), + "SparseValuesTypeError": ("pinecone.db_data.errors", "SparseValuesTypeError"), + "SparseValuesMissingKeysError": ("pinecone.db_data.errors", "SparseValuesMissingKeysError"), + "SparseValuesDictionaryExpectedError": ( + "pinecone.db_data.errors", + "SparseValuesDictionaryExpectedError", + ), } _db_control_lazy_imports = { @@ -105,7 +121,7 @@ "Config", "ConfigBuilder", "PineconeConfig", - # DB control classes + # OpenAPI classes "CloudProvider", "AwsRegion", "GcpRegion", @@ -119,6 +135,7 @@ "IndexList", "IndexModel", "IndexEmbed", + "ImportErrorMode", "ServerlessSpec", "ServerlessSpecDefinition", "PodSpec", @@ -158,4 +175,10 @@ "ForbiddenException", "ServiceException", "ListConversionException", + "VectorDictionaryMissingKeysError", + "VectorDictionaryExcessKeysError", + "VectorTupleLengthError", + "SparseValuesTypeError", + "SparseValuesMissingKeysError", + "SparseValuesDictionaryExpectedError", ] From cd15bf90dc29995daaca72698ceb881fbd7deb3b Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 25 Apr 2025 14:42:27 -0400 Subject: [PATCH 17/48] Fix unit tests --- pinecone/__init__.py | 10 +++++----- pyproject.toml | 2 +- tests/unit/test_config.py | 14 +++++++++++--- tests/unit/test_index_initialization.py | 3 ++- tests/unit/utils/test_docs_links.py | 10 ++++++++-- 5 files changed, 27 insertions(+), 12 deletions(-) diff --git a/pinecone/__init__.py b/pinecone/__init__.py index 2714bb6c..f228eddf 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -21,7 +21,11 @@ } _db_data_lazy_imports = { - "Vector": ("pinecone.db_data.models", "Vector"), + "Vector": ("pinecone.db_data.dataclasses", "Vector"), + "SparseValues": ("pinecone.db_data.dataclasses", "SparseValues"), + "SearchQuery": ("pinecone.db_data.dataclasses", "SearchQuery"), + "SearchQueryVector": ("pinecone.db_data.dataclasses", "SearchQueryVector"), + "SearchRerank": 
("pinecone.db_data.dataclasses", "SearchRerank"), "FetchResponse": ("pinecone.db_data.models", "FetchResponse"), "DeleteRequest": ("pinecone.db_data.models", "DeleteRequest"), "DescribeIndexStatsRequest": ("pinecone.db_data.models", "DescribeIndexStatsRequest"), @@ -31,12 +35,8 @@ "SingleQueryResults": ("pinecone.db_data.models", "SingleQueryResults"), "QueryRequest": ("pinecone.db_data.models", "QueryRequest"), "QueryResponse": ("pinecone.db_data.models", "QueryResponse"), - "SearchQuery": ("pinecone.db_data.dataclasses", "SearchQuery"), - "SearchQueryVector": ("pinecone.db_data.dataclasses", "SearchQueryVector"), - "SearchRerank": ("pinecone.db_data.dataclasses", "SearchRerank"), "UpsertResponse": ("pinecone.db_data.models", "UpsertResponse"), "UpdateRequest": ("pinecone.db_data.models", "UpdateRequest"), - "SparseValues": ("pinecone.db_data.models", "SparseValues"), "ImportErrorMode": ("pinecone.core.openapi.db_data.model", "ImportErrorMode"), "VectorDictionaryMissingKeysError": ( "pinecone.db_data.errors", diff --git a/pyproject.toml b/pyproject.toml index 5b8a11ac..7a1b9a0a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -155,7 +155,7 @@ docstring-code-line-length = "dynamic" [tool.ruff.lint.per-file-ignores] # F403 Allow star imports # F401 allow imported but unused -"__init__.py" = ["F401", "F403"] +"__init__.py" = ["F401", "F403", "F405"] # E402 Allow module level import not at top of file so # tqdm warnings can be disabled ahead of loading any code diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index f1a00508..1da981ad 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -103,7 +103,11 @@ def test_config_pool_threads(self): pc = Pinecone( api_key="test-api-key", host="test-controller-host.pinecone.io", pool_threads=10 ) - assert pc.index_api.api_client.pool_threads == 10 + # DBControl object is created lazily, so we need to access this property + # to trigger the setup so we can inspect the config + assert pc.db 
is not None + + assert pc.db.index_api.api_client.pool_threads == 10 idx = pc.Index(host="my-index-host.pinecone.io", name="my-index-name") assert idx._vector_api.api_client.pool_threads == 10 @@ -146,5 +150,9 @@ def test_proxy_config(self): assert pc.openapi_config.proxy == "http://localhost:8080" assert pc.openapi_config.ssl_ca_cert == "path/to/cert-bundle.pem" - assert pc.index_api.api_client.configuration.proxy == "http://localhost:8080" - assert pc.index_api.api_client.configuration.ssl_ca_cert == "path/to/cert-bundle.pem" + # DBControl object is created lazily, so we need to access this property + # to trigger the setup so we can inspect the config + assert pc.db is not None + + assert pc.db.index_api.api_client.configuration.proxy == "http://localhost:8080" + assert pc.db.index_api.api_client.configuration.ssl_ca_cert == "path/to/cert-bundle.pem" diff --git a/tests/unit/test_index_initialization.py b/tests/unit/test_index_initialization.py index 3d10d636..29928fbc 100644 --- a/tests/unit/test_index_initialization.py +++ b/tests/unit/test_index_initialization.py @@ -51,5 +51,6 @@ def test_overwrite_useragent(self): def test_set_source_tag(self): pc = Pinecone(api_key="123-456-789", source_tag="test_source_tag") assert ( - re.search(r"source_tag=test_source_tag", pc.index_api.api_client.user_agent) is not None + re.search(r"source_tag=test_source_tag", pc.db.index_api.api_client.user_agent) + is not None ) diff --git a/tests/unit/utils/test_docs_links.py b/tests/unit/utils/test_docs_links.py index 478ba3b2..c1d01b21 100644 --- a/tests/unit/utils/test_docs_links.py +++ b/tests/unit/utils/test_docs_links.py @@ -1,11 +1,17 @@ import pytest import requests from pinecone.utils import docslinks +from pinecone import __version__ urls = list(docslinks.values()) @pytest.mark.parametrize("url", urls) def test_valid_links(url): - response = requests.get(url) - assert response.status_code == 200, f"Docs link is invalid: {url}" + if isinstance(url, str): + response = 
requests.get(url) + assert response.status_code == 200, f"Docs link is invalid: {url}" + else: + versioned_url = url(__version__) + response = requests.get(versioned_url) + assert response.status_code == 200, f"Docs link is invalid: {versioned_url}" From 7fed334b9b9ab912f0e6238aa40c88b7c27867ea Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 25 Apr 2025 14:52:27 -0400 Subject: [PATCH 18/48] Update lockfile --- poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 048d84c6..e923876f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1962,4 +1962,4 @@ grpc = ["googleapis-common-protos", "grpcio", "grpcio", "grpcio", "lz4", "protob [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "96c8c770a4626bc9606a7b8e16537e217f238e20c217baa1206f4ef9debe5e82" +content-hash = "33aa755910ac34e4443a3e03a180ac1ece72735367f9c53d76908ca95ea2fd48" From 85d48422fb2fd11c8f6d2349c2c2f0ab81d07871 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 2 May 2025 16:04:34 -0400 Subject: [PATCH 19/48] Add integration tests for reorg methods --- .github/workflows/testing-integration.yaml | 25 ++ pinecone/db_control/request_factory.py | 1 - poetry.lock | 16 +- pyproject.toml | 1 + tests/__init__.py | 6 +- .../control/collections/__init__.py | 0 .../control/collections/conftest.py | 136 ++++++++ .../control/collections/helpers.py | 57 +++ .../control/collections/test_dense_index.py | 172 +++++++++ tests/integration/control/index/__init__.py | 0 tests/integration/control/index/conftest.py | 178 ++++++++++ .../control/index/test_configure.py | 43 +++ .../integration/control/index/test_create.py | 328 ++++++++++++++++++ .../integration/control/index/test_delete.py | 0 .../control/index/test_describe.py | 46 +++ tests/integration/control/index/test_has.py | 18 + tests/integration/control/index/test_list.py | 27 ++ 17 files changed, 1049 insertions(+), 5 deletions(-) create mode 100644 
tests/integration/control/collections/__init__.py create mode 100644 tests/integration/control/collections/conftest.py create mode 100644 tests/integration/control/collections/helpers.py create mode 100644 tests/integration/control/collections/test_dense_index.py create mode 100644 tests/integration/control/index/__init__.py create mode 100644 tests/integration/control/index/conftest.py create mode 100644 tests/integration/control/index/test_configure.py create mode 100644 tests/integration/control/index/test_create.py create mode 100644 tests/integration/control/index/test_delete.py create mode 100644 tests/integration/control/index/test_describe.py create mode 100644 tests/integration/control/index/test_has.py create mode 100644 tests/integration/control/index/test_list.py diff --git a/.github/workflows/testing-integration.yaml b/.github/workflows/testing-integration.yaml index 8275a5f1..53023bde 100644 --- a/.github/workflows/testing-integration.yaml +++ b/.github/workflows/testing-integration.yaml @@ -3,6 +3,31 @@ name: "Integration Tests" workflow_call: {} jobs: + reorg: + name: Reorg tests + runs-on: ubuntu-latest + env: + PINECONE_DEBUG_CURL: 'true' + PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client"}' + strategy: + matrix: + python_version: [3.9, 3.12] + steps: + - uses: actions/checkout@v4 + - name: 'Set up Python ${{ matrix.python_version }}' + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python_version }}' + - name: Setup Poetry + uses: ./.github/actions/setup-poetry + with: + include_asyncio: true + - name: 'Run index tests' + run: poetry run pytest tests/integration/control/index --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG + - name: 'Run collection tests' + run: poetry run pytest tests/integration/control/collections --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG + inference: name: Inference tests diff --git 
a/pinecone/db_control/request_factory.py b/pinecone/db_control/request_factory.py index c2ecc905..719f71a1 100644 --- a/pinecone/db_control/request_factory.py +++ b/pinecone/db_control/request_factory.py @@ -68,7 +68,6 @@ def __parse_deletion_protection( deletion_protection: Union[DeletionProtection, str], ) -> DeletionProtectionModel: deletion_protection = convert_enum_to_string(deletion_protection) - print(deletion_protection) if deletion_protection in ["enabled", "disabled"]: return DeletionProtectionModel(deletion_protection) else: diff --git a/poetry.lock b/poetry.lock index e923876f..2e4de34b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1535,6 +1535,20 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-dotenv" +version = "1.1.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.9" +files = [ + {file = "python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d"}, + {file = "python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + [[package]] name = "pytz" version = "2023.3.post1" @@ -1962,4 +1976,4 @@ grpc = ["googleapis-common-protos", "grpcio", "grpcio", "grpcio", "lz4", "protob [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "33aa755910ac34e4443a3e03a180ac1ece72735367f9c53d76908ca95ea2fd48" +content-hash = "0145fb2ae02a1cdd6fe06b191a6761dcee4f4c67fe057b48d6b501d7b0b504da" diff --git a/pyproject.toml b/pyproject.toml index 7a1b9a0a..788b2870 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,6 +97,7 @@ beautifulsoup4 = "^4.13.3" pinecone-plugin-assistant = "^1.6.0" vprof = "^0.38" tuna = "^0.5.11" +python-dotenv = "^1.1.0" [tool.poetry.extras] diff --git a/tests/__init__.py b/tests/__init__.py index f2dab92a..84ca0481 100644 --- a/tests/__init__.py +++ 
b/tests/__init__.py @@ -1,5 +1,5 @@ import logging -logging.basicConfig( - format="%(levelname)s [%(asctime)s] %(name)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S" -) +# logging.basicConfig( +# format="%(levelname)s [%(asctime)s] %(name)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S" +# ) diff --git a/tests/integration/control/collections/__init__.py b/tests/integration/control/collections/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control/collections/conftest.py b/tests/integration/control/collections/conftest.py new file mode 100644 index 00000000..bb592cee --- /dev/null +++ b/tests/integration/control/collections/conftest.py @@ -0,0 +1,136 @@ +import pytest +import uuid +import time +import logging +import dotenv +import os +from datetime import datetime +from pinecone import Pinecone, NotFoundException, PineconeApiException +from ...helpers import get_environment_var + +dotenv.load_dotenv() + +logger = logging.getLogger(__name__) +""" @private """ + +# Generate a unique ID for the entire test run +RUN_ID = str(uuid.uuid4()) + + +@pytest.fixture() +def index_tags(request): + test_name = request.node.name + if test_name is None: + test_name = "" + else: + test_name = test_name.replace(":", "_").replace("[", "_").replace("]", "_") + + tags = { + "test-suite": "pinecone-python-client", + "test-run": RUN_ID, + "test": test_name, + "created-at": datetime.now().strftime("%Y-%m-%d"), + } + + if os.getenv("USER"): + tags["user"] = os.getenv("USER") + return tags + + +@pytest.fixture() +def pc(): + api_key = get_environment_var("PINECONE_API_KEY") + return Pinecone( + api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} + ) + + +@pytest.fixture() +def pod_environment(): + return get_environment_var("PINECONE_ENVIRONMENT", "us-east1-gcp") + + +def delete_with_retry(pc, index_name, retries=0, sleep_interval=5): + logger.debug( + "Deleting index " + + index_name + + ", retry " + + str(retries) + + ", 
next sleep interval " + + str(sleep_interval) + ) + try: + pc.db.index.delete(name=index_name, timeout=-1) + except NotFoundException: + pass + except PineconeApiException as e: + if e.error.code == "PRECONDITON_FAILED": + if retries > 5: + raise Exception("Unable to delete index " + index_name) + time.sleep(sleep_interval) + delete_with_retry(pc, index_name, retries + 1, sleep_interval * 2) + else: + logger.error(e.__class__) + logger.error(e) + raise Exception("Unable to delete index " + index_name) + except Exception as e: + logger.error(e.__class__) + logger.error(e) + raise Exception("Unable to delete index " + index_name) + + +def pytest_sessionfinish(session, exitstatus): + """ + Hook that runs after all tests have completed. + This is a good place to clean up any resources that were created during the test session. + """ + logger.info("Running final cleanup after all collection tests...") + + try: + pc = Pinecone() + indexes = pc.db.index.list() + test_indexes = [ + idx for idx in indexes if idx.tags is not None and idx.tags.get("test-run") == RUN_ID + ] + + logger.info(f"Indexes to delete: {[idx.name for idx in test_indexes]}") + + for idx in test_indexes: + if idx.deletion_protection == "enabled": + logger.info(f"Disabling deletion protection for index: {idx.name}") + pc.db.index.configure(name=idx.name, deletion_protection="disabled") + # Wait for index to be updated with status ready + logger.info(f"Waiting for index {idx.name} to be ready...") + timeout = 60 + while True and timeout > 0: + is_ready = pc.db.index.describe(name=idx.name).ready + if is_ready: + break + time.sleep(1) + timeout -= 1 + if timeout <= 0: + logger.warning(f"Index {idx.name} did not become ready in time") + else: + logger.info(f"Deletion protection is already disabled for index: {idx.name}") + + for idx in test_indexes: + try: + logger.info(f"Deleting index: {idx.name}") + pc.db.index.delete(name=idx.name, timeout=-1) + except Exception as e: + logger.warning(f"Failed to delete 
index {idx.name}: {str(e)}") + + collections = pc.db.collection.list() + logger.info(f"Collections to delete: {[col.name for col in collections]}") + + for col in collections: + try: + logger.info(f"Deleting collection: {col.name}") + pc.db.collection.delete(name=col.name) + except Exception as e: + logger.warning(f"Failed to delete collection {col.name}: {str(e)}") + + except Exception as e: + logger.error(f"Error during final cleanup: {str(e)}") + + logger.info("Final cleanup of collections tests completed") diff --git a/tests/integration/control/collections/helpers.py b/tests/integration/control/collections/helpers.py new file mode 100644 index 00000000..58633a69 --- /dev/null +++ b/tests/integration/control/collections/helpers.py @@ -0,0 +1,57 @@ +import time +import random +import logging + +logger = logging.getLogger(__name__) + + +def random_vector(dimension): + return [random.uniform(0, 1) for _ in range(dimension)] + + +def attempt_cleanup_collection(pc, collection_name): + max_wait = 120 + time_waited = 0 + deleted = False + + while time_waited < max_wait: + try: + pc.db.collection.delete(name=collection_name) + deleted = True + break + except Exception as e: + # Failures here usually happen because the backend thinks there is still some + # operation pending on the resource. + # These orphaned resources will get cleaned up by the cleanup job later. + logger.debug(f"Error while cleaning up collection: {e}") + logger.debug( + f"Waiting for collection {collection_name} to be deleted. Waited {time_waited} seconds..." 
+ ) + time.sleep(10) + time_waited += 10 + if not deleted: + logger.warning(f"Collection {collection_name} was not deleted after {max_wait} seconds") + + +def attempt_cleanup_index(pc, index_name): + max_wait = 120 + time_waited = 0 + deleted = False + + while time_waited < max_wait: + try: + pc.db.index.delete(name=index_name) + deleted = True + break + except Exception as e: + # Failures here usually happen because the backend thinks there is still some + # operation pending on the resource. + # These orphaned resources will get cleaned up by the cleanup job later. + logger.debug(f"Error while cleaning up index: {e}") + logger.debug( + f"Waiting for index {index_name} to be deleted. Waited {time_waited} seconds..." + ) + time.sleep(10) + time_waited += 10 + if not deleted: + logger.warning(f"Index {index_name} was not deleted after {max_wait} seconds") diff --git a/tests/integration/control/collections/test_dense_index.py b/tests/integration/control/collections/test_dense_index.py new file mode 100644 index 00000000..58ad0832 --- /dev/null +++ b/tests/integration/control/collections/test_dense_index.py @@ -0,0 +1,172 @@ +import time +from pinecone import PodSpec +from ...helpers import generate_index_name, generate_collection_name +import logging +from .helpers import attempt_cleanup_collection, attempt_cleanup_index, random_vector + +logger = logging.getLogger(__name__) + + +class TestCollectionsHappyPath: + def test_dense_index_to_collection_to_index(self, pc, pod_environment, index_tags): + # Create a pod index + index_name = generate_index_name("pod-index") + dimension = 10 + metric = "cosine" + pod_index = pc.db.index.create( + name=index_name, + dimension=dimension, + metric=metric, + spec=PodSpec(environment=pod_environment), + tags=index_tags, + ) + + # Insert some vectors into the pod index + idx = pc.Index(host=pod_index.host) + num_vectors = 10 + namespaces = ["", "test-ns1", "test-ns2"] + for namespace in namespaces: + vectors = [(str(i), 
random_vector(dimension)) for i in range(num_vectors)] + idx.upsert(vectors=vectors, namespace=namespace) + + # Wait for the vectors to be available + all_vectors_available = False + max_wait = 180 + time_waited = 0 + while not all_vectors_available and time_waited < max_wait: + all_vectors_available = True + desc = idx.describe_index_stats() + for namespace in namespaces: + if ( + desc.namespaces.get(namespace, None) is None + or desc.namespaces[namespace]["vector_count"] != num_vectors + ): + logger.debug(f"Waiting for vectors to be available in namespace {namespace}...") + all_vectors_available = False + break + for namespace in namespaces: + for i in range(num_vectors): + try: + idx.fetch(ids=[str(i)], namespace=namespace) + except Exception: + logger.debug( + f"Waiting for vector {i} to be available in namespace {namespace}..." + ) + all_vectors_available = False + break + if not all_vectors_available: + time.sleep(5) + time_waited += 5 + if not all_vectors_available: + raise Exception(f"Vectors were not available after {max_wait} seconds") + + # Create a collection from the pod index + collection_name = generate_collection_name("coll1") + pc.db.collection.create(name=collection_name, source=index_name) + collection_desc = pc.db.collection.describe(name=collection_name) + logger.debug(f"Collection desc: {collection_desc}") + assert collection_desc["name"] == collection_name + assert collection_desc["environment"] == pod_environment + assert collection_desc["status"] is not None + + # Wait for the collection to be ready + time_waited = 0 + max_wait = 120 + collection_ready = collection_desc["status"] + while collection_ready.lower() != "ready" and time_waited < max_wait: + logger.debug( + f"Waiting for collection {collection_name} to be ready. Waited {time_waited} seconds..." 
+ ) + desc = pc.db.collection.describe(name=collection_name) + logger.debug(f"Collection desc: {desc}") + collection_ready = desc["status"] + if collection_ready.lower() != "ready": + time.sleep(10) + time_waited += 10 + if collection_ready.lower() != "ready": + raise Exception(f"Collection {collection_name} is not ready after {max_wait} seconds") + + # Verify the collection was created + assert collection_name in pc.db.collection.list().names() + + # Verify the collection has the correct info + collection_desc = pc.db.collection.describe(name=collection_name) + logger.debug(f"Collection desc: {collection_desc}") + assert collection_desc["name"] == collection_name + assert collection_desc["environment"] == pod_environment + assert collection_desc["status"] == "Ready" + assert collection_desc["dimension"] == dimension + assert collection_desc["vector_count"] == len(namespaces) * num_vectors + assert collection_desc["size"] is not None + assert collection_desc["size"] > 0 + + # Create new index from collection + index_name2 = generate_index_name("index-from-collection-" + collection_name) + print(f"Creating index {index_name} from collection {collection_name}...") + new_index = pc.db.index.create( + name=index_name2, + dimension=dimension, + metric=metric, + spec=PodSpec(environment=pod_environment, source_collection=collection_name), + tags=index_tags, + ) + logger.debug(f"Created index {index_name2} from collection {collection_name}: {new_index}") + + # Wait for the index to be ready + max_wait = 120 + time_waited = 0 + index_ready = False + while not index_ready and time_waited < max_wait: + logger.debug( + f"Waiting for index {index_name} to be ready. Waited {time_waited} seconds..." 
+ ) + desc = pc.db.index.describe(name=index_name) + logger.debug(f"Index {index_name} status: {desc['status']}") + index_ready = desc["status"]["ready"] == True + if not index_ready: + time.sleep(10) + time_waited += 10 + if not index_ready: + raise Exception(f"Index {index_name} is not ready after {max_wait} seconds") + + new_index_desc = pc.db.index.describe(name=index_name) + logger.debug(f"New index desc: {new_index_desc}") + assert new_index_desc["name"] == index_name + assert new_index_desc["status"]["ready"] == True + + new_idx = pc.Index(name=index_name) + + # Verify stats reflect the vectors present in the collection + stats = new_idx.describe_index_stats() + logger.debug(f"New index stats: {stats}") + assert stats.total_vector_count == len(namespaces) * num_vectors + + # Verify the vectors from the collection can be fetched + for namespace in namespaces: + results = new_idx.fetch(ids=[v[0] for v in vectors], namespace=namespace) + logger.debug(f"Results for namespace {namespace}: {results}") + assert len(results.vectors) != 0 + + # Verify the vectors from the collection can be queried by id + for namespace in namespaces: + for i in range(num_vectors): + results = new_idx.query(top_k=3, id=str(i), namespace=namespace) + logger.debug( + f"Query results for namespace {namespace} and id {i} in index {index_name2}: {results}" + ) + assert len(results.matches) == 3 + + # Compapre with results from original index + original_results = idx.query(top_k=3, id=str(i), namespace=namespace) + logger.debug( + f"Original query results for namespace {namespace} and id {i} in index {index_name}: {original_results}" + ) + assert len(original_results.matches) == 3 + assert original_results.matches[0].id == results.matches[0].id + assert original_results.matches[1].id == results.matches[1].id + assert original_results.matches[2].id == results.matches[2].id + + # Cleanup + attempt_cleanup_collection(pc, collection_name) + attempt_cleanup_index(pc, index_name) + 
attempt_cleanup_index(pc, index_name2) diff --git a/tests/integration/control/index/__init__.py b/tests/integration/control/index/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control/index/conftest.py b/tests/integration/control/index/conftest.py new file mode 100644 index 00000000..805795f6 --- /dev/null +++ b/tests/integration/control/index/conftest.py @@ -0,0 +1,178 @@ +import pytest +import uuid +import time +import logging +import dotenv +import os +from datetime import datetime +from pinecone import Pinecone, NotFoundException, PineconeApiException +from ...helpers import generate_index_name, get_environment_var + +dotenv.load_dotenv() + +logger = logging.getLogger(__name__) +""" @private """ + +# Generate a unique ID for the entire test run +RUN_ID = str(uuid.uuid4()) + + +@pytest.fixture() +def index_tags(request): + test_name = request.node.name + if test_name is None: + test_name = "" + else: + test_name = test_name.replace(":", "_").replace("[", "_").replace("]", "_") + + tags = { + "test-suite": "pinecone-python-client", + "test-run": RUN_ID, + "test": test_name, + "created-at": datetime.now().strftime("%Y-%m-%d"), + } + + if os.getenv("USER"): + tags["user"] = os.getenv("USER") + return tags + + +@pytest.fixture() +def pc(): + api_key = get_environment_var("PINECONE_API_KEY") + return Pinecone( + api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} + ) + + +@pytest.fixture() +def pod_environment(): + return get_environment_var("PINECONE_ENVIRONMENT", "us-east1-gcp") + + +@pytest.fixture() +def serverless_cloud(): + return get_environment_var("SERVERLESS_CLOUD", "aws") + + +@pytest.fixture() +def serverless_region(): + return get_environment_var("SERVERLESS_REGION", "us-west-2") + + +@pytest.fixture() +def create_sl_index_params(index_name, serverless_cloud, serverless_region, index_tags): + spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} + return 
dict(name=index_name, dimension=10, metric="cosine", spec=spec, tags=index_tags) + + +@pytest.fixture() +def index_name(request): + test_name = request.node.name + return generate_index_name(test_name) + + +@pytest.fixture() +def ready_sl_index(pc, index_name, create_sl_index_params): + create_sl_index_params["timeout"] = None + pc.create_index(**create_sl_index_params) + yield index_name + pc.db.index.delete(name=index_name, timeout=-1) + + +@pytest.fixture() +def notready_sl_index(pc, index_name, create_sl_index_params): + create_sl_index_params["timeout"] = -1 + pc.create_index(**create_sl_index_params) + yield index_name + + +def delete_with_retry(pc, index_name, retries=0, sleep_interval=5): + logger.debug( + "Deleting index " + + index_name + + ", retry " + + str(retries) + + ", next sleep interval " + + str(sleep_interval) + ) + try: + pc.db.index.delete(name=index_name, timeout=-1) + except NotFoundException: + pass + except PineconeApiException as e: + if e.error.code == "PRECONDITON_FAILED": + if retries > 5: + raise Exception("Unable to delete index " + index_name) + time.sleep(sleep_interval) + delete_with_retry(pc, index_name, retries + 1, sleep_interval * 2) + else: + logger.error(e.__class__) + logger.error(e) + raise Exception("Unable to delete index " + index_name) + except Exception as e: + logger.error(e.__class__) + logger.error(e) + raise Exception("Unable to delete index " + index_name) + + +@pytest.fixture(autouse=True) +def cleanup(pc, index_name): + yield + + try: + desc = pc.db.index.describe(name=index_name) + if desc.deletion_protection == "enabled": + logger.info(f"Disabling deletion protection for index: {index_name}") + pc.db.index.configure(name=index_name, deletion_protection="disabled") + logger.debug("Attempting to delete index with name: " + index_name) + pc.db.index.delete(name=index_name, timeout=-1) + except Exception: + pass + + +def pytest_sessionfinish(session, exitstatus): + """ + Hook that runs after all tests have 
completed. + This is a good place to clean up any resources that were created during the test session. + """ + logger.info("Running final cleanup after all tests...") + + try: + pc = Pinecone() + indexes = pc.db.index.list() + test_indexes = [ + idx for idx in indexes if idx.tags is not None and idx.tags.get("test-run") == RUN_ID + ] + + logger.info(f"Indexes to delete: {[idx.name for idx in test_indexes]}") + + for idx in test_indexes: + if idx.deletion_protection == "enabled": + logger.info(f"Disabling deletion protection for index: {idx.name}") + pc.db.index.configure(name=idx.name, deletion_protection="disabled") + # Wait for index to be updated with status ready + logger.info(f"Waiting for index {idx.name} to be ready...") + timeout = 60 + while True and timeout > 0: + is_ready = pc.db.index.describe(name=idx.name).ready + if is_ready: + break + time.sleep(1) + timeout -= 1 + if timeout <= 0: + logger.warning(f"Index {idx.name} did not become ready in time") + else: + logger.info(f"Deletion protection is already disabled for index: {idx.name}") + + for idx in test_indexes: + try: + logger.info(f"Deleting index: {idx.name}") + pc.db.index.delete(name=idx.name, timeout=-1) + except Exception as e: + logger.warning(f"Failed to delete index {idx.name}: {str(e)}") + + except Exception as e: + logger.error(f"Error during final cleanup: {str(e)}") + + logger.info("Final cleanup completed") diff --git a/tests/integration/control/index/test_configure.py b/tests/integration/control/index/test_configure.py new file mode 100644 index 00000000..f4c73094 --- /dev/null +++ b/tests/integration/control/index/test_configure.py @@ -0,0 +1,43 @@ +class TestConfigureIndexTags: + def test_add_index_tags(self, pc, ready_sl_index): + starting_tags = pc.db.index.describe(name=ready_sl_index).tags + assert "foo" not in starting_tags + assert "bar" not in starting_tags + + pc.db.index.configure(name=ready_sl_index, tags={"foo": "FOO", "bar": "BAR"}) + + found_tags = 
pc.db.index.describe(name=ready_sl_index).tags.to_dict() + assert found_tags is not None + assert found_tags["foo"] == "FOO" + assert found_tags["bar"] == "BAR" + + def test_remove_tags_by_setting_empty_value_for_key(self, pc, ready_sl_index): + pc.db.index.configure(name=ready_sl_index, tags={"foo": "FOO", "bar": "BAR"}) + pc.db.index.configure(name=ready_sl_index, tags={}) + found_tags = pc.db.index.describe(name=ready_sl_index).tags.to_dict() + assert found_tags is not None + assert found_tags.get("foo", None) == "FOO", "foo should not be removed" + assert found_tags.get("bar", None) == "BAR", "bar should not be removed" + + pc.db.index.configure(name=ready_sl_index, tags={"foo": ""}) + found_tags2 = pc.db.index.describe(name=ready_sl_index).tags.to_dict() + assert found_tags2 is not None + assert found_tags2.get("foo", None) is None, "foo should be removed" + assert found_tags2.get("bar", None) == "BAR", "bar should not be removed" + + def test_merge_new_tags_with_existing_tags(self, pc, ready_sl_index): + pc.db.index.configure(name=ready_sl_index, tags={"foo": "FOO", "bar": "BAR"}) + pc.db.index.configure(name=ready_sl_index, tags={"baz": "BAZ"}) + found_tags = pc.db.index.describe(name=ready_sl_index).tags.to_dict() + assert found_tags is not None + assert found_tags.get("foo", None) == "FOO", "foo should not be removed" + assert found_tags.get("bar", None) == "BAR", "bar should not be removed" + assert found_tags.get("baz", None) == "BAZ", "baz should be added" + + def test_remove_multiple_tags(self, pc, ready_sl_index): + pc.db.index.configure(name=ready_sl_index, tags={"foo": "FOO", "bar": "BAR"}) + pc.db.index.configure(name=ready_sl_index, tags={"foo": "", "bar": ""}) + found_tags = pc.db.index.describe(name=ready_sl_index).tags.to_dict() + assert found_tags is not None + assert found_tags.get("foo", None) is None, "foo should be removed" + assert found_tags.get("bar", None) is None, "bar should be removed" diff --git 
a/tests/integration/control/index/test_create.py b/tests/integration/control/index/test_create.py new file mode 100644 index 00000000..75ffabf2 --- /dev/null +++ b/tests/integration/control/index/test_create.py @@ -0,0 +1,328 @@ +import pytest +import time +from pinecone import ( + Pinecone, + Metric, + VectorType, + DeletionProtection, + ServerlessSpec, + PodSpec, + CloudProvider, + AwsRegion, + PineconeApiValueError, + PineconeApiException, + PineconeApiTypeError, + PodIndexEnvironment, +) + + +class TestCreateServerlessIndexHappyPath: + def test_create_index(self, pc: Pinecone, index_name): + resp = pc.db.index.create( + name=index_name, + dimension=10, + spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + ) + assert resp.name == index_name + assert resp.dimension == 10 + assert resp.metric == "cosine" # default value + assert resp.vector_type == "dense" # default value + assert resp.deletion_protection == "disabled" # default value + + desc = pc.db.index.describe(name=index_name) + assert desc.name == index_name + assert desc.dimension == 10 + assert desc.metric == "cosine" + assert desc.deletion_protection == "disabled" # default value + assert desc.vector_type == "dense" # default value + + def test_create_skip_wait(self, pc, index_name): + resp = pc.db.index.create( + name=index_name, + dimension=10, + spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + timeout=-1, + ) + assert resp.name == index_name + assert resp.dimension == 10 + assert resp.metric == "cosine" + + def test_create_infinite_wait(self, pc, index_name): + resp = pc.db.index.create( + name=index_name, + dimension=10, + spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + timeout=None, + ) + assert resp.name == index_name + assert resp.dimension == 10 + assert resp.metric == "cosine" + + @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) + def test_create_default_index_with_metric(self, pc, 
create_sl_index_params, metric): + create_sl_index_params["metric"] = metric + pc.db.index.create(**create_sl_index_params) + desc = pc.db.index.describe(create_sl_index_params["name"]) + if isinstance(metric, str): + assert desc.metric == metric + else: + assert desc.metric == metric.value + assert desc.vector_type == "dense" + + @pytest.mark.parametrize( + "metric_enum,vector_type_enum,dim,tags", + [ + (Metric.COSINE, VectorType.DENSE, 10, None), + (Metric.EUCLIDEAN, VectorType.DENSE, 10, {"env": "prod"}), + (Metric.DOTPRODUCT, VectorType.SPARSE, None, {"env": "dev"}), + ], + ) + def test_create_with_enum_values( + self, pc, index_name, metric_enum, vector_type_enum, dim, tags + ): + args = { + "name": index_name, + "metric": metric_enum, + "vector_type": vector_type_enum, + "deletion_protection": DeletionProtection.DISABLED, + "spec": ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + "tags": tags, + } + if dim is not None: + args["dimension"] = dim + + pc.db.index.create(**args) + + desc = pc.db.index.describe(index_name) + assert desc.metric == metric_enum.value + assert desc.vector_type == vector_type_enum.value + assert desc.dimension == dim + assert desc.deletion_protection == DeletionProtection.DISABLED.value + assert desc.name == index_name + assert desc.spec.serverless.cloud == "aws" + assert desc.spec.serverless.region == "us-east-1" + if tags: + assert desc.tags.to_dict() == tags + + @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) + def test_create_dense_index_with_metric(self, pc, create_sl_index_params, metric): + create_sl_index_params["metric"] = metric + create_sl_index_params["vector_type"] = VectorType.DENSE + pc.db.index.create(**create_sl_index_params) + desc = pc.db.index.describe(create_sl_index_params["name"]) + assert desc.metric == metric + assert desc.vector_type == "dense" + + def test_create_with_optional_tags(self, pc, create_sl_index_params): + tags = {"foo": "FOO", "bar": "BAR"} + 
create_sl_index_params["tags"] = tags + pc.db.index.create(**create_sl_index_params) + desc = pc.db.index.describe(create_sl_index_params["name"]) + assert desc.tags.to_dict() == tags + + +class TestCreatePodIndexHappyPath: + def test_create_index_minimal_config( + self, pc: Pinecone, index_name, pod_environment, index_tags + ): + pc.db.index.create( + name=index_name, + dimension=10, + metric="cosine", + spec=PodSpec(environment=pod_environment), + tags=index_tags, + ) + + desc = pc.db.index.describe(name=index_name) + assert desc.name == index_name + assert desc.dimension == 10 + assert desc.metric == "cosine" + assert desc.spec.pod.environment == pod_environment + assert desc.tags.to_dict() == index_tags + assert desc.status.ready == True + assert desc.status.state == "Ready" + assert desc.vector_type == "dense" + + def test_create_index_with_spec_options( + self, pc: Pinecone, index_name, pod_environment, index_tags + ): + pc.db.index.create( + name=index_name, + dimension=10, + metric="cosine", + spec=PodSpec( + environment=pod_environment, + pod_type="p1.x2", + replicas=2, + metadata_config={"indexed": ["foo", "bar"]}, + ), + tags=index_tags, + ) + + desc = pc.db.index.describe(name=index_name) + assert desc.name == index_name + assert desc.dimension == 10 + assert desc.metric == "cosine" + assert desc.spec.pod.environment == pod_environment + assert desc.spec.pod.pod_type == "p1.x2" + assert desc.spec.pod.replicas == 2 + assert desc.spec.pod.metadata_config.indexed == ["foo", "bar"] + + def test_create_index_with_deletion_protection( + self, pc: Pinecone, index_name, pod_environment, index_tags + ): + pc.db.index.create( + name=index_name, + dimension=10, + metric="cosine", + spec=PodSpec(environment=pod_environment), + tags=index_tags, + deletion_protection=DeletionProtection.ENABLED, + ) + + try: + pc.db.index.delete(name=index_name) + except PineconeApiException as e: + assert "Deletion protection is enabled for this index" in str(e) + + 
pc.db.index.configure(name=index_name, deletion_protection=DeletionProtection.DISABLED) + max_wait_time = 60 + while pc.db.index.describe(name=index_name).status.ready == False: + time.sleep(1) + max_wait_time -= 1 + if max_wait_time <= 0: + raise Exception("Index did not become ready in time") + + pc.db.index.delete(name=index_name) + assert pc.db.index.has(name=index_name) == False + + +class TestCreatePodIndexApiErrorCases: + def test_pod_index_does_not_support_sparse_vectors(self, pc, index_name, index_tags): + with pytest.raises(PineconeApiException) as e: + pc.db.index.create( + name=index_name, + metric="dotproduct", + spec=PodSpec(environment=PodIndexEnvironment.AWS_US_EAST_1), + vector_type="sparse", + tags=index_tags, + ) + assert "Sparse vector type is not supported for pod indexes" in str(e.value) + + +class TestCreateServerlessIndexApiErrorCases: + def test_create_index_with_invalid_name(self, pc, create_sl_index_params): + create_sl_index_params["name"] = "Invalid-name" + with pytest.raises(PineconeApiException): + pc.db.index.create(**create_sl_index_params) + + def test_create_index_invalid_metric(self, pc, create_sl_index_params): + create_sl_index_params["metric"] = "invalid" + with pytest.raises(PineconeApiValueError): + pc.db.index.create(**create_sl_index_params) + + def test_create_index_with_invalid_neg_dimension(self, pc, create_sl_index_params): + create_sl_index_params["dimension"] = -1 + with pytest.raises(PineconeApiValueError): + pc.db.index.create(**create_sl_index_params) + + def test_create_index_that_already_exists(self, pc, create_sl_index_params): + pc.db.index.create(**create_sl_index_params) + with pytest.raises(PineconeApiException): + pc.db.index.create(**create_sl_index_params) + + +class TestCreateServerlessIndexWithTimeout: + def test_create_index_default_timeout(self, pc, create_sl_index_params): + create_sl_index_params["timeout"] = None + pc.db.index.create(**create_sl_index_params) + # Waits infinitely for index to be 
ready + desc = pc.db.index.describe(create_sl_index_params["name"]) + assert desc.status.ready == True + + def test_create_index_when_timeout_set(self, pc, create_sl_index_params): + create_sl_index_params["timeout"] = ( + 1000 # effectively infinite, but different code path from None + ) + pc.db.index.create(**create_sl_index_params) + desc = pc.db.index.describe(name=create_sl_index_params["name"]) + assert desc.status.ready == True + + def test_create_index_with_negative_timeout(self, pc, create_sl_index_params): + create_sl_index_params["timeout"] = -1 + pc.db.index.create(**create_sl_index_params) + desc = pc.db.index.describe(create_sl_index_params["name"]) + # Returns immediately without waiting for index to be ready + assert desc.status.ready in [False, True] + + +class TestCreateIndexTypeErrorCases: + def test_create_index_with_invalid_str_dimension(self, pc, create_sl_index_params): + create_sl_index_params["dimension"] = "10" + with pytest.raises(PineconeApiTypeError): + pc.db.index.create(**create_sl_index_params) + + def test_create_index_with_missing_dimension(self, pc, create_sl_index_params): + del create_sl_index_params["dimension"] + with pytest.raises(PineconeApiException): + pc.db.index.create(**create_sl_index_params) + + def test_create_index_w_incompatible_options(self, pc, create_sl_index_params): + create_sl_index_params["pod_type"] = "p1.x2" + create_sl_index_params["environment"] = "us-east1-gcp" + create_sl_index_params["replicas"] = 2 + with pytest.raises(TypeError): + pc.db.index.create(**create_sl_index_params) + + @pytest.mark.parametrize("required_option", ["name", "spec", "dimension"]) + def test_create_with_missing_required_options( + self, pc, create_sl_index_params, required_option + ): + del create_sl_index_params[required_option] + with pytest.raises(Exception) as e: + pc.db.index.create(**create_sl_index_params) + assert required_option.lower() in str(e.value).lower() + + +class TestSparseIndex: + def 
test_create_sparse_index_minimal_config(self, pc: Pinecone, index_name, index_tags): + pc.db.index.create( + name=index_name, + metric="dotproduct", + spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + vector_type=VectorType.SPARSE, + tags=index_tags, + ) + + desc = pc.db.index.describe(name=index_name) + assert desc.name == index_name + assert desc.metric == "dotproduct" + assert desc.vector_type == "sparse" + + +class TestSparseIndexErrorCases: + def test_sending_dimension_with_sparse_index(self, pc, index_tags): + with pytest.raises(ValueError) as e: + pc.db.index.create( + name="test-index", + dimension=10, + metric="dotproduct", + vector_type=VectorType.SPARSE, + spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + tags=index_tags, + ) + assert "dimension should not be specified for sparse indexes" in str(e.value) + + @pytest.mark.parametrize("bad_metric", ["cosine", "euclidean"]) + def test_sending_metric_other_than_dotproduct_with_sparse_index( + self, pc, index_tags, bad_metric + ): + with pytest.raises(PineconeApiException) as e: + pc.db.index.create( + name="test-index", + metric=bad_metric, + vector_type=VectorType.SPARSE, + spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + tags=index_tags, + ) + assert "Sparse vector indexes must use the metric dotproduct" in str(e.value) diff --git a/tests/integration/control/index/test_delete.py b/tests/integration/control/index/test_delete.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control/index/test_describe.py b/tests/integration/control/index/test_describe.py new file mode 100644 index 00000000..df7f5896 --- /dev/null +++ b/tests/integration/control/index/test_describe.py @@ -0,0 +1,46 @@ +from pinecone import IndexModel + + +class TestDescribeIndex: + def test_describe_index_when_ready(self, pc, ready_sl_index, create_sl_index_params): + description = pc.db.index.describe(ready_sl_index) + + assert 
isinstance(description, IndexModel) + assert description.name == ready_sl_index + assert description.dimension == create_sl_index_params["dimension"] + assert description.metric == create_sl_index_params["metric"] + assert ( + description.spec.serverless["cloud"] + == create_sl_index_params["spec"]["serverless"]["cloud"] + ) + assert ( + description.spec.serverless["region"] + == create_sl_index_params["spec"]["serverless"]["region"] + ) + + assert isinstance(description.host, str) + assert description.host != "" + assert ready_sl_index in description.host + + assert description.status.state == "Ready" + assert description.status.ready == True + + def test_describe_index_when_not_ready(self, pc, notready_sl_index, create_sl_index_params): + description = pc.db.index.describe(notready_sl_index) + + assert isinstance(description, IndexModel) + assert description.name == notready_sl_index + assert description.dimension == create_sl_index_params["dimension"] + assert description.metric == create_sl_index_params["metric"] + assert ( + description.spec.serverless["cloud"] + == create_sl_index_params["spec"]["serverless"]["cloud"] + ) + assert ( + description.spec.serverless["region"] + == create_sl_index_params["spec"]["serverless"]["region"] + ) + + assert isinstance(description.host, str) + assert description.host != "" + assert notready_sl_index in description.host diff --git a/tests/integration/control/index/test_has.py b/tests/integration/control/index/test_has.py new file mode 100644 index 00000000..1a356a99 --- /dev/null +++ b/tests/integration/control/index/test_has.py @@ -0,0 +1,18 @@ +from tests.integration.helpers import random_string + + +class TestHasIndex: + def test_index_exists_success(self, pc, create_sl_index_params): + name = create_sl_index_params["name"] + pc.db.index.create(**create_sl_index_params) + has_index = pc.db.index.has(name) + assert has_index == True + + def test_index_does_not_exist(self, pc): + name = random_string(8) + has_index = 
pc.db.index.has(name) + assert has_index == False + + def test_has_index_with_null_index_name(self, pc): + has_index = pc.db.index.has("") + assert has_index == False diff --git a/tests/integration/control/index/test_list.py b/tests/integration/control/index/test_list.py new file mode 100644 index 00000000..4e217ea5 --- /dev/null +++ b/tests/integration/control/index/test_list.py @@ -0,0 +1,27 @@ +from pinecone import IndexModel + + +class TestListIndexes: + def test_list_indexes_includes_ready_indexes(self, pc, ready_sl_index, create_sl_index_params): + list_response = pc.db.index.list() + assert len(list_response.indexes) != 0 + assert isinstance(list_response.indexes[0], IndexModel) + + created_index = [index for index in list_response.indexes if index.name == ready_sl_index][ + 0 + ] + assert created_index.name == ready_sl_index + assert created_index.dimension == create_sl_index_params["dimension"] + assert created_index.metric == create_sl_index_params["metric"] + assert ready_sl_index in created_index.host + + def test_list_indexes_includes_not_ready_indexes(self, pc, notready_sl_index): + list_response = pc.db.index.list() + assert len(list_response.indexes) != 0 + assert isinstance(list_response.indexes[0], IndexModel) + + created_index = [ + index for index in list_response.indexes if index.name == notready_sl_index + ][0] + assert created_index.name == notready_sl_index + assert notready_sl_index in created_index.name From 85c48398092135ea0df76818c8b04ac26a20b4ae Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Tue, 6 May 2025 10:21:20 -0400 Subject: [PATCH 20/48] Fix mypy errors --- pinecone/__init__.py | 48 ++--------------- pinecone/__init__.pyi | 118 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 121 insertions(+), 45 deletions(-) create mode 100644 pinecone/__init__.pyi diff --git a/pinecone/__init__.py b/pinecone/__init__.py index f228eddf..51112c63 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -37,7 +37,7 @@ 
"QueryResponse": ("pinecone.db_data.models", "QueryResponse"), "UpsertResponse": ("pinecone.db_data.models", "UpsertResponse"), "UpdateRequest": ("pinecone.db_data.models", "UpdateRequest"), - "ImportErrorMode": ("pinecone.core.openapi.db_data.model", "ImportErrorMode"), + "ImportErrorMode": ("pinecone.core.openapi.db_data.models", "ImportErrorMode"), "VectorDictionaryMissingKeysError": ( "pinecone.db_data.errors", "VectorDictionaryMissingKeysError", @@ -117,49 +117,8 @@ # Primary client classes "Pinecone", "PineconeAsyncio", - # Config classes - "Config", - "ConfigBuilder", - "PineconeConfig", - # OpenAPI classes - "CloudProvider", - "AwsRegion", - "GcpRegion", - "AzureRegion", - "PodIndexEnvironment", - "Metric", - "VectorType", - "DeletionProtection", - "CollectionDescription", - "CollectionList", - "IndexList", - "IndexModel", - "IndexEmbed", - "ImportErrorMode", - "ServerlessSpec", - "ServerlessSpecDefinition", - "PodSpec", - "PodSpecDefinition", - "PodType", - "Vector", - "FetchResponse", - "DeleteRequest", - "DescribeIndexStatsRequest", - "DescribeIndexStatsResponse", - "RpcStatus", - "ScoredVector", - "SingleQueryResults", - "QueryRequest", - "QueryResponse", - "SearchQuery", - "SearchQueryVector", - "SearchRerank", - "UpsertResponse", - "UpdateRequest", - "SparseValues", - # Inference classes - "RerankModel", - "EmbedModel", + # All lazy-loaded types + *list(_LAZY_IMPORTS.keys()), # Exception classes "PineconeException", "PineconeApiException", @@ -169,7 +128,6 @@ "PineconeApiTypeError", "PineconeApiValueError", "PineconeApiKeyError", - "PineconeApiException", "NotFoundException", "UnauthorizedException", "ForbiddenException", diff --git a/pinecone/__init__.pyi b/pinecone/__init__.pyi new file mode 100644 index 00000000..249fe9d8 --- /dev/null +++ b/pinecone/__init__.pyi @@ -0,0 +1,118 @@ +from pinecone.config import Config +from pinecone.config import ConfigBuilder +from pinecone.config import PineconeConfig +from pinecone.inference import RerankModel 
+from pinecone.inference import EmbedModel +from pinecone.db_data.dataclasses import ( + Vector, + SparseValues, + SearchQuery, + SearchQueryVector, + SearchRerank, +) +from pinecone.db_data.models import ( + FetchResponse, + DeleteRequest, + DescribeIndexStatsRequest, + IndexDescription as DescribeIndexStatsResponse, + RpcStatus, + ScoredVector, + SingleQueryResults, + QueryRequest, + QueryResponse, + UpsertResponse, + UpdateRequest, +) +from pinecone.core.openapi.db_data.models import ImportErrorMode +from pinecone.db_data.errors import ( + VectorDictionaryMissingKeysError, + VectorDictionaryExcessKeysError, + VectorTupleLengthError, + SparseValuesTypeError, + SparseValuesMissingKeysError, + SparseValuesDictionaryExpectedError, +) +from pinecone.db_control.enums import ( + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, + PodIndexEnvironment, + Metric, + VectorType, + DeletionProtection, + PodType, +) +from pinecone.db_control.models import ( + CollectionDescription, + CollectionList, + IndexList, + IndexModel, + IndexEmbed, + ServerlessSpec, + ServerlessSpecDefinition, + PodSpec, + PodSpecDefinition, +) +from pinecone.pinecone import Pinecone +from pinecone.pinecone_asyncio import PineconeAsyncio + +# Re-export all the types +__all__ = [ + # Primary client classes + "Pinecone", + "PineconeAsyncio", + # Config classes + "Config", + "ConfigBuilder", + "PineconeConfig", + # Inference classes + "RerankModel", + "EmbedModel", + # Data classes + "Vector", + "SparseValues", + "SearchQuery", + "SearchQueryVector", + "SearchRerank", + # Model classes + "FetchResponse", + "DeleteRequest", + "DescribeIndexStatsRequest", + "DescribeIndexStatsResponse", + "RpcStatus", + "ScoredVector", + "SingleQueryResults", + "QueryRequest", + "QueryResponse", + "UpsertResponse", + "UpdateRequest", + "ImportErrorMode", + # Error classes + "VectorDictionaryMissingKeysError", + "VectorDictionaryExcessKeysError", + "VectorTupleLengthError", + "SparseValuesTypeError", + 
"SparseValuesMissingKeysError", + "SparseValuesDictionaryExpectedError", + # Control plane enums + "CloudProvider", + "AwsRegion", + "GcpRegion", + "AzureRegion", + "PodIndexEnvironment", + "Metric", + "VectorType", + "DeletionProtection", + "PodType", + # Control plane models + "CollectionDescription", + "CollectionList", + "IndexList", + "IndexModel", + "IndexEmbed", + "ServerlessSpec", + "ServerlessSpecDefinition", + "PodSpec", + "PodSpecDefinition", +] From 93d16103fbbb56d20b0898982ac1bc5472167598 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Tue, 6 May 2025 10:58:42 -0400 Subject: [PATCH 21/48] Fix test failures, lint errors --- pinecone/control/__init__.py | 4 +-- pinecone/data/__init__.py | 4 +-- pinecone/data/features/__init__.py | 4 +-- .../data/features/bulk_imports/__init__.py | 4 +-- pinecone/data/features/inference/__init__.py | 4 +-- pinecone/db_control/db_control.py | 6 ++--- pinecone/db_control/resources/sync/index.py | 26 +++++++++---------- pinecone/models/__init__.py | 4 +-- pinecone/openapi_support/exceptions.py | 2 +- pinecone/pinecone.py | 23 ++++++++++++++++ pinecone/pinecone_asyncio.py | 23 ++++++++++++++++ 11 files changed, 75 insertions(+), 29 deletions(-) diff --git a/pinecone/control/__init__.py b/pinecone/control/__init__.py index 4f04e477..b45bc64e 100644 --- a/pinecone/control/__init__.py +++ b/pinecone/control/__init__.py @@ -1,9 +1,9 @@ import warnings +from pinecone.db_control import * + warnings.warn( "The module at `pinecone.control` has moved to `pinecone.db_control`. " "This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, ) - -from pinecone.db_control import * diff --git a/pinecone/data/__init__.py b/pinecone/data/__init__.py index 0268ac16..3ea4cd41 100644 --- a/pinecone/data/__init__.py +++ b/pinecone/data/__init__.py @@ -1,10 +1,10 @@ import warnings +from pinecone.db_data import * + warnings.warn( "The module at `pinecone.data` has moved to `pinecone.db_data`. 
" "Please update your imports. " "This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, ) - -from pinecone.db_data import * diff --git a/pinecone/data/features/__init__.py b/pinecone/data/features/__init__.py index fd64a554..e4ff12ee 100644 --- a/pinecone/data/features/__init__.py +++ b/pinecone/data/features/__init__.py @@ -1,10 +1,10 @@ import warnings +from pinecone.db_data.features import * + warnings.warn( "The module at `pinecone.data.features` has moved to `pinecone.db_data.features`. " "Please update your imports. " "This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, ) - -from pinecone.db_data.features import * diff --git a/pinecone/data/features/bulk_imports/__init__.py b/pinecone/data/features/bulk_imports/__init__.py index 740d503b..3af0d1f5 100644 --- a/pinecone/data/features/bulk_imports/__init__.py +++ b/pinecone/data/features/bulk_imports/__init__.py @@ -1,10 +1,10 @@ import warnings +from pinecone.db_data.features.bulk_import import * + warnings.warn( "The module at `pinecone.data.features.bulk_import` has moved to `pinecone.db_data.features.bulk_import`. " "Please update your imports. " "This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, ) - -from pinecone.db_data.features.bulk_import import * diff --git a/pinecone/data/features/inference/__init__.py b/pinecone/data/features/inference/__init__.py index b0918dd5..0280f382 100644 --- a/pinecone/data/features/inference/__init__.py +++ b/pinecone/data/features/inference/__init__.py @@ -1,10 +1,10 @@ import warnings +from pinecone.inference import * + warnings.warn( "The module at `pinecone.data.features.inference` has moved to `pinecone.inference`. " "Please update your imports. 
" "This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, ) - -from pinecone.inference import * diff --git a/pinecone/db_control/db_control.py b/pinecone/db_control/db_control.py index f03f349d..d0cc92fd 100644 --- a/pinecone/db_control/db_control.py +++ b/pinecone/db_control/db_control.py @@ -27,7 +27,7 @@ def __init__(self, config, openapi_config, pool_threads): self.pool_threads = pool_threads """ @private """ - self.index_api = setup_openapi_client( + self._index_api = setup_openapi_client( api_client_klass=ApiClient, api_klass=ManageIndexesApi, config=self.config, @@ -48,7 +48,7 @@ def index(self) -> "IndexResource": if self._index_resource is None: from .resources.sync.index import IndexResource - self._index_resource = IndexResource(index_api=self.index_api, config=self.config) + self._index_resource = IndexResource(index_api=self._index_api, config=self.config) return self._index_resource @property @@ -56,5 +56,5 @@ def collection(self) -> "CollectionResource": if self._collection_resource is None: from .resources.sync.collection import CollectionResource - self._collection_resource = CollectionResource(self.index_api) + self._collection_resource = CollectionResource(self._index_api) return self._collection_resource diff --git a/pinecone/db_control/resources/sync/index.py b/pinecone/db_control/resources/sync/index.py index 85876d6c..6ecf4cd2 100644 --- a/pinecone/db_control/resources/sync/index.py +++ b/pinecone/db_control/resources/sync/index.py @@ -27,13 +27,13 @@ class IndexResource: def __init__(self, index_api, config): - self.index_api = index_api + self._index_api = index_api """ @private """ - self.config = config + self._config = config """ @private """ - self.index_host_store = IndexHostStore() + self._index_host_store = IndexHostStore() """ @private """ def create( @@ -56,7 +56,7 @@ def create( vector_type=vector_type, tags=tags, ) - resp = self.index_api.create_index(create_index_request=req) + 
resp = self._index_api.create_index(create_index_request=req) if timeout == -1: return IndexModel(resp) @@ -80,7 +80,7 @@ def create_for_model( tags=tags, deletion_protection=deletion_protection, ) - resp = self.index_api.create_index_for_model(req) + resp = self._index_api.create_index_for_model(req) if timeout == -1: return IndexModel(resp) @@ -125,8 +125,8 @@ def is_ready() -> bool: return description def delete(self, name: str, timeout: Optional[int] = None): - self.index_api.delete_index(name) - self.index_host_store.delete_host(self.config, name) + self._index_api.delete_index(name) + self._index_host_store.delete_host(self._config, name) if timeout == -1: return @@ -148,14 +148,14 @@ def delete(self, name: str, timeout: Optional[int] = None): ) def list(self) -> IndexList: - response = self.index_api.list_indexes() + response = self._index_api.list_indexes() return IndexList(response) def describe(self, name: str) -> IndexModel: - api_instance = self.index_api + api_instance = self._index_api description = api_instance.describe_index(name) host = description.host - self.index_host_store.set_host(self.config, name, host) + self._index_host_store.set_host(self._config, name, host) return IndexModel(description) @@ -173,7 +173,7 @@ def configure( deletion_protection: Optional[Union[DeletionProtection, str]] = None, tags: Optional[Dict[str, str]] = None, ): - api_instance = self.index_api + api_instance = self._index_api description = self.describe(name=name) req = PineconeDBControlRequestFactory.configure_index_request( @@ -187,6 +187,6 @@ def configure( def _get_host(self, name: str) -> str: """@private""" - return self.index_host_store.get_host( - api=self.index_api, config=self.config, index_name=name + return self._index_host_store.get_host( + api=self._index_api, config=self._config, index_name=name ) diff --git a/pinecone/models/__init__.py b/pinecone/models/__init__.py index 74a1658c..fb94ddf5 100644 --- a/pinecone/models/__init__.py +++ 
b/pinecone/models/__init__.py @@ -1,9 +1,9 @@ import warnings +from pinecone.db_control.models import * + warnings.warn( "The module at `pinecone.models` has moved to `pinecone.db_control.models`. " "This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, ) - -from pinecone.db_control.models import * diff --git a/pinecone/openapi_support/exceptions.py b/pinecone/openapi_support/exceptions.py index 5729b13e..c9fcc571 100644 --- a/pinecone/openapi_support/exceptions.py +++ b/pinecone/openapi_support/exceptions.py @@ -1 +1 @@ -from pinecone.exceptions import * +from pinecone.exceptions import * # noqa: F403 diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py index 38462390..c12c228a 100644 --- a/pinecone/pinecone.py +++ b/pinecone/pinecone.py @@ -1,6 +1,7 @@ import logging from typing import Optional, Dict, Union, TYPE_CHECKING from multiprocessing import cpu_count +import warnings from .legacy_pinecone_interface import LegacyPineconeDBControlInterface @@ -19,6 +20,8 @@ _IndexAsyncio as IndexAsyncio, ) from pinecone.db_control import DBControl + from pinecone.db_control.index_host_store import IndexHostStore + from pinecone.core.openapi.db_control.api.manage_indexes_api import IndexOperationsApi from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict from pinecone.db_control.enums import ( Metric, @@ -119,6 +122,26 @@ def db(self) -> "DBControl": ) return self._db_control + @property + def index_host_store(self) -> "IndexHostStore": + """@private""" + warnings.warn( + "The `index_host_store` property is deprecated. This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self.db.index._index_host_store + + @property + def index_api(self) -> "IndexOperationsApi": + """@private""" + warnings.warn( + "The `index_api` property is deprecated. 
This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self.db._index_api + def create_index( self, name: str, diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index 3da739f7..321d9c6e 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -1,4 +1,5 @@ import logging +import warnings from typing import Optional, Dict, Union, TYPE_CHECKING from pinecone.config import PineconeConfig, ConfigBuilder @@ -30,6 +31,8 @@ CollectionList, IndexEmbed, ) + from pinecone.core.openapi.db_control.api.manage_indexes_api import IndexOperationsApi + from pinecone.db_control.index_host_store import IndexHostStore logger = logging.getLogger(__name__) """ @private """ @@ -167,6 +170,26 @@ def db(self): ) return self._db_control + @property + def index_host_store(self) -> "IndexHostStore": + """@private""" + warnings.warn( + "The `index_host_store` property is deprecated. This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self.db.index._index_host_store + + @property + def index_api(self) -> "IndexOperationsApi": + """@private""" + warnings.warn( + "The `index_api` property is deprecated. 
This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self.db._index_api + async def create_index( self, name: str, From 8937278714add320731430414f984d8289106a0a Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Tue, 6 May 2025 11:03:19 -0400 Subject: [PATCH 22/48] Fix grpc unit tests --- tests/unit/test_config.py | 6 ++-- tests/unit/test_control.py | 48 ++++++++++++------------- tests/unit/test_index_initialization.py | 2 +- 3 files changed, 28 insertions(+), 28 deletions(-) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 1da981ad..08e2a493 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -107,7 +107,7 @@ def test_config_pool_threads(self): # to trigger the setup so we can inspect the config assert pc.db is not None - assert pc.db.index_api.api_client.pool_threads == 10 + assert pc.db._index_api.api_client.pool_threads == 10 idx = pc.Index(host="my-index-host.pinecone.io", name="my-index-name") assert idx._vector_api.api_client.pool_threads == 10 @@ -154,5 +154,5 @@ def test_proxy_config(self): # to trigger the setup so we can inspect the config assert pc.db is not None - assert pc.db.index_api.api_client.configuration.proxy == "http://localhost:8080" - assert pc.db.index_api.api_client.configuration.ssl_ca_cert == "path/to/cert-bundle.pem" + assert pc.db._index_api.api_client.configuration.proxy == "http://localhost:8080" + assert pc.db._index_api.api_client.configuration.ssl_ca_cert == "path/to/cert-bundle.pem" diff --git a/tests/unit/test_control.py b/tests/unit/test_control.py index da252063..6cce0f92 100644 --- a/tests/unit/test_control.py +++ b/tests/unit/test_control.py @@ -87,37 +87,37 @@ def test_plugins_are_lazily_loaded(self): def test_default_host(self): p = Pinecone(api_key="123-456-789") - assert p.db.index_api.api_client.configuration.host == "https://api.pinecone.io" + assert p.db._index_api.api_client.configuration.host 
== "https://api.pinecone.io" def test_passing_host(self): p = Pinecone(api_key="123-456-789", host="my-host.pinecone.io") - assert p.db.index_api.api_client.configuration.host == "https://my-host.pinecone.io" + assert p.db._index_api.api_client.configuration.host == "https://my-host.pinecone.io" def test_passing_additional_headers(self): extras = {"header1": "my-value", "header2": "my-value2"} p = Pinecone(api_key="123-456-789", additional_headers=extras) for key, value in extras.items(): - assert p.db.index_api.api_client.default_headers[key] == value - assert "User-Agent" in p.db.index_api.api_client.default_headers - assert "X-Pinecone-API-Version" in p.db.index_api.api_client.default_headers - assert "header1" in p.db.index_api.api_client.default_headers - assert "header2" in p.db.index_api.api_client.default_headers - assert len(p.db.index_api.api_client.default_headers) == 4 + assert p.db._index_api.api_client.default_headers[key] == value + assert "User-Agent" in p.db._index_api.api_client.default_headers + assert "X-Pinecone-API-Version" in p.db._index_api.api_client.default_headers + assert "header1" in p.db._index_api.api_client.default_headers + assert "header2" in p.db._index_api.api_client.default_headers + assert len(p.db._index_api.api_client.default_headers) == 4 def test_overwrite_useragent(self): # This doesn't seem like a common use case, but we may want to allow this # when embedding the client in other pinecone tools such as canopy. 
extras = {"User-Agent": "test-user-agent"} p = Pinecone(api_key="123-456-789", additional_headers=extras) - assert "X-Pinecone-API-Version" in p.db.index_api.api_client.default_headers - assert p.db.index_api.api_client.default_headers["User-Agent"] == "test-user-agent" - assert len(p.db.index_api.api_client.default_headers) == 2 + assert "X-Pinecone-API-Version" in p.db._index_api.api_client.default_headers + assert p.db._index_api.api_client.default_headers["User-Agent"] == "test-user-agent" + assert len(p.db._index_api.api_client.default_headers) == 2 def test_set_source_tag_in_useragent(self): p = Pinecone(api_key="123-456-789", source_tag="test_source_tag") assert ( - re.search(r"source_tag=test_source_tag", p.db.index_api.api_client.user_agent) + re.search(r"source_tag=test_source_tag", p.db._index_api.api_client.user_agent) is not None ) @@ -150,8 +150,8 @@ def test_create_index_with_timeout( expected_sleep_calls, ): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.db.index_api, "describe_index", side_effect=describe_index_responses) - mocker.patch.object(p.db.index_api, "create_index") + mocker.patch.object(p.db._index_api, "describe_index", side_effect=describe_index_responses) + mocker.patch.object(p.db._index_api, "create_index") mocker.patch("time.sleep") p.create_index( @@ -161,8 +161,8 @@ def test_create_index_with_timeout( timeout=timeout_value, ) - assert p.db.index_api.create_index.call_count == 1 - assert p.db.index_api.describe_index.call_count == expected_describe_index_calls + assert p.db._index_api.create_index.call_count == 1 + assert p.db._index_api.describe_index.call_count == expected_describe_index_calls assert time.sleep.call_count == expected_sleep_calls @pytest.mark.parametrize( @@ -211,7 +211,7 @@ def test_create_index_with_spec_dictionary(self, mocker, index_spec): p = Pinecone(api_key="123-456-789") mock_api = MagicMock() - mocker.patch.object(p.db, "index_api", mock_api) + mocker.patch.object(p.db, "_index_api", 
mock_api) p.create_index(name="my-index", dimension=10, spec=index_spec) @@ -246,8 +246,8 @@ def test_create_index_from_source_collection( expected_sleep_calls, ): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.db.index_api, "describe_index", side_effect=describe_index_responses) - mocker.patch.object(p.db.index_api, "create_index") + mocker.patch.object(p.db._index_api, "describe_index", side_effect=describe_index_responses) + mocker.patch.object(p.db._index_api, "create_index") mocker.patch("time.sleep") p.create_index( @@ -257,18 +257,18 @@ def test_create_index_from_source_collection( timeout=timeout_value, ) - assert p.db.index_api.create_index.call_count == 1 - assert p.db.index_api.describe_index.call_count == expected_describe_index_calls + assert p.db._index_api.create_index.call_count == 1 + assert p.db._index_api.describe_index.call_count == expected_describe_index_calls assert time.sleep.call_count == expected_sleep_calls def test_create_index_when_timeout_exceeded(self, mocker): with pytest.raises(TimeoutError): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.db.index_api, "create_index") + mocker.patch.object(p.db._index_api, "create_index") describe_index_response = [description_with_status(False)] * 5 mocker.patch.object( - p.db.index_api, "describe_index", side_effect=describe_index_response + p.db._index_api, "describe_index", side_effect=describe_index_response ) mocker.patch("time.sleep") @@ -279,7 +279,7 @@ def test_create_index_when_timeout_exceeded(self, mocker): def test_list_indexes_returns_iterable(self, mocker, index_list_response): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.db.index_api, "list_indexes", side_effect=[index_list_response]) + mocker.patch.object(p.db._index_api, "list_indexes", side_effect=[index_list_response]) response = p.list_indexes() assert [i.name for i in response] == ["index1", "index2", "index3"] diff --git a/tests/unit/test_index_initialization.py 
b/tests/unit/test_index_initialization.py index 29928fbc..e20e3f78 100644 --- a/tests/unit/test_index_initialization.py +++ b/tests/unit/test_index_initialization.py @@ -51,6 +51,6 @@ def test_overwrite_useragent(self): def test_set_source_tag(self): pc = Pinecone(api_key="123-456-789", source_tag="test_source_tag") assert ( - re.search(r"source_tag=test_source_tag", pc.db.index_api.api_client.user_agent) + re.search(r"source_tag=test_source_tag", pc.db._index_api.api_client.user_agent) is not None ) From b5b3b85f3d21992bda28c0bcf3308b945286138f Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Tue, 6 May 2025 11:15:33 -0400 Subject: [PATCH 23/48] Fix lint errors --- pinecone/pinecone.py | 4 ++-- pinecone/pinecone_asyncio.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py index c12c228a..1d65cee1 100644 --- a/pinecone/pinecone.py +++ b/pinecone/pinecone.py @@ -21,7 +21,7 @@ ) from pinecone.db_control import DBControl from pinecone.db_control.index_host_store import IndexHostStore - from pinecone.core.openapi.db_control.api.manage_indexes_api import IndexOperationsApi + from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict from pinecone.db_control.enums import ( Metric, @@ -133,7 +133,7 @@ def index_host_store(self) -> "IndexHostStore": return self.db.index._index_host_store @property - def index_api(self) -> "IndexOperationsApi": + def index_api(self) -> "ManageIndexesApi": """@private""" warnings.warn( "The `index_api` property is deprecated. 
This warning will become an error in a future version of the Pinecone Python SDK.", diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index 321d9c6e..504dcdd7 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -31,7 +31,7 @@ CollectionList, IndexEmbed, ) - from pinecone.core.openapi.db_control.api.manage_indexes_api import IndexOperationsApi + from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi from pinecone.db_control.index_host_store import IndexHostStore logger = logging.getLogger(__name__) @@ -181,7 +181,7 @@ def index_host_store(self) -> "IndexHostStore": return self.db.index._index_host_store @property - def index_api(self) -> "IndexOperationsApi": + def index_api(self) -> "ManageIndexesApi": """@private""" warnings.warn( "The `index_api` property is deprecated. This warning will become an error in a future version of the Pinecone Python SDK.", From 163cde79246bdc2ef3a4b546679a73926cf9a176 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Wed, 7 May 2025 15:24:18 -0400 Subject: [PATCH 24/48] Fix mypy errors and warnings --- pinecone/db_control/db_control.py | 14 +- pinecone/db_control/db_control_asyncio.py | 17 +- pinecone/db_data/index.py | 48 +++- pinecone/grpc/pinecone.py | 10 +- pinecone/inference/inference.py | 44 +++- pinecone/pinecone.py | 54 +++-- pinecone/pinecone_asyncio.py | 4 +- pinecone/utils/plugin_aware.py | 28 ++- tests/__init__.py | 5 - tests/integration/__init__.py | 3 + tests/integration/conftest.py | 40 ++++ .../integration/control/index/test_create.py | 2 +- .../control_asyncio/index/__init__.py | 0 .../control_asyncio/index/conftest.py | 206 ++++++++++++++++++ .../control_asyncio/index/test_create.py | 162 ++++++++++++++ tests/unit/models/test_index_model.py | 2 +- tests/unit/test_config.py | 20 +- tests/unit/test_plugin_aware.py | 14 +- .../test_grpc_index_initialization.py | 5 +- 19 files changed, 591 insertions(+), 87 deletions(-) create mode 100644 
tests/integration/conftest.py create mode 100644 tests/integration/control_asyncio/index/__init__.py create mode 100644 tests/integration/control_asyncio/index/conftest.py create mode 100644 tests/integration/control_asyncio/index/test_create.py diff --git a/pinecone/db_control/db_control.py b/pinecone/db_control/db_control.py index d0cc92fd..69aef889 100644 --- a/pinecone/db_control/db_control.py +++ b/pinecone/db_control/db_control.py @@ -18,21 +18,21 @@ class DBControl: def __init__(self, config, openapi_config, pool_threads): - self.config = config + self._config = config """ @private """ - self.openapi_config = openapi_config + self._openapi_config = openapi_config """ @private """ - self.pool_threads = pool_threads + self._pool_threads = pool_threads """ @private """ self._index_api = setup_openapi_client( api_client_klass=ApiClient, api_klass=ManageIndexesApi, - config=self.config, - openapi_config=self.openapi_config, - pool_threads=pool_threads, + config=self._config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, api_version=API_VERSION, ) """ @private """ @@ -48,7 +48,7 @@ def index(self) -> "IndexResource": if self._index_resource is None: from .resources.sync.index import IndexResource - self._index_resource = IndexResource(index_api=self._index_api, config=self.config) + self._index_resource = IndexResource(index_api=self._index_api, config=self._config) return self._index_resource @property diff --git a/pinecone/db_control/db_control_asyncio.py b/pinecone/db_control/db_control_asyncio.py index 2fce306e..91e3f179 100644 --- a/pinecone/db_control/db_control_asyncio.py +++ b/pinecone/db_control/db_control_asyncio.py @@ -17,15 +17,18 @@ class DBControlAsyncio: - def __init__(self, config, openapi_config, pool_threads): - self.config = config + def __init__(self, config, openapi_config): + self._config = config """ @private """ - self.index_api = setup_async_openapi_client( + self._openapi_config = openapi_config + """ @private 
""" + + self._index_api = setup_async_openapi_client( api_client_klass=AsyncioApiClient, api_klass=AsyncioManageIndexesApi, - config=self.config, - openapi_config=self.openapi_config, + config=self._config, + openapi_config=self._openapi_config, api_version=API_VERSION, ) """ @private """ @@ -42,7 +45,7 @@ def index(self) -> "IndexResourceAsyncio": from .resources.asyncio.index import IndexResourceAsyncio self._index_resource = IndexResourceAsyncio( - index_api=self.index_api, config=self.config + index_api=self._index_api, config=self._config ) return self._index_resource @@ -51,5 +54,5 @@ def collection(self) -> "CollectionResourceAsyncio": if self._collection_resource is None: from .resources.asyncio.collection import CollectionResourceAsyncio - self._collection_resource = CollectionResourceAsyncio(self.index_api) + self._collection_resource = CollectionResourceAsyncio(self._index_api) return self._collection_resource diff --git a/pinecone/db_data/index.py b/pinecone/db_data/index.py index a228bfbe..6c78b849 100644 --- a/pinecone/db_data/index.py +++ b/pinecone/db_data/index.py @@ -1,8 +1,8 @@ from pinecone.utils.tqdm import tqdm - +import warnings import logging import json -from typing import Union, List, Optional, Dict, Any, Literal +from typing import Union, List, Optional, Dict, Any, Literal, TYPE_CHECKING from pinecone.config import ConfigBuilder @@ -45,6 +45,9 @@ from concurrent.futures import as_completed +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration + logger = logging.getLogger(__name__) """ @private """ @@ -70,29 +73,29 @@ def __init__( openapi_config=None, **kwargs, ): - self.config = ConfigBuilder.build( + self._config = ConfigBuilder.build( api_key=api_key, host=host, additional_headers=additional_headers, **kwargs ) """ @private """ - self.openapi_config = ConfigBuilder.build_openapi_config(self.config, openapi_config) + self._openapi_config = ConfigBuilder.build_openapi_config(self._config, openapi_config) """ 
@private """ if pool_threads is None: - self.pool_threads = 5 * cpu_count() + self._pool_threads = 5 * cpu_count() """ @private """ else: - self.pool_threads = pool_threads + self._pool_threads = pool_threads """ @private """ if kwargs.get("connection_pool_maxsize", None): - self.openapi_config.connection_pool_maxsize = kwargs.get("connection_pool_maxsize") + self._openapi_config.connection_pool_maxsize = kwargs.get("connection_pool_maxsize") self._vector_api = setup_openapi_client( api_client_klass=ApiClient, api_klass=VectorOperationsApi, - config=self.config, - openapi_config=self.openapi_config, - pool_threads=pool_threads, + config=self._config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, api_version=API_VERSION, ) @@ -101,6 +104,31 @@ def __init__( # Pass the same api_client to the ImportFeatureMixin super().__init__(api_client=self._api_client) + @property + def config(self) -> "Config": + """@private""" + return self._config + + @property + def openapi_config(self) -> "OpenApiConfiguration": + """@private""" + warnings.warn( + "The `openapi_config` property has been renamed to `_openapi_config`. It is considered private and should not be used directly. This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self._openapi_config + + @property + def pool_threads(self) -> int: + """@private""" + warnings.warn( + "The `pool_threads` property has been renamed to `_pool_threads`. It is considered private and should not be used directly. 
This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self._pool_threads + def _openapi_kwargs(self, kwargs: Dict[str, Any]) -> Dict[str, Any]: return filter_dict(kwargs, OPENAPI_ENDPOINT_PARAMS) diff --git a/pinecone/grpc/pinecone.py b/pinecone/grpc/pinecone.py index 28a13622..7c869e8f 100644 --- a/pinecone/grpc/pinecone.py +++ b/pinecone/grpc/pinecone.py @@ -124,13 +124,13 @@ def Index(self, name: str = "", host: str = "", **kwargs): # Use host if it is provided, otherwise get host from describe_index index_host = host or self.db.index._get_host(name) - pt = kwargs.pop("pool_threads", None) or self.pool_threads + pt = kwargs.pop("pool_threads", None) or self._pool_threads config = ConfigBuilder.build( - api_key=self.config.api_key, + api_key=self._config.api_key, host=index_host, - source_tag=self.config.source_tag, - proxy_url=self.config.proxy_url, - ssl_ca_certs=self.config.ssl_ca_certs, + source_tag=self._config.source_tag, + proxy_url=self._config.proxy_url, + ssl_ca_certs=self._config.ssl_ca_certs, ) return GRPCIndex(index_name=name, config=config, pool_threads=pt, **kwargs) diff --git a/pinecone/inference/inference.py b/pinecone/inference/inference.py index 9ab34e33..62e6cbcd 100644 --- a/pinecone/inference/inference.py +++ b/pinecone/inference/inference.py @@ -1,5 +1,6 @@ import logging -from typing import Optional, Dict, List, Union, Any +import warnings +from typing import Optional, Dict, List, Union, Any, TYPE_CHECKING from pinecone.openapi_support import ApiClient from pinecone.core.openapi.inference.apis import InferenceApi @@ -7,13 +8,15 @@ from pinecone.core.openapi.inference import API_VERSION from pinecone.utils import setup_openapi_client, PluginAware - from .inference_request_builder import ( InferenceRequestBuilder, EmbedModel as EmbedModelEnum, RerankModel as RerankModelEnum, ) +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration + logger 
= logging.getLogger(__name__) """ @private """ @@ -44,14 +47,14 @@ class Inference(PluginAware): EmbedModel = EmbedModelEnum RerankModel = RerankModelEnum - def __init__(self, config, openapi_config, **kwargs) -> None: - self.config = config + def __init__(self, config: "Config", openapi_config: "OpenApiConfiguration", **kwargs) -> None: + self._config = config """ @private """ - self.openapi_config = openapi_config + self._openapi_config = openapi_config """ @private """ - self.pool_threads = kwargs.get("pool_threads", 1) + self._pool_threads = kwargs.get("pool_threads", 1) """ @private """ self.__inference_api = setup_openapi_client( @@ -59,12 +62,39 @@ def __init__(self, config, openapi_config, **kwargs) -> None: api_klass=InferenceApi, config=config, openapi_config=openapi_config, - pool_threads=kwargs.get("pool_threads", 1), + pool_threads=self._pool_threads, api_version=API_VERSION, ) super().__init__() # Initialize PluginAware + @property + def config(self) -> "Config": + """@private""" + # The config property is considered private, but the name cannot be changed to include underscore + # without breaking compatibility with plugins in the wild. + return self._config + + @property + def openapi_config(self) -> "OpenApiConfiguration": + """@private""" + warnings.warn( + "The `openapi_config` property has been renamed to `_openapi_config`. It is considered private and should not be used directly. This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self._openapi_config + + @property + def pool_threads(self) -> int: + """@private""" + warnings.warn( + "The `pool_threads` property has been renamed to `_pool_threads`. It is considered private and should not be used directly. 
This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self._pool_threads + def embed( self, model: Union[EmbedModelEnum, str], diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py index 1d65cee1..73168bbc 100644 --- a/pinecone/pinecone.py +++ b/pinecone/pinecone.py @@ -3,10 +3,10 @@ from multiprocessing import cpu_count import warnings -from .legacy_pinecone_interface import LegacyPineconeDBControlInterface - from pinecone.config import PineconeConfig, ConfigBuilder +from .legacy_pinecone_interface import LegacyPineconeDBControlInterface + from pinecone.utils import normalize_host, PluginAware, docslinks from .langchain_import_warnings import _build_langchain_attribute_error_message @@ -14,6 +14,7 @@ """ @private """ if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration from pinecone.db_data import ( _Index as Index, _Inference as Inference, @@ -66,7 +67,7 @@ def __init__( f"Passing {deprecated_kwarg} is no longer supported. Please pass individual settings such as proxy_url, proxy_headers, ssl_ca_certs, and ssl_verify directly to the Pinecone constructor as keyword arguments. See the README at {docslinks['README']} for examples." 
) - self.config = PineconeConfig.build( + self._config = PineconeConfig.build( api_key=api_key, host=host, additional_headers=additional_headers, @@ -78,14 +79,14 @@ def __init__( ) """ @private """ - self.openapi_config = ConfigBuilder.build_openapi_config(self.config, **kwargs) + self._openapi_config = ConfigBuilder.build_openapi_config(self._config, **kwargs) """ @private """ if pool_threads is None: - self.pool_threads = 5 * cpu_count() + self._pool_threads = 5 * cpu_count() """ @private """ else: - self.pool_threads = pool_threads + self._pool_threads = pool_threads """ @private """ self._inference: Optional["Inference"] = None # Lazy initialization @@ -104,7 +105,7 @@ def inference(self) -> "Inference": if self._inference is None: from pinecone.db_data import _Inference - self._inference = _Inference(config=self.config, openapi_config=self.openapi_config) + self._inference = _Inference(config=self._config, openapi_config=self._openapi_config) return self._inference @property @@ -116,9 +117,9 @@ def db(self) -> "DBControl": from pinecone.db_control import DBControl self._db_control = DBControl( - config=self.config, - openapi_config=self.openapi_config, - pool_threads=self.pool_threads, + config=self._config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, ) return self._db_control @@ -132,6 +133,33 @@ def index_host_store(self) -> "IndexHostStore": ) return self.db.index._index_host_store + @property + def config(self) -> "Config": + """@private""" + # The config property is considered private, but the name cannot be changed to include underscore + # without breaking compatibility with plugins in the wild. + return self._config + + @property + def openapi_config(self) -> "OpenApiConfiguration": + """@private""" + warnings.warn( + "The `openapi_config` property has been renamed to `_openapi_config`. It is considered private and should not be used directly. 
This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self._openapi_config + + @property + def pool_threads(self) -> int: + """@private""" + warnings.warn( + "The `pool_threads` property has been renamed to `_pool_threads`. It is considered private and should not be used directly. This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self._pool_threads + @property def index_api(self) -> "ManageIndexesApi": """@private""" @@ -242,9 +270,9 @@ def Index(self, name: str = "", host: str = "", **kwargs) -> "Index": if name == "" and host == "": raise ValueError("Either name or host must be specified") - pt = kwargs.pop("pool_threads", None) or self.pool_threads - api_key = self.config.api_key - openapi_config = self.openapi_config + pt = kwargs.pop("pool_threads", None) or self._pool_threads + api_key = self._config.api_key + openapi_config = self._openapi_config if host != "": check_realistic_host(host) diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index 504dcdd7..c4736591 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -164,9 +164,7 @@ def db(self): from .db_control.db_control_asyncio import DBControlAsyncio self._db_control = DBControlAsyncio( - config=self.config, - openapi_config=self.openapi_config, - pool_threads=self.pool_threads, + config=self.config, openapi_config=self.openapi_config ) return self._db_control diff --git a/pinecone/utils/plugin_aware.py b/pinecone/utils/plugin_aware.py index 92093fcb..4a27351a 100644 --- a/pinecone/utils/plugin_aware.py +++ b/pinecone/utils/plugin_aware.py @@ -17,8 +17,12 @@ class PluginAware: This class provides functionality to lazily load plugins when they are first accessed. 
Subclasses must set the following attributes before calling super().__init__(): - config: Config - - openapi_config: OpenApiConfig - - pool_threads: int + - _openapi_config: OpenApiConfig + - _pool_threads: int + + These attributes are considered private and should not be used by end users. The config property + is also considered private, but it was originally named without the underscore and this name + can't be changed without breaking compatibility with plugins in the wild. """ def __init__(self, *args: Any, **kwargs: Any) -> None: @@ -41,12 +45,17 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: missing_attrs = [] if not hasattr(self, "config"): missing_attrs.append("config") - if not hasattr(self, "openapi_config"): - missing_attrs.append("openapi_config") - if not hasattr(self, "pool_threads"): - missing_attrs.append("pool_threads") + if not hasattr(self, "_openapi_config"): + missing_attrs.append("_openapi_config") + if not hasattr(self, "_pool_threads"): + missing_attrs.append("_pool_threads") if missing_attrs: + logger.error( + f"PluginAware class requires the following attributes: {', '.join(missing_attrs)}. " + f"These must be set in the {self.__class__.__name__} class's __init__ method " + f"before calling super().__init__()." + ) raise AttributeError( f"PluginAware class requires the following attributes: {', '.join(missing_attrs)}. " f"These must be set in the {self.__class__.__name__} class's __init__ method " @@ -67,8 +76,9 @@ def __getattr__(self, name: str) -> Any: Raises: AttributeError: If the attribute cannot be found after loading plugins. """ + logger.debug("__getattr__ called for %s", name) # Check if this is one of the required attributes that should be set by subclasses - required_attrs = ["config", "openapi_config", "pool_threads"] + required_attrs = ["config", "_openapi_config", "_pool_threads"] if name in required_attrs: raise AttributeError( f"'{self.__class__.__name__}' object has no attribute '{name}'. 
" @@ -81,8 +91,8 @@ def __getattr__(self, name: str) -> Any: # Use object.__getattribute__ to avoid triggering __getattr__ again try: config = object.__getattribute__(self, "config") - openapi_config = object.__getattribute__(self, "openapi_config") - pool_threads = object.__getattribute__(self, "pool_threads") + openapi_config = object.__getattribute__(self, "_openapi_config") + pool_threads = object.__getattribute__(self, "_pool_threads") self.load_plugins( config=config, openapi_config=openapi_config, pool_threads=pool_threads ) diff --git a/tests/__init__.py b/tests/__init__.py index 84ca0481..e69de29b 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +0,0 @@ -import logging - -# logging.basicConfig( -# format="%(levelname)s [%(asctime)s] %(name)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S" -# ) diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index e69de29b..76acad39 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -0,0 +1,3 @@ +import dotenv + +dotenv.load_dotenv() diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 00000000..181ab04a --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,40 @@ +import logging +from pinecone import Pinecone +from datetime import datetime, timedelta + +logger = logging.getLogger(__name__) + + +def pytest_sessionfinish(session, exitstatus): + """ + Hook that runs after all tests have completed. + This is a good place to clean up any resources that were created during the test session. 
+ """ + logger.info("Running final cleanup after all tests...") + + try: + # Initialize Pinecone client + pc = Pinecone() + + # Get all indexes + indexes = pc.list_indexes() + + # Find test indexes (those created during this test run) + test_indexes = [idx for idx in indexes.names() if idx.startswith("test-")] + + # Delete test indexes that are older than 1 hour (in case of failed cleanup) + for index_name in test_indexes: + try: + description = pc.describe_index(name=index_name) + created_at = datetime.fromisoformat(description.created_at.replace("Z", "+00:00")) + + if datetime.now(created_at.tzinfo) - created_at > timedelta(hours=1): + logger.info(f"Cleaning up old test index: {index_name}") + pc.delete_index(index_name, timeout=-1) + except Exception as e: + logger.warning(f"Failed to clean up index {index_name}: {str(e)}") + + except Exception as e: + logger.error(f"Error during final cleanup: {str(e)}") + + logger.info("Final cleanup completed") diff --git a/tests/integration/control/index/test_create.py b/tests/integration/control/index/test_create.py index 75ffabf2..a3aa4406 100644 --- a/tests/integration/control/index/test_create.py +++ b/tests/integration/control/index/test_create.py @@ -203,7 +203,7 @@ def test_pod_index_does_not_support_sparse_vectors(self, pc, index_name, index_t pc.db.index.create( name=index_name, metric="dotproduct", - spec=PodSpec(environment=PodIndexEnvironment.AWS_US_EAST_1), + spec=PodSpec(environment=PodIndexEnvironment.US_EAST1_AWS), vector_type="sparse", tags=index_tags, ) diff --git a/tests/integration/control_asyncio/index/__init__.py b/tests/integration/control_asyncio/index/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control_asyncio/index/conftest.py b/tests/integration/control_asyncio/index/conftest.py new file mode 100644 index 00000000..ea17bc58 --- /dev/null +++ b/tests/integration/control_asyncio/index/conftest.py @@ -0,0 +1,206 @@ +import pytest +import time +import random 
+import asyncio +from ...helpers import get_environment_var, generate_index_name +import logging +from typing import Callable, Optional, Awaitable, Union + +from pinecone import ( + CloudProvider, + AwsRegion, + ServerlessSpec, + PineconeApiException, + NotFoundException, +) + +logger = logging.getLogger(__name__) + + +def build_client(): + from pinecone import PineconeAsyncio + + return PineconeAsyncio() + + +@pytest.fixture(scope="session") +def client(): + # This returns the sync client. Not for use in tests + # but can be used to help with cleanup after test runs + from pinecone import Pinecone + + return Pinecone() + + +@pytest.fixture(scope="session") +def build_pc(): + return build_client + + +async def poll_for_freshness(asyncio_idx, target_namespace, target_vector_count): + max_wait_time = 60 * 3 # 3 minutes + time_waited = 0 + wait_per_iteration = 5 + + while True: + stats = await asyncio_idx.describe_index_stats() + logger.debug( + "Polling for freshness on index %s. Current vector count: %s. Waiting for: %s", + asyncio_idx, + stats.total_vector_count, + target_vector_count, + ) + if target_namespace == "": + if stats.total_vector_count >= target_vector_count: + break + else: + if ( + target_namespace in stats.namespaces + and stats.namespaces[target_namespace].vector_count >= target_vector_count + ): + break + time_waited += wait_per_iteration + if time_waited >= max_wait_time: + raise TimeoutError( + "Timeout waiting for index to have expected vector count of {}".format( + target_vector_count + ) + ) + await asyncio.sleep(wait_per_iteration) + + return stats + + +async def wait_until( + condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]], + timeout: Optional[float] = 10.0, + interval: float = 0.1, +) -> None: + """ + Waits asynchronously until the given (async or sync) condition returns True or times out. + + Args: + condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over. 
+ timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely. + interval: Time in seconds between checks of the condition. + + Raises: + asyncio.TimeoutError: If the condition is not met within the timeout period. + """ + start_time = asyncio.get_event_loop().time() + + while True: + result = await condition() if asyncio.iscoroutinefunction(condition) else condition() + if result: + return + + if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: + raise asyncio.TimeoutError("Condition not met within the timeout period.") + + remaining_time = ( + (start_time + timeout) - asyncio.get_event_loop().time() + if timeout is not None + else None + ) + logger.debug( + "Condition not met yet. Waiting for %.2f seconds. Timeout in %.2f seconds.", + interval, + remaining_time, + ) + await asyncio.sleep(interval) + + +@pytest.fixture() +def serverless_cloud(): + return get_environment_var("SERVERLESS_CLOUD", "aws") + + +@pytest.fixture() +def serverless_region(): + return get_environment_var("SERVERLESS_REGION", "us-west-2") + + +@pytest.fixture() +def spec1(serverless_cloud, serverless_region): + return {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} + + +@pytest.fixture() +def spec2(): + return ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1) + + +@pytest.fixture() +def spec3(): + return {"serverless": {"cloud": CloudProvider.AWS, "region": AwsRegion.US_EAST_1}} + + +@pytest.fixture() +def create_sl_index_params(index_name, serverless_cloud, serverless_region): + spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} + return dict(name=index_name, dimension=10, metric="cosine", spec=spec) + + +@pytest.fixture() +def random_vector(): + return [random.uniform(0, 1) for _ in range(10)] + + +@pytest.fixture() +def index_name(request): + test_name = request.node.name + return generate_index_name(test_name) + + +@pytest.fixture() +def 
ready_sl_index(client, index_name, create_sl_index_params): + create_sl_index_params["timeout"] = None + client.create_index(**create_sl_index_params) + yield index_name + client.delete_index(index_name, -1) + + +@pytest.fixture() +def notready_sl_index(client, index_name, create_sl_index_params): + client.create_index(**create_sl_index_params, timeout=-1) + yield index_name + + +def delete_with_retry(client, index_name, retries=0, sleep_interval=5): + print( + "Deleting index " + + index_name + + ", retry " + + str(retries) + + ", next sleep interval " + + str(sleep_interval) + ) + try: + client.delete_index(index_name, -1) + except NotFoundException: + pass + except PineconeApiException as e: + if e.error.code == "PRECONDITON_FAILED": + if retries > 5: + raise "Unable to delete index " + index_name + time.sleep(sleep_interval) + delete_with_retry(client, index_name, retries + 1, sleep_interval * 2) + else: + print(e.__class__) + print(e) + raise "Unable to delete index " + index_name + except Exception as e: + print(e.__class__) + print(e) + raise "Unable to delete index " + index_name + + +@pytest.fixture(autouse=True) +async def cleanup(client, index_name): + yield + + try: + logger.debug("Attempting to delete index with name: " + index_name) + client.index.delete(name=index_name, timeout=-1) + except Exception: + pass diff --git a/tests/integration/control_asyncio/index/test_create.py b/tests/integration/control_asyncio/index/test_create.py new file mode 100644 index 00000000..b85cfebc --- /dev/null +++ b/tests/integration/control_asyncio/index/test_create.py @@ -0,0 +1,162 @@ +import pytest +from pinecone import ( + PineconeAsyncio, + Metric, + VectorType, + DeletionProtection, + ServerlessSpec, + CloudProvider, + AwsRegion, +) + + +@pytest.mark.asyncio +class TestAsyncioCreateIndex: + @pytest.mark.parametrize("spec_fixture", ("spec1", "spec2", "spec3")) + async def test_create_index(self, index_name, request, spec_fixture): + pc = PineconeAsyncio() + spec = 
request.getfixturevalue(spec_fixture) + + resp = await pc.db.index.create(name=index_name, dimension=10, spec=spec) + + assert resp.name == index_name + assert resp.dimension == 10 + assert resp.metric == "cosine" # default value + assert resp.vector_type == "dense" # default value + assert resp.deletion_protection == "disabled" # default value + + desc = await pc.db.index.describe(name=index_name) + assert desc.name == index_name + assert desc.dimension == 10 + assert desc.metric == "cosine" + assert desc.deletion_protection == "disabled" # default value + assert desc.vector_type == "dense" # default value + await pc.close() + + async def test_create_skip_wait(self, index_name, spec1): + pc = PineconeAsyncio() + resp = await pc.db.index.create(name=index_name, dimension=10, spec=spec1, timeout=-1) + assert resp.name == index_name + assert resp.dimension == 10 + assert resp.metric == "cosine" + await pc.close() + + async def test_create_infinite_wait(self, index_name, spec1): + async with PineconeAsyncio() as pc: + resp = await pc.db.index.create(name=index_name, dimension=10, spec=spec1, timeout=None) + assert resp.name == index_name + assert resp.dimension == 10 + assert resp.metric == "cosine" + + @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) + async def test_create_default_index_with_metric(self, index_name, metric, spec1): + pc = PineconeAsyncio() + + await pc.db.index.create(name=index_name, dimension=10, spec=spec1, metric=metric) + desc = await pc.db.index.describe(index_name) + if isinstance(metric, str): + assert desc.metric == metric + else: + assert desc.metric == metric.value + assert desc.vector_type == "dense" + await pc.close() + + @pytest.mark.parametrize( + "metric_enum,vector_type_enum,dim,tags", + [ + (Metric.COSINE, VectorType.DENSE, 10, None), + (Metric.EUCLIDEAN, VectorType.DENSE, 10, {"env": "prod"}), + (Metric.DOTPRODUCT, VectorType.SPARSE, None, {"env": "dev"}), + ], + ) + async def 
test_create_with_enum_values_and_tags( + self, index_name, metric_enum, vector_type_enum, dim, tags + ): + pc = PineconeAsyncio() + args = { + "name": index_name, + "metric": metric_enum, + "vector_type": vector_type_enum, + "deletion_protection": DeletionProtection.DISABLED, + "spec": ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + "tags": tags, + } + if dim is not None: + args["dimension"] = dim + + await pc.db.index.create(**args) + + desc = await pc.db.index.describe(index_name) + assert desc.metric == metric_enum.value + assert desc.vector_type == vector_type_enum.value + assert desc.dimension == dim + assert desc.deletion_protection == DeletionProtection.DISABLED.value + assert desc.name == index_name + assert desc.spec.serverless.cloud == "aws" + assert desc.spec.serverless.region == "us-east-1" + if tags: + assert desc.tags.to_dict() == tags + await pc.close() + + @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) + async def test_create_dense_index_with_metric(self, index_name, spec1, metric): + pc = PineconeAsyncio() + + await pc.create_index( + name=index_name, dimension=10, spec=spec1, metric=metric, vector_type=VectorType.DENSE + ) + + desc = await pc.db.index.describe(index_name) + assert desc.metric == metric + assert desc.vector_type == "dense" + await pc.close() + + async def test_create_with_optional_tags(self, index_name, spec1): + pc = PineconeAsyncio() + tags = {"foo": "FOO", "bar": "BAR"} + + await pc.create_index(name=index_name, dimension=10, spec=spec1, tags=tags) + + desc = await pc.db.index.describe(index_name) + assert desc.tags.to_dict() == tags + await pc.close() + + async def test_create_sparse_index(self, index_name, spec1): + pc = PineconeAsyncio() + + await pc.create_index( + name=index_name, spec=spec1, metric=Metric.DOTPRODUCT, vector_type=VectorType.SPARSE + ) + + desc = await pc.db.index.describe(index_name) + assert desc.vector_type == "sparse" + assert desc.dimension is None + 
assert desc.vector_type == "sparse" + assert desc.metric == "dotproduct" + await pc.close() + + async def test_create_with_deletion_protection(self, index_name, spec1): + pc = PineconeAsyncio() + + await pc.create_index( + name=index_name, + spec=spec1, + metric=Metric.DOTPRODUCT, + vector_type=VectorType.SPARSE, + deletion_protection=DeletionProtection.ENABLED, + ) + + desc = await pc.db.index.describe(index_name) + assert desc.deletion_protection == "enabled" + assert desc.metric == "dotproduct" + assert desc.vector_type == "sparse" + assert desc.dimension is None + + with pytest.raises(Exception): + await pc.delete_index(index_name) + + await pc.configure_index(index_name, deletion_protection=DeletionProtection.DISABLED) + + desc2 = await pc.db.index.describe(index_name) + assert desc2.deletion_protection == "disabled" + await pc.close() diff --git a/tests/unit/models/test_index_model.py b/tests/unit/models/test_index_model.py index 7320ce8d..7aeb88d1 100644 --- a/tests/unit/models/test_index_model.py +++ b/tests/unit/models/test_index_model.py @@ -5,7 +5,7 @@ ServerlessSpec, DeletionProtection, ) -from pinecone.models import IndexModel +from pinecone.db_control.models import IndexModel from pinecone import CloudProvider, AwsRegion diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 08e2a493..90ce4c1f 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -115,8 +115,8 @@ def test_ssl_config_passed_to_index_client(self): proxy_headers = make_headers(proxy_basic_auth="asdf") pc = Pinecone(api_key="key", ssl_ca_certs="path/to/cert", proxy_headers=proxy_headers) - assert pc.openapi_config.ssl_ca_cert == "path/to/cert" - assert pc.openapi_config.proxy_headers == proxy_headers + assert pc._openapi_config.ssl_ca_cert == "path/to/cert" + assert pc._openapi_config.proxy_headers == proxy_headers idx = pc.Index(host="host.pinecone.io") assert idx._vector_api.api_client.configuration.ssl_ca_cert == "path/to/cert" @@ -126,16 +126,16 
@@ def test_host_config_not_clobbered_by_index(self): proxy_headers = make_headers(proxy_basic_auth="asdf") pc = Pinecone(api_key="key", ssl_ca_certs="path/to/cert", proxy_headers=proxy_headers) - assert pc.openapi_config.ssl_ca_cert == "path/to/cert" - assert pc.openapi_config.proxy_headers == proxy_headers - assert pc.openapi_config.host == "https://api.pinecone.io" + assert pc._openapi_config.ssl_ca_cert == "path/to/cert" + assert pc._openapi_config.proxy_headers == proxy_headers + assert pc._openapi_config.host == "https://api.pinecone.io" idx = pc.Index(host="host.pinecone.io") assert idx._vector_api.api_client.configuration.ssl_ca_cert == "path/to/cert" assert idx._vector_api.api_client.configuration.proxy_headers == proxy_headers assert idx._vector_api.api_client.configuration.host == "https://host.pinecone.io" - assert pc.openapi_config.host == "https://api.pinecone.io" + assert pc._openapi_config.host == "https://api.pinecone.io" def test_proxy_config(self): pc = Pinecone( @@ -144,11 +144,11 @@ def test_proxy_config(self): ssl_ca_certs="path/to/cert-bundle.pem", ) - assert pc.config.proxy_url == "http://localhost:8080" - assert pc.config.ssl_ca_certs == "path/to/cert-bundle.pem" + assert pc._config.proxy_url == "http://localhost:8080" + assert pc._config.ssl_ca_certs == "path/to/cert-bundle.pem" - assert pc.openapi_config.proxy == "http://localhost:8080" - assert pc.openapi_config.ssl_ca_cert == "path/to/cert-bundle.pem" + assert pc._openapi_config.proxy == "http://localhost:8080" + assert pc._openapi_config.ssl_ca_cert == "path/to/cert-bundle.pem" # DBControl object is created lazily, so we need to access this property # to trigger the setup so we can inspect the config diff --git a/tests/unit/test_plugin_aware.py b/tests/unit/test_plugin_aware.py index 315bd225..a2912bfa 100644 --- a/tests/unit/test_plugin_aware.py +++ b/tests/unit/test_plugin_aware.py @@ -13,16 +13,16 @@ def __init__(self): with pytest.raises(AttributeError) as e: Foo() - assert 
"config" in str(e.value) - assert "openapi_config" in str(e.value) - assert "pool_threads" in str(e.value) + assert "_config" in str(e.value) + assert "_openapi_config" in str(e.value) + assert "_pool_threads" in str(e.value) def test_correctly_raise_attribute_errors(self): class Foo(PluginAware): def __init__(self): self.config = Config() - self.openapi_config = OpenApiConfiguration() - self.pool_threads = 1 + self._openapi_config = OpenApiConfiguration() + self._pool_threads = 1 super().__init__() @@ -37,8 +37,8 @@ def test_plugins_are_lazily_loaded(self): class Pinecone(PluginAware): def __init__(self): self.config = Config() - self.openapi_config = OpenApiConfiguration() - self.pool_threads = 10 + self._openapi_config = OpenApiConfiguration() + self._pool_threads = 10 super().__init__() diff --git a/tests/unit_grpc/test_grpc_index_initialization.py b/tests/unit_grpc/test_grpc_index_initialization.py index 710c3f26..b0b64250 100644 --- a/tests/unit_grpc/test_grpc_index_initialization.py +++ b/tests/unit_grpc/test_grpc_index_initialization.py @@ -43,7 +43,7 @@ def test_config_passed_when_target_by_name(self): # Set this state in the host store to skip network call # to find host for name - pc.index_host_store.set_host(pc.config, "my-index", "myhost") + pc.db.index._index_host_store.set_host(pc._config, "my-index", "myhost") config = GRPCClientConfig(timeout=10, secure=False) index = pc.Index(name="my-index", grpc_config=config) @@ -88,5 +88,6 @@ def test_config_passed_when_target_by_host_and_port(self): def test_config_passes_source_tag_when_set(self): pc = PineconeGRPC(api_key="YOUR_API_KEY", source_tag="my_source_tag") assert ( - re.search(r"source_tag=my_source_tag", pc.index_api.api_client.user_agent) is not None + re.search(r"source_tag=my_source_tag", pc.db._index_api.api_client.user_agent) + is not None ) From 7e94a40d3cd01c8b8d971b2a7edea7440c836752 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Wed, 7 May 2025 15:46:27 -0400 Subject: [PATCH 25/48] Fix 
inference --- pinecone/pinecone.py | 4 ++-- pinecone/pinecone_asyncio.py | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py index 73168bbc..3fac67c6 100644 --- a/pinecone/pinecone.py +++ b/pinecone/pinecone.py @@ -295,8 +295,8 @@ def Index(self, name: str = "", host: str = "", **kwargs) -> "Index": def IndexAsyncio(self, host: str, **kwargs) -> "IndexAsyncio": from pinecone.db_data import _IndexAsyncio - api_key = self.config.api_key - openapi_config = self.openapi_config + api_key = self._config.api_key + openapi_config = self._openapi_config if host is None or host == "": raise ValueError("A host must be specified") diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index c4736591..993ba14e 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -83,7 +83,7 @@ def __init__( f"You have passed {unimplemented_kwarg} but this configuration has not been implemented for PineconeAsyncio." 
) - self.config = PineconeConfig.build( + self._config = PineconeConfig.build( api_key=api_key, host=host, additional_headers=additional_headers, @@ -95,7 +95,7 @@ def __init__( ) """ @private """ - self.openapi_config = ConfigBuilder.build_openapi_config(self.config, **kwargs) + self._openapi_config = ConfigBuilder.build_openapi_config(self._config, **kwargs) """ @private """ self._inference = None # Lazy initialization @@ -147,7 +147,7 @@ async def main(): ``` """ - await self.index_api.api_client.close() + await self.db._index_api.api_client.close() @property def inference(self): @@ -155,7 +155,7 @@ def inference(self): if self._inference is None: from pinecone.db_data import _AsyncioInference - self._inference = _AsyncioInference(api_client=self.index_api.api_client) + self._inference = _AsyncioInference(api_client=self.db._index_api.api_client) return self._inference @property @@ -164,7 +164,7 @@ def db(self): from .db_control.db_control_asyncio import DBControlAsyncio self._db_control = DBControlAsyncio( - config=self.config, openapi_config=self.openapi_config + config=self._config, openapi_config=self._openapi_config ) return self._db_control From 2d65da76a798d010ed91e0092afb7abe6da9127b Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Thu, 8 May 2025 09:51:23 -0400 Subject: [PATCH 26/48] Fix data tests --- pinecone/__init__.py | 2 +- pinecone/pinecone.py | 4 +--- pinecone/pinecone_asyncio.py | 4 +--- tests/integration/conftest.py | 2 +- tests/integration/control/index/conftest.py | 21 ++--------------- tests/integration/data/conftest.py | 25 ++++++++++++++++----- tests/integration/data/seed.py | 4 +++- tests/integration/helpers/__init__.py | 1 + tests/integration/helpers/helpers.py | 19 ++++++++++++++++ 9 files changed, 48 insertions(+), 34 deletions(-) diff --git a/pinecone/__init__.py b/pinecone/__init__.py index 51112c63..8d4d306e 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -26,7 +26,7 @@ "SearchQuery": ("pinecone.db_data.dataclasses", 
"SearchQuery"), "SearchQueryVector": ("pinecone.db_data.dataclasses", "SearchQueryVector"), "SearchRerank": ("pinecone.db_data.dataclasses", "SearchRerank"), - "FetchResponse": ("pinecone.db_data.models", "FetchResponse"), + "FetchResponse": ("pinecone.db_data.dataclasses", "FetchResponse"), "DeleteRequest": ("pinecone.db_data.models", "DeleteRequest"), "DescribeIndexStatsRequest": ("pinecone.db_data.models", "DescribeIndexStatsRequest"), "DescribeIndexStatsResponse": ("pinecone.db_data.models", "IndexDescription"), diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py index 3fac67c6..e5bc112a 100644 --- a/pinecone/pinecone.py +++ b/pinecone/pinecone.py @@ -199,9 +199,7 @@ def create_index_for_model( region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], tags: Optional[Dict[str, str]] = None, - deletion_protection: Optional[ - Union["DeletionProtection", str] - ] = "DeletionProtection.DISABLED", + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", timeout: Optional[int] = None, ) -> "IndexModel": return self.db.index.create_for_model( diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index 993ba14e..b56b1d2f 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -220,9 +220,7 @@ async def create_index_for_model( region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], tags: Optional[Dict[str, str]] = None, - deletion_protection: Optional[ - Union["DeletionProtection", str] - ] = "DeletionProtection.DISABLED", + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", timeout: Optional[int] = None, ) -> "IndexModel": return await self.db.index.create_for_model( diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 181ab04a..e42eedae 100644 --- a/tests/integration/conftest.py +++ 
b/tests/integration/conftest.py @@ -30,7 +30,7 @@ def pytest_sessionfinish(session, exitstatus): if datetime.now(created_at.tzinfo) - created_at > timedelta(hours=1): logger.info(f"Cleaning up old test index: {index_name}") - pc.delete_index(index_name, timeout=-1) + pc.delete_index(name=index_name, timeout=-1) except Exception as e: logger.warning(f"Failed to clean up index {index_name}: {str(e)}") diff --git a/tests/integration/control/index/conftest.py b/tests/integration/control/index/conftest.py index 805795f6..985c4bb6 100644 --- a/tests/integration/control/index/conftest.py +++ b/tests/integration/control/index/conftest.py @@ -3,10 +3,8 @@ import time import logging import dotenv -import os -from datetime import datetime from pinecone import Pinecone, NotFoundException, PineconeApiException -from ...helpers import generate_index_name, get_environment_var +from ...helpers import generate_index_name, get_environment_var, index_tags as index_tags_helper dotenv.load_dotenv() @@ -19,22 +17,7 @@ @pytest.fixture() def index_tags(request): - test_name = request.node.name - if test_name is None: - test_name = "" - else: - test_name = test_name.replace(":", "_").replace("[", "_").replace("]", "_") - - tags = { - "test-suite": "pinecone-python-client", - "test-run": RUN_ID, - "test": test_name, - "created-at": datetime.now().strftime("%Y-%m-%d"), - } - - if os.getenv("USER"): - tags["user"] = os.getenv("USER") - return tags + return index_tags_helper(request, RUN_ID) @pytest.fixture() diff --git a/tests/integration/data/conftest.py b/tests/integration/data/conftest.py index c7498cb8..9fa7b997 100644 --- a/tests/integration/data/conftest.py +++ b/tests/integration/data/conftest.py @@ -1,12 +1,20 @@ import pytest import os import json -from ..helpers import get_environment_var, generate_index_name +import uuid +from ..helpers import get_environment_var, generate_index_name, index_tags as index_tags_helper import logging from pinecone import EmbedModel, CloudProvider, 
AwsRegion, IndexEmbed logger = logging.getLogger(__name__) +RUN_ID = str(uuid.uuid4()) + + +@pytest.fixture(scope="session") +def index_tags(request): + return index_tags_helper(request, RUN_ID) + def api_key(): return get_environment_var("PINECONE_API_KEY") @@ -90,7 +98,7 @@ def model_idx(client, model_index_name, model_index_host): @pytest.fixture(scope="session") -def model_index_host(model_index_name): +def model_index_host(model_index_name, index_tags): pc = build_client() if model_index_name not in pc.list_indexes().names(): @@ -104,6 +112,7 @@ def model_index_host(model_index_name): field_map={"text": "my_text_field"}, metric="cosine", ), + tags=index_tags, ) else: logger.info(f"Index {model_index_name} already exists") @@ -116,12 +125,12 @@ def model_index_host(model_index_name): @pytest.fixture(scope="session") -def index_host(index_name, metric, spec): +def index_host(index_name, metric, spec, index_tags): pc = build_client() if index_name not in pc.list_indexes().names(): logger.info(f"Creating index {index_name}") - pc.create_index(name=index_name, dimension=2, metric=metric, spec=spec) + pc.create_index(name=index_name, dimension=2, metric=metric, spec=spec, tags=index_tags) else: logger.info(f"Index {index_name} already exists") @@ -133,13 +142,17 @@ def index_host(index_name, metric, spec): @pytest.fixture(scope="session") -def sparse_index_host(sparse_index_name, spec): +def sparse_index_host(sparse_index_name, spec, index_tags): pc = build_client() if sparse_index_name not in pc.list_indexes().names(): logger.info(f"Creating index {sparse_index_name}") pc.create_index( - name=sparse_index_name, metric="dotproduct", spec=spec, vector_type="sparse" + name=sparse_index_name, + metric="dotproduct", + spec=spec, + vector_type="sparse", + tags=index_tags, ) else: logger.info(f"Index {sparse_index_name} already exists") diff --git a/tests/integration/data/seed.py b/tests/integration/data/seed.py index 827aea9a..2019761e 100644 --- 
a/tests/integration/data/seed.py +++ b/tests/integration/data/seed.py @@ -1,6 +1,5 @@ from ..helpers import poll_fetch_for_ids_in_namespace, embedding_values from pinecone import Vector - import itertools @@ -133,3 +132,6 @@ def setup_weird_ids_data(idx, target_namespace, wait): for i in range(0, len(weird_ids), batch_size): chunk = weird_ids[i : i + batch_size] idx.upsert(vectors=[(x, embedding_values(2)) for x in chunk], namespace=target_namespace) + + if wait: + poll_fetch_for_ids_in_namespace(idx, ids=weird_ids, namespace=target_namespace) diff --git a/tests/integration/helpers/__init__.py b/tests/integration/helpers/__init__.py index f233d089..3b680b3d 100644 --- a/tests/integration/helpers/__init__.py +++ b/tests/integration/helpers/__init__.py @@ -8,4 +8,5 @@ poll_fetch_for_ids_in_namespace, embedding_values, jsonprint, + index_tags, ) diff --git a/tests/integration/helpers/helpers.py b/tests/integration/helpers/helpers.py index 6688f288..d9990df4 100644 --- a/tests/integration/helpers/helpers.py +++ b/tests/integration/helpers/helpers.py @@ -123,3 +123,22 @@ def fake_api_key(): def jsonprint(obj): print(json.dumps(obj.to_dict(), indent=2)) + + +def index_tags(request, run_id): + test_name = request.node.name + if test_name is None: + test_name = "" + else: + test_name = test_name.replace(":", "_").replace("[", "_").replace("]", "_") + + tags = { + "test-suite": "pinecone-python-client", + "test-run": run_id, + "test": test_name, + "created-at": datetime.now().strftime("%Y-%m-%d"), + } + + if os.getenv("USER"): + tags["user"] = os.getenv("USER") + return tags From f2a3e82916080dd875d511da869fd250317b0fa8 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Thu, 8 May 2025 11:42:15 -0400 Subject: [PATCH 27/48] Fix mypy errors --- pinecone/pinecone_asyncio.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index b56b1d2f..d9906362 100644 --- a/pinecone/pinecone_asyncio.py +++ 
b/pinecone/pinecone_asyncio.py @@ -276,8 +276,8 @@ async def describe_collection(self, name: str): def IndexAsyncio(self, host: str, **kwargs) -> "_IndexAsyncio": from pinecone.db_data import _IndexAsyncio - api_key = self.config.api_key - openapi_config = self.openapi_config + api_key = self._config.api_key + openapi_config = self._openapi_config if host is None or host == "": raise ValueError("A host must be specified") @@ -289,6 +289,6 @@ def IndexAsyncio(self, host: str, **kwargs) -> "_IndexAsyncio": host=index_host, api_key=api_key, openapi_config=openapi_config, - source_tag=self.config.source_tag, + source_tag=self._config.source_tag, **kwargs, ) From 497b0f9b519d89d48efadb8ba32c08c8753df691 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Thu, 8 May 2025 12:57:43 -0400 Subject: [PATCH 28/48] Add missing exports --- pinecone/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pinecone/__init__.py b/pinecone/__init__.py index 8d4d306e..b2c0f5c8 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -53,6 +53,8 @@ "pinecone.db_data.errors", "SparseValuesDictionaryExpectedError", ), + "Index": ("pinecone.db_data.import_error", "Index"), + "Inference": ("pinecone.db_data.import_error", "Inference"), } _db_control_lazy_imports = { From 7a37ef55c588dc7abba8d26b7b62dee3731ae292 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Thu, 8 May 2025 15:09:18 -0400 Subject: [PATCH 29/48] Fix async tests --- pinecone/pinecone_asyncio.py | 8 +++---- tests/integration/control_asyncio/conftest.py | 2 +- .../control_asyncio/test_create_index.py | 22 +++++++++---------- 3 files changed, 14 insertions(+), 18 deletions(-) diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index d9906362..278039e6 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -193,12 +193,10 @@ async def create_index( name: str, spec: Union[Dict, "ServerlessSpec", "PodSpec"], dimension: Optional[int] = None, - metric: Optional[Union["Metric", 
str]] = "Metric.COSINE", + metric: Optional[Union["Metric", str]] = "cosine", timeout: Optional[int] = None, - deletion_protection: Optional[ - Union["DeletionProtection", str] - ] = "DeletionProtection.DISABLED", - vector_type: Optional[Union["VectorType", str]] = "VectorType.DENSE", + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + vector_type: Optional[Union["VectorType", str]] = "dense", tags: Optional[Dict[str, str]] = None, ) -> "IndexModel": resp = await self.db.index.create( diff --git a/tests/integration/control_asyncio/conftest.py b/tests/integration/control_asyncio/conftest.py index 99cf3c91..33c2b529 100644 --- a/tests/integration/control_asyncio/conftest.py +++ b/tests/integration/control_asyncio/conftest.py @@ -201,6 +201,6 @@ def cleanup(client, index_name): try: logger.debug("Attempting to delete index with name: " + index_name) - client.delete_index(index_name, -1) + client.delete_index(name=index_name, timeout=-1) except Exception: pass diff --git a/tests/integration/control_asyncio/test_create_index.py b/tests/integration/control_asyncio/test_create_index.py index 334ba86a..683c53a8 100644 --- a/tests/integration/control_asyncio/test_create_index.py +++ b/tests/integration/control_asyncio/test_create_index.py @@ -122,18 +122,16 @@ async def test_create_with_optional_tags(self, index_name, spec1): await pc.close() async def test_create_sparse_index(self, index_name, spec1): - pc = PineconeAsyncio() - - await pc.create_index( - name=index_name, spec=spec1, metric=Metric.DOTPRODUCT, vector_type=VectorType.SPARSE - ) - - desc = await pc.describe_index(index_name) - assert desc.vector_type == "sparse" - assert desc.dimension is None - assert desc.vector_type == "sparse" - assert desc.metric == "dotproduct" - await pc.close() + async with PineconeAsyncio() as pc: + await pc.create_index( + name=index_name, spec=spec1, metric=Metric.DOTPRODUCT, vector_type=VectorType.SPARSE + ) + + desc = await 
pc.describe_index(index_name) + assert desc.vector_type == "sparse" + assert desc.dimension is None + assert desc.vector_type == "sparse" + assert desc.metric == "dotproduct" async def test_create_with_deletion_protection(self, index_name, spec1): pc = PineconeAsyncio() From 1bb8551a4eed2f9e196a1405a576d7bcc70b7c64 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Wed, 14 May 2025 00:59:46 -0400 Subject: [PATCH 30/48] Add development repl script --- pyproject.toml | 3 +++ scripts/repl.py | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+) create mode 100644 scripts/repl.py diff --git a/pyproject.toml b/pyproject.toml index 788b2870..0a239e3a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,6 +31,9 @@ classifiers=[ "Topic :: Software Development :: Libraries :: Python Modules" ] +[tool.poetry.scripts] +repl = "scripts.repl:main" + [tool.poetry.dependencies] python = "^3.9" typing-extensions = ">=3.7.4" diff --git a/scripts/repl.py b/scripts/repl.py new file mode 100644 index 00000000..cacf3355 --- /dev/null +++ b/scripts/repl.py @@ -0,0 +1,38 @@ +import dotenv +import code +from pinecone import Pinecone +import logging + + +def main(): + # You can add any setup code here, such as: + # - Setting environment variables + # - Importing commonly used modules + # - Setting up logging + # - Loading configuration files + + dotenv.load_dotenv() + logging.basicConfig( + level=logging.DEBUG, format="%(levelname)-8s | %(name)s:%(lineno)d | %(message)s" + ) + + # Start the interactive REPL + banner = """ + Welcome to the custom Python REPL! + Your initialization steps have been completed. 
+ """ + + # Create a custom namespace with any pre-loaded variables + namespace = { + "__name__": "__main__", + "__doc__": None, + "pc": Pinecone(), + # Add any other variables you want to have available in the REPL + } + + # Start the interactive console + code.interact(banner=banner, local=namespace) + + +if __name__ == "__main__": + main() From 5c0e37c41d662e55abe9dd84be74069c619b137a Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Wed, 14 May 2025 03:17:20 -0400 Subject: [PATCH 31/48] Implement backup & restore (#479) ## Problem Implement backup & restore ## Solution Added new methods to `Pinecone` and `PineconeAsyncio`: - `create_index_from_backup` - `create_backup` - `list_backups` - `describe_backup` - `delete_backup` - `list_restore_jobs` - `describe_restore_job` These can also be accessed with the new-style syntax, e.g. `pc.db.index.create_from_backup`, `pc.db.backup.create`, `pc.db.restore_job.list`. More Details: - Had to re-run codegen to pull in recent spec changes - Organize implementation around resource-types - Expose legacy-style names (`create_backup`, `create_index_from_backups`) as well as new-style names `pc.db.index.create_from_backup`. In the upcoming release, both styles will be present. We still need to do some work to reorg methods for some less-used parts of the client (bulk imports, etc) before transitioning fully to the new style in examples and documentation. - For new methods being added, begin enforcing keyword argument usage with a new `@kwargs_required` decorator. I will probably follow up and add this to all new methods added in the recent refactoring PR. Keyword arguments are strongly preferred over positional arguments because the keyword labels act as documentation and having the keyword labels makes them order-independent. This gives a lot of flexibility to expand the signature or change things from required to optional later without creating breaking changes for callers. 
- Wire up the code paths for new methods: - `Pinecone > DbControl > BackupResource` - `Pinecone > DbControl > IndexResource` - `Pinecone > DbControl > RestoreJobResource` - `PineconeAsyncio > DbControlAsyncio > AsyncioBackupResource` - `PineconeAsyncio > DbControlAsyncio > AsyncioIndexResource` - `PineconeAsyncio > DbControlAsyncio > AsyncioRestoreJobResource` - Update interface classes so that docs will show information about the new methods. ## Usage ### Initial setup ```python from pinecone import Pinecone, ServerlessSpec pc = Pinecone(api_key='key') # First you need an index pc.create_index( name='foo', dimension=2, metric='cosine', spec=ServerlessSpec(cloud='aws', region='us-east-1') ) # Upsert some fake data just for demonstration purposes import random idx = pc.Index(name='foo') idx.upsert( vectors=[ (str(i), [random.random(), random.random()] for i in range(1000) ] ) ``` ### Backups ```python pc.create_backup( index_name='foo', backup_name='bar', description='an example backup' ) # Describe a backup pc.describe_backup(backup_id='7c8e6fcf-577b-4df5-9869-3c67f0f3d6e1') # { # "backup_id": "7c8e6fcf-577b-4df5-9869-3c67f0f3d6e1", # "source_index_name": "foo", # "source_index_id": "4c292a8a-77cc-4a37-917d-51c6051a80bf", # "status": "Ready", # "cloud": "aws", # "region": "us-east-1", # "tags": {}, # "name": "bar", # "description": "", # "dimension": 2, # "record_count": 1000, # "namespace_count": 1, # "size_bytes": 289392, # "created_at": "2025-05-13T14:15:16.908702Z" # } # List backups pc.list_backups() # [ # { # "backup_id": "7c8e6fcf-577b-4df5-9869-3c67f0f3d6e1", # "source_index_name": "foo", # "source_index_id": "4c292a8a-77cc-4a37-917d-51c6051a80bf", # "status": "Ready", # "cloud": "aws", # "region": "us-east-1", # "tags": {}, # "name": "bar", # "description": "", # "dimension": 2, # "record_count": 1000, # "namespace_count": 1, # "size_bytes": 289392, # "created_at": "2025-05-13T14:15:16.908702Z" # } # ] # Delete backup 
pc.delete_backup(backup_id='7c8e6fcf-577b-4df5-9869-3c67f0f3d6e1') ``` ### Creating an index from backup ```python # Create index from backup pc.create_index_from_backup( backup_id='7c8e6fcf-577b-4df5-9869-3c67f0f3d6e1', name='foo2', deletion_protection='enabled', tags={'env': 'testing'} ) # { # "name": "foo2", # "metric": "cosine", # "host": "foo2-dojoi3u.svc.aped-4627-b74a.pinecone.io", # "spec": { # "serverless": { # "cloud": "aws", # "region": "us-east-1" # } # }, # "status": { # "ready": true, # "state": "Ready" # }, # "vector_type": "dense", # "dimension": 2, # "deletion_protection": "enabled", # "tags": { # "env": "testing" # } # } ``` ### Restore job ```python # List jobs pc.list_restore_jobs() # {'data': [{'backup_id': 'e5957dc2-a76e-4b72-9645-569fb7ec143f', # 'completed_at': datetime.datetime(2025, 5, 13, 14, 56, 13, 939921, tzinfo=tzutc()), # 'created_at': datetime.datetime(2025, 5, 13, 14, 56, 4, 534826, tzinfo=tzutc()), # 'percent_complete': 100.0, # 'restore_job_id': '744ea5bd-7ddc-44ce-81f5-cfb876572e59', # 'status': 'Completed', # 'target_index_id': '572130f9-cfdd-42bf-a280-4218cd112bf8', # 'target_index_name': 'foo2'}, # {'backup_id': '7c8e6fcf-577b-4df5-9869-3c67f0f3d6e1', # 'completed_at': datetime.datetime(2025, 5, 13, 16, 27, 10, 290234, tzinfo=tzutc()), # 'created_at': datetime.datetime(2025, 5, 13, 16, 27, 6, 130522, tzinfo=tzutc()), # 'percent_complete': 100.0, # 'restore_job_id': '06aa5739-2785-4121-b71b-99b73c3e3247', # 'status': 'Completed', # 'target_index_id': 'd3f31cd1-b077-4bcf-8e7d-d091d408c82b', # 'target_index_name': 'foo2'}], # 'pagination': None} # Describe jobs pc.describe_restore_job(job_id='504dd1a9-e3cd-420f-8756-65d5411fcb10') # {'backup_id': '7c8e6fcf-577b-4df5-9869-3c67f0f3d6e1', # 'completed_at': datetime.datetime(2025, 5, 13, 15, 55, 10, 108584, tzinfo=tzutc()), # 'created_at': datetime.datetime(2025, 5, 13, 15, 54, 49, 925105, tzinfo=tzutc()), # 'percent_complete': 100.0, # 'restore_job_id': 
'504dd1a9-e3cd-420f-8756-65d5411fcb10', # 'status': 'Completed', # 'target_index_id': 'b5607ee7-be78-4401-aaf5-ea20413f409d', # 'target_index_name': 'foo4'} ``` ## Type of Change - [x] New feature (non-breaking change which adds functionality) ## Test Plan Describe specific steps for validating this change. --- .github/workflows/testing-integration.yaml | 16 +- codegen/apis | 2 +- codegen/python-oas-templates | 2 +- pinecone/__init__.py | 4 + .../db_control/api/manage_indexes_api.py | 73 +-- .../model/{dedicated_spec.py => byoc_spec.py} | 8 +- .../db_control/model/create_backup_request.py | 8 +- .../create_index_from_backup_response.py | 272 +++++++++++ .../db_control/model/deletion_protection.py | 8 +- .../db_control/model/index_model_spec.py | 12 +- .../openapi/db_control/model/index_spec.py | 12 +- .../openapi/db_control/models/__init__.py | 5 +- .../db_data/api/bulk_operations_api.py | 16 +- .../db_data/api/namespace_operations_api.py | 443 ++++++++++++++++++ .../db_data/api/vector_operations_api.py | 32 +- .../core/openapi/db_data/apis/__init__.py | 1 + .../openapi/db_data/model/delete_request.py | 4 +- .../model/describe_index_stats_request.py | 4 +- .../db_data/model/list_namespaces_response.py | 274 +++++++++++ .../db_data/model/namespace_description.py | 264 +++++++++++ .../openapi/db_data/model/query_request.py | 4 +- .../model/search_records_request_query.py | 4 +- .../model/search_records_request_rerank.py | 12 +- .../db_data/model/start_import_request.py | 4 +- .../core/openapi/db_data/models/__init__.py | 2 + .../openapi/inference/api/inference_api.py | 8 +- .../openapi/inference/model/embed_request.py | 8 +- .../openapi/inference/model/model_info.py | 54 ++- .../openapi/inference/model/rerank_request.py | 12 +- pinecone/db_control/db_control.py | 29 +- pinecone/db_control/db_control_asyncio.py | 27 +- pinecone/db_control/models/__init__.py | 9 + pinecone/db_control/models/backup_list.py | 49 ++ pinecone/db_control/models/backup_model.py | 23 + 
pinecone/db_control/models/index_model.py | 5 + .../db_control/models/restore_job_list.py | 50 ++ .../db_control/models/restore_job_model.py | 25 + pinecone/db_control/repr_overrides.py | 5 +- pinecone/db_control/request_factory.py | 19 +- .../db_control/resources/asyncio/backup.py | 93 ++++ .../db_control/resources/asyncio/index.py | 32 +- .../resources/asyncio/restore_job.py | 56 +++ pinecone/db_control/resources/sync/backup.py | 87 ++++ pinecone/db_control/resources/sync/index.py | 37 +- .../db_control/resources/sync/restore_job.py | 56 +++ pinecone/legacy_pinecone_interface.py | 105 +++++ pinecone/openapi_support/api_version.py | 2 +- pinecone/pinecone.py | 62 ++- pinecone/pinecone_asyncio.py | 66 ++- pinecone/pinecone_interface_asyncio.py | 105 +++++ pinecone/scripts/repl.py | 52 ++ pinecone/utils/__init__.py | 2 + pinecone/utils/require_kwargs.py | 16 + tests/integration/control/backup/__init__.py | 0 tests/integration/control/backup/conftest.py | 168 +++++++ .../integration/control/backup/test_backup.py | 195 ++++++++ .../control/restore_job/__init__.py | 0 .../control/restore_job/conftest.py | 168 +++++++ .../control/restore_job/test_describe.py | 38 ++ .../control/restore_job/test_list.py | 58 +++ .../control_asyncio/backup/__init__.py | 0 .../control_asyncio/backup/conftest.py | 220 +++++++++ .../control_asyncio/backup/test_backup.py | 205 ++++++++ .../control_asyncio/restore_job/__init__.py | 0 .../control_asyncio/restore_job/conftest.py | 220 +++++++++ .../restore_job/test_describe.py | 42 ++ .../control_asyncio/restore_job/test_list.py | 67 +++ tests/integration/helpers/helpers.py | 3 + 68 files changed, 3807 insertions(+), 157 deletions(-) rename pinecone/core/openapi/db_control/model/{dedicated_spec.py => byoc_spec.py} (98%) create mode 100644 pinecone/core/openapi/db_control/model/create_index_from_backup_response.py create mode 100644 pinecone/core/openapi/db_data/api/namespace_operations_api.py create mode 100644 
pinecone/core/openapi/db_data/model/list_namespaces_response.py create mode 100644 pinecone/core/openapi/db_data/model/namespace_description.py create mode 100644 pinecone/db_control/models/backup_list.py create mode 100644 pinecone/db_control/models/backup_model.py create mode 100644 pinecone/db_control/models/restore_job_list.py create mode 100644 pinecone/db_control/models/restore_job_model.py create mode 100644 pinecone/db_control/resources/asyncio/backup.py create mode 100644 pinecone/db_control/resources/asyncio/restore_job.py create mode 100644 pinecone/db_control/resources/sync/backup.py create mode 100644 pinecone/db_control/resources/sync/restore_job.py create mode 100644 pinecone/scripts/repl.py create mode 100644 pinecone/utils/require_kwargs.py create mode 100644 tests/integration/control/backup/__init__.py create mode 100644 tests/integration/control/backup/conftest.py create mode 100644 tests/integration/control/backup/test_backup.py create mode 100644 tests/integration/control/restore_job/__init__.py create mode 100644 tests/integration/control/restore_job/conftest.py create mode 100644 tests/integration/control/restore_job/test_describe.py create mode 100644 tests/integration/control/restore_job/test_list.py create mode 100644 tests/integration/control_asyncio/backup/__init__.py create mode 100644 tests/integration/control_asyncio/backup/conftest.py create mode 100644 tests/integration/control_asyncio/backup/test_backup.py create mode 100644 tests/integration/control_asyncio/restore_job/__init__.py create mode 100644 tests/integration/control_asyncio/restore_job/conftest.py create mode 100644 tests/integration/control_asyncio/restore_job/test_describe.py create mode 100644 tests/integration/control_asyncio/restore_job/test_list.py diff --git a/.github/workflows/testing-integration.yaml b/.github/workflows/testing-integration.yaml index 53023bde..fa153f7e 100644 --- a/.github/workflows/testing-integration.yaml +++ 
b/.github/workflows/testing-integration.yaml @@ -11,8 +11,17 @@ jobs: PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client"}' strategy: + fail-fast: false matrix: python_version: [3.9, 3.12] + test_suite: + - tests/integration/control/index + - tests/integration/control/collections + - tests/integration/control/backup + - tests/integration/control/restore_job + - tests/integration/control_asyncio/index + - tests/integration/control_asyncio/backup + - tests/integration/control_asyncio/restore_job steps: - uses: actions/checkout@v4 - name: 'Set up Python ${{ matrix.python_version }}' @@ -23,11 +32,8 @@ jobs: uses: ./.github/actions/setup-poetry with: include_asyncio: true - - name: 'Run index tests' - run: poetry run pytest tests/integration/control/index --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG - - name: 'Run collection tests' - run: poetry run pytest tests/integration/control/collections --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG - + - name: 'Run tests' + run: poetry run pytest ${{ matrix.test_suite }} --retries 2 --retry-delay 35 -s -vv --log-cli-level=DEBUG inference: name: Inference tests diff --git a/codegen/apis b/codegen/apis index ba143abc..09015d91 160000 --- a/codegen/apis +++ b/codegen/apis @@ -1 +1 @@ -Subproject commit ba143abc7449abfcf0b6635f1aabff2400dac762 +Subproject commit 09015d9106f2578e473f45f55120aafc5c559f2a diff --git a/codegen/python-oas-templates b/codegen/python-oas-templates index 0f6ff685..c7c75f57 160000 --- a/codegen/python-oas-templates +++ b/codegen/python-oas-templates @@ -1 +1 @@ -Subproject commit 0f6ff68585355dd11e959e05859928d878d7854b +Subproject commit c7c75f57c6dfd0228a7bead444ea1004c57e0de3 diff --git a/pinecone/__init__.py b/pinecone/__init__.py index b2c0f5c8..d6d73c95 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -76,6 +76,10 @@ "PodSpec": ("pinecone.db_control.models", "PodSpec"), "PodSpecDefinition": 
("pinecone.db_control.models", "PodSpecDefinition"), "PodType": ("pinecone.db_control.enums", "PodType"), + "RestoreJobModel": ("pinecone.db_control.models", "RestoreJobModel"), + "RestoreJobList": ("pinecone.db_control.models", "RestoreJobList"), + "BackupModel": ("pinecone.db_control.models", "BackupModel"), + "BackupList": ("pinecone.db_control.models", "BackupList"), } _config_lazy_imports = { diff --git a/pinecone/core/openapi/db_control/api/manage_indexes_api.py b/pinecone/core/openapi/db_control/api/manage_indexes_api.py index 3796fd26..ae478017 100644 --- a/pinecone/core/openapi/db_control/api/manage_indexes_api.py +++ b/pinecone/core/openapi/db_control/api/manage_indexes_api.py @@ -36,6 +36,9 @@ from pinecone.core.openapi.db_control.model.create_index_from_backup_request import ( CreateIndexFromBackupRequest, ) +from pinecone.core.openapi.db_control.model.create_index_from_backup_response import ( + CreateIndexFromBackupResponse, +) from pinecone.core.openapi.db_control.model.create_index_request import CreateIndexRequest from pinecone.core.openapi.db_control.model.error_response import ErrorResponse from pinecone.core.openapi.db_control.model.index_list import IndexList @@ -281,7 +284,7 @@ def __create_collection( def __create_index(self, create_index_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Create an index # noqa: E501 - Create a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#create-a-serverless-index). # noqa: E501 + Create a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. 
For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/index-data/create-an-index). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -352,7 +355,7 @@ def __create_index_for_model( ): """Create an index with integrated embedding # noqa: E501 - Create an index with integrated embedding. With this type of index, you provide source text, and Pinecone uses a [hosted embedding model](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) to convert the text automatically during [upsert](https://docs.pinecone.io/reference/api/2025-01/data-plane/upsert_records) and [search](https://docs.pinecone.io/reference/api/2025-01/data-plane/search_records). For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). # noqa: E501 + Create an index with integrated embedding. With this type of index, you provide source text, and Pinecone uses a [hosted embedding model](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) to convert the text automatically during [upsert](https://docs.pinecone.io/reference/api/2025-01/data-plane/upsert_records) and [search](https://docs.pinecone.io/reference/api/2025-01/data-plane/search_records). For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/index-data/create-an-index#integrated-embedding). # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True @@ -418,7 +421,7 @@ def __create_index_for_model( callable=__create_index_for_model, ) - def __create_index_from_backup( + def __create_index_from_backup_operation( self, backup_id, create_index_from_backup_request, **kwargs: ExtraOpenApiKwargsTypedDict ): """Create an index from a backup # noqa: E501 @@ -427,7 +430,7 @@ def __create_index_from_backup( This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_index_from_backup(backup_id, create_index_from_backup_request, async_req=True) + >>> thread = api.create_index_from_backup_operation(backup_id, create_index_from_backup_request, async_req=True) >>> result = thread.get() Args: @@ -453,7 +456,7 @@ def __create_index_from_backup( async_req (bool): execute request asynchronously Returns: - None + CreateIndexFromBackupResponse If the method is called asynchronously, returns the request thread. 
""" @@ -462,12 +465,12 @@ def __create_index_from_backup( kwargs["create_index_from_backup_request"] = create_index_from_backup_request return self.call_with_http_info(**kwargs) - self.create_index_from_backup = _Endpoint( + self.create_index_from_backup_operation = _Endpoint( settings={ - "response_type": None, + "response_type": (CreateIndexFromBackupResponse,), "auth": ["ApiKeyAuth"], "endpoint_path": "/backups/{backup_id}/create-index", - "operation_id": "create_index_from_backup", + "operation_id": "create_index_from_backup_operation", "http_method": "POST", "servers": None, }, @@ -491,7 +494,7 @@ def __create_index_from_backup( }, headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__create_index_from_backup, + callable=__create_index_from_backup_operation, ) def __delete_backup(self, backup_id, **kwargs: ExtraOpenApiKwargsTypedDict): @@ -1192,6 +1195,8 @@ def __list_project_backups(self, **kwargs: ExtraOpenApiKwargsTypedDict): Keyword Args: + limit (int): The number of results to return per page. [optional] if omitted the server will use the default value of 10. + pagination_token (str): The token to use to retrieve the next page of results. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. 
_preload_content (bool): if False, the urllib3.HTTPResponse object @@ -1226,13 +1231,19 @@ def __list_project_backups(self, **kwargs: ExtraOpenApiKwargsTypedDict): "http_method": "GET", "servers": None, }, - params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + params_map={ + "all": ["limit", "pagination_token"], + "required": [], + "nullable": [], + "enum": [], + "validation": ["limit"], + }, root_map={ - "validations": {}, + "validations": {("limit",): {"inclusive_maximum": 100, "inclusive_minimum": 1}}, "allowed_values": {}, - "openapi_types": {}, - "attribute_map": {}, - "location_map": {}, + "openapi_types": {"limit": (int,), "pagination_token": (str,)}, + "attribute_map": {"limit": "limit", "pagination_token": "paginationToken"}, + "location_map": {"limit": "query", "pagination_token": "query"}, "collection_format_map": {}, }, headers_map={"accept": ["application/json"], "content_type": []}, @@ -1519,7 +1530,7 @@ async def __create_collection(self, create_collection_request, **kwargs): async def __create_index(self, create_index_request, **kwargs): """Create an index # noqa: E501 - Create a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#create-a-serverless-index). # noqa: E501 + Create a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/index-data/create-an-index). 
# noqa: E501 Args: @@ -1581,7 +1592,7 @@ async def __create_index(self, create_index_request, **kwargs): async def __create_index_for_model(self, create_index_for_model_request, **kwargs): """Create an index with integrated embedding # noqa: E501 - Create an index with integrated embedding. With this type of index, you provide source text, and Pinecone uses a [hosted embedding model](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) to convert the text automatically during [upsert](https://docs.pinecone.io/reference/api/2025-01/data-plane/upsert_records) and [search](https://docs.pinecone.io/reference/api/2025-01/data-plane/search_records). For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). # noqa: E501 + Create an index with integrated embedding. With this type of index, you provide source text, and Pinecone uses a [hosted embedding model](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) to convert the text automatically during [upsert](https://docs.pinecone.io/reference/api/2025-01/data-plane/upsert_records) and [search](https://docs.pinecone.io/reference/api/2025-01/data-plane/search_records). For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/index-data/create-an-index#integrated-embedding). # noqa: E501 Args: @@ -1640,7 +1651,7 @@ async def __create_index_for_model(self, create_index_for_model_request, **kwarg callable=__create_index_for_model, ) - async def __create_index_from_backup( + async def __create_index_from_backup_operation( self, backup_id, create_index_from_backup_request, **kwargs ): """Create an index from a backup # noqa: E501 @@ -1670,19 +1681,19 @@ async def __create_index_from_backup( Default is True. 
Returns: - None + CreateIndexFromBackupResponse """ self._process_openapi_kwargs(kwargs) kwargs["backup_id"] = backup_id kwargs["create_index_from_backup_request"] = create_index_from_backup_request return await self.call_with_http_info(**kwargs) - self.create_index_from_backup = _AsyncioEndpoint( + self.create_index_from_backup_operation = _AsyncioEndpoint( settings={ - "response_type": None, + "response_type": (CreateIndexFromBackupResponse,), "auth": ["ApiKeyAuth"], "endpoint_path": "/backups/{backup_id}/create-index", - "operation_id": "create_index_from_backup", + "operation_id": "create_index_from_backup_operation", "http_method": "POST", "servers": None, }, @@ -1706,7 +1717,7 @@ async def __create_index_from_backup( }, headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__create_index_from_backup, + callable=__create_index_from_backup_operation, ) async def __delete_backup(self, backup_id, **kwargs): @@ -2333,6 +2344,8 @@ async def __list_project_backups(self, **kwargs): Keyword Args: + limit (int): The number of results to return per page. [optional] if omitted the server will use the default value of 10. + pagination_token (str): The token to use to retrieve the next page of results. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. 
_preload_content (bool): if False, the urllib3.HTTPResponse object @@ -2364,13 +2377,19 @@ async def __list_project_backups(self, **kwargs): "http_method": "GET", "servers": None, }, - params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + params_map={ + "all": ["limit", "pagination_token"], + "required": [], + "nullable": [], + "enum": [], + "validation": ["limit"], + }, root_map={ - "validations": {}, + "validations": {("limit",): {"inclusive_maximum": 100, "inclusive_minimum": 1}}, "allowed_values": {}, - "openapi_types": {}, - "attribute_map": {}, - "location_map": {}, + "openapi_types": {"limit": (int,), "pagination_token": (str,)}, + "attribute_map": {"limit": "limit", "pagination_token": "paginationToken"}, + "location_map": {"limit": "query", "pagination_token": "query"}, "collection_format_map": {}, }, headers_map={"accept": ["application/json"], "content_type": []}, diff --git a/pinecone/core/openapi/db_control/model/dedicated_spec.py b/pinecone/core/openapi/db_control/model/byoc_spec.py similarity index 98% rename from pinecone/core/openapi/db_control/model/dedicated_spec.py rename to pinecone/core/openapi/db_control/model/byoc_spec.py index cb2c5d20..9b693e78 100644 --- a/pinecone/core/openapi/db_control/model/dedicated_spec.py +++ b/pinecone/core/openapi/db_control/model/byoc_spec.py @@ -30,10 +30,10 @@ from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property -T = TypeVar("T", bound="DedicatedSpec") +T = TypeVar("T", bound="ByocSpec") -class DedicatedSpec(ModelNormal): +class ByocSpec(ModelNormal): """NOTE: This class is @generated using OpenAPI. Do not edit the class manually. 
@@ -102,7 +102,7 @@ def discriminator(cls): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa: E501 - """DedicatedSpec - a model defined in OpenAPI + """ByocSpec - a model defined in OpenAPI Args: environment (str): The environment where the index is hosted. @@ -189,7 +189,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa @convert_js_args_to_python_args def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 - """DedicatedSpec - a model defined in OpenAPI + """ByocSpec - a model defined in OpenAPI Args: environment (str): The environment where the index is hosted. diff --git a/pinecone/core/openapi/db_control/model/create_backup_request.py b/pinecone/core/openapi/db_control/model/create_backup_request.py index 106e9ab8..bb79710d 100644 --- a/pinecone/core/openapi/db_control/model/create_backup_request.py +++ b/pinecone/core/openapi/db_control/model/create_backup_request.py @@ -61,9 +61,7 @@ class CreateBackupRequest(ModelNormal): allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} - validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { - ("name",): {"max_length": 45, "min_length": 1} - } + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} @cached_class_property def additional_properties_type(cls): @@ -139,7 +137,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. [optional] # noqa: E501 + name (str): The name of the backup. [optional] # noqa: E501 description (str): A description of the backup. 
[optional] # noqa: E501 """ @@ -224,7 +222,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. [optional] # noqa: E501 + name (str): The name of the backup. [optional] # noqa: E501 description (str): A description of the backup. [optional] # noqa: E501 """ diff --git a/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py b/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py new file mode 100644 index 00000000..ee6e7f36 --- /dev/null +++ b/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py @@ -0,0 +1,272 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="CreateIndexFromBackupResponse") + + +class CreateIndexFromBackupResponse(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. 
+ + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ """ + return { + "restore_job_id": (str,), # noqa: E501 + "index_id": (str,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "restore_job_id": "restore_job_id", # noqa: E501 + "index_id": "index_id", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], restore_job_id, index_id, *args, **kwargs) -> T: # noqa: E501 + """CreateIndexFromBackupResponse - a model defined in OpenAPI + + Args: + restore_job_id (str): The ID of the restore job that was created. + index_id (str): The ID of the index that was created from the backup. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. 
+ For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.restore_job_id = restore_job_id + self.index_id = index_id + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, restore_job_id, index_id, *args, **kwargs) -> None: # noqa: E501 + """CreateIndexFromBackupResponse - a model defined in OpenAPI + + Args: + restore_job_id (str): The ID of the restore job that was created. + index_id (str): The ID of the index that was created from the backup. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.restore_job_id = restore_job_id + self.index_id = index_id + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/openapi/db_control/model/deletion_protection.py b/pinecone/core/openapi/db_control/model/deletion_protection.py index 77682626..3194d70e 100644 --- a/pinecone/core/openapi/db_control/model/deletion_protection.py +++ b/pinecone/core/openapi/db_control/model/deletion_protection.py @@ -111,10 +111,10 @@ def __init__(self, *args, **kwargs) -> None: Note that value can be passed either in args or in kwargs, but not in both. Args: - args[0] (str): Whether [deletion protection](http://docs.pinecone.io/guides/indexes/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. . if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 + args[0] (str): Whether [deletion protection](http://docs.pinecone.io/guides/manage-data/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. . if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 Keyword Args: - value (str): Whether [deletion protection](http://docs.pinecone.io/guides/indexes/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. . if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 + value (str): Whether [deletion protection](http://docs.pinecone.io/guides/manage-data/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. . if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. @@ -195,10 +195,10 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: Note that value can be passed either in args or in kwargs, but not in both. Args: - args[0] (str): Whether [deletion protection](http://docs.pinecone.io/guides/indexes/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. 
if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 + args[0] (str): Whether [deletion protection](http://docs.pinecone.io/guides/manage-data/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 Keyword Args: - value (str): Whether [deletion protection](http://docs.pinecone.io/guides/indexes/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 + value (str): Whether [deletion protection](http://docs.pinecone.io/guides/manage-data/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. 
diff --git a/pinecone/core/openapi/db_control/model/index_model_spec.py b/pinecone/core/openapi/db_control/model/index_model_spec.py index 6bfa0a75..574deea1 100644 --- a/pinecone/core/openapi/db_control/model/index_model_spec.py +++ b/pinecone/core/openapi/db_control/model/index_model_spec.py @@ -28,11 +28,11 @@ def lazy_import(): - from pinecone.core.openapi.db_control.model.dedicated_spec import DedicatedSpec + from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec from pinecone.core.openapi.db_control.model.pod_spec import PodSpec from pinecone.core.openapi.db_control.model.serverless_spec import ServerlessSpec - globals()["DedicatedSpec"] = DedicatedSpec + globals()["ByocSpec"] = ByocSpec globals()["PodSpec"] = PodSpec globals()["ServerlessSpec"] = ServerlessSpec @@ -96,7 +96,7 @@ def openapi_types(cls): """ lazy_import() return { - "dedicated": (DedicatedSpec,), # noqa: E501 + "byoc": (ByocSpec,), # noqa: E501 "pod": (PodSpec,), # noqa: E501 "serverless": (ServerlessSpec,), # noqa: E501 } @@ -106,7 +106,7 @@ def discriminator(cls): return None attribute_map: Dict[str, str] = { - "dedicated": "dedicated", # noqa: E501 + "byoc": "byoc", # noqa: E501 "pod": "pod", # noqa: E501 "serverless": "serverless", # noqa: E501 } @@ -151,7 +151,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - dedicated (DedicatedSpec): [optional] # noqa: E501 + byoc (ByocSpec): [optional] # noqa: E501 pod (PodSpec): [optional] # noqa: E501 serverless (ServerlessSpec): [optional] # noqa: E501 """ @@ -237,7 +237,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - dedicated (DedicatedSpec): [optional] # noqa: E501 + byoc (ByocSpec): [optional] # noqa: E501 pod (PodSpec): [optional] # 
noqa: E501 serverless (ServerlessSpec): [optional] # noqa: E501 """ diff --git a/pinecone/core/openapi/db_control/model/index_spec.py b/pinecone/core/openapi/db_control/model/index_spec.py index cee103f2..9e0cc24b 100644 --- a/pinecone/core/openapi/db_control/model/index_spec.py +++ b/pinecone/core/openapi/db_control/model/index_spec.py @@ -28,11 +28,11 @@ def lazy_import(): - from pinecone.core.openapi.db_control.model.dedicated_spec import DedicatedSpec + from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec from pinecone.core.openapi.db_control.model.pod_spec import PodSpec from pinecone.core.openapi.db_control.model.serverless_spec import ServerlessSpec - globals()["DedicatedSpec"] = DedicatedSpec + globals()["ByocSpec"] = ByocSpec globals()["PodSpec"] = PodSpec globals()["ServerlessSpec"] = ServerlessSpec @@ -91,7 +91,7 @@ def openapi_types(cls): return { "serverless": (ServerlessSpec,), # noqa: E501 "pod": (PodSpec,), # noqa: E501 - "dedicated": (DedicatedSpec,), # noqa: E501 + "byoc": (ByocSpec,), # noqa: E501 } @cached_class_property @@ -101,7 +101,7 @@ def discriminator(cls): attribute_map: Dict[str, str] = { "serverless": "serverless", # noqa: E501 "pod": "pod", # noqa: E501 - "dedicated": "dedicated", # noqa: E501 + "byoc": "byoc", # noqa: E501 } read_only_vars: Set[str] = set([]) @@ -146,7 +146,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) serverless (ServerlessSpec): [optional] # noqa: E501 pod (PodSpec): [optional] # noqa: E501 - dedicated (DedicatedSpec): [optional] # noqa: E501 + byoc (ByocSpec): [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -232,7 +232,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) serverless (ServerlessSpec): [optional] # noqa: E501 pod (PodSpec): [optional] # noqa: E501 - dedicated (DedicatedSpec): [optional] # noqa: E501 + byoc (ByocSpec): [optional] # noqa: 
E501 """ _check_type = kwargs.pop("_check_type", True) diff --git a/pinecone/core/openapi/db_control/models/__init__.py b/pinecone/core/openapi/db_control/models/__init__.py index 59b93661..99c3bb9d 100644 --- a/pinecone/core/openapi/db_control/models/__init__.py +++ b/pinecone/core/openapi/db_control/models/__init__.py @@ -11,6 +11,7 @@ from pinecone.core.openapi.db_control.model.backup_list import BackupList from pinecone.core.openapi.db_control.model.backup_model import BackupModel +from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec from pinecone.core.openapi.db_control.model.collection_list import CollectionList from pinecone.core.openapi.db_control.model.collection_model import CollectionModel from pinecone.core.openapi.db_control.model.configure_index_request import ConfigureIndexRequest @@ -34,8 +35,10 @@ from pinecone.core.openapi.db_control.model.create_index_from_backup_request import ( CreateIndexFromBackupRequest, ) +from pinecone.core.openapi.db_control.model.create_index_from_backup_response import ( + CreateIndexFromBackupResponse, +) from pinecone.core.openapi.db_control.model.create_index_request import CreateIndexRequest -from pinecone.core.openapi.db_control.model.dedicated_spec import DedicatedSpec from pinecone.core.openapi.db_control.model.deletion_protection import DeletionProtection from pinecone.core.openapi.db_control.model.error_response import ErrorResponse from pinecone.core.openapi.db_control.model.error_response_error import ErrorResponseError diff --git a/pinecone/core/openapi/db_data/api/bulk_operations_api.py b/pinecone/core/openapi/db_data/api/bulk_operations_api.py index c0be9543..854e37af 100644 --- a/pinecone/core/openapi/db_data/api/bulk_operations_api.py +++ b/pinecone/core/openapi/db_data/api/bulk_operations_api.py @@ -44,7 +44,7 @@ def __init__(self, api_client=None) -> None: def __cancel_bulk_import(self, id, **kwargs: ExtraOpenApiKwargsTypedDict): """Cancel an import # noqa: E501 - Cancel an import 
operation if it is not yet finished. It has no effect if the operation is already finished. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + Cancel an import operation if it is not yet finished. It has no effect if the operation is already finished. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -113,7 +113,7 @@ def __cancel_bulk_import(self, id, **kwargs: ExtraOpenApiKwargsTypedDict): def __describe_bulk_import(self, id, **kwargs: ExtraOpenApiKwargsTypedDict): """Describe an import # noqa: E501 - Return details of a specific import operation. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + Return details of a specific import operation. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -182,7 +182,7 @@ def __describe_bulk_import(self, id, **kwargs: ExtraOpenApiKwargsTypedDict): def __list_bulk_imports(self, **kwargs: ExtraOpenApiKwargsTypedDict): """List imports # noqa: E501 - List all recent and ongoing import operations. By default, `list_imports` returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + List all recent and ongoing import operations. 
By default, `list_imports` returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -250,7 +250,7 @@ def __list_bulk_imports(self, **kwargs: ExtraOpenApiKwargsTypedDict): def __start_bulk_import(self, start_import_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Start import # noqa: E501 - Start an asynchronous import of vectors from object storage into an index. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + Start an asynchronous import of vectors from object storage into an index. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -331,7 +331,7 @@ def __init__(self, api_client=None) -> None: async def __cancel_bulk_import(self, id, **kwargs): """Cancel an import # noqa: E501 - Cancel an import operation if it is not yet finished. It has no effect if the operation is already finished. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + Cancel an import operation if it is not yet finished. It has no effect if the operation is already finished. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). 
# noqa: E501 Args: @@ -393,7 +393,7 @@ async def __cancel_bulk_import(self, id, **kwargs): async def __describe_bulk_import(self, id, **kwargs): """Describe an import # noqa: E501 - Return details of a specific import operation. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + Return details of a specific import operation. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 Args: @@ -455,7 +455,7 @@ async def __describe_bulk_import(self, id, **kwargs): async def __list_bulk_imports(self, **kwargs): """List imports # noqa: E501 - List all recent and ongoing import operations. By default, `list_imports` returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + List all recent and ongoing import operations. By default, `list_imports` returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 @@ -516,7 +516,7 @@ async def __list_bulk_imports(self, **kwargs): async def __start_bulk_import(self, start_import_request, **kwargs): """Start import # noqa: E501 - Start an asynchronous import of vectors from object storage into an index. 
For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + Start an asynchronous import of vectors from object storage into an index. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 Args: diff --git a/pinecone/core/openapi/db_data/api/namespace_operations_api.py b/pinecone/core/openapi/db_data/api/namespace_operations_api.py new file mode 100644 index 00000000..e28e7430 --- /dev/null +++ b/pinecone/core/openapi/db_data/api/namespace_operations_api.py @@ -0,0 +1,443 @@ +""" +Pinecone Data Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support import ApiClient, AsyncioApiClient +from pinecone.openapi_support.endpoint_utils import ( + ExtraOpenApiKwargsTypedDict, + KwargsWithOpenApiKwargDefaultsTypedDict, +) +from pinecone.openapi_support.endpoint import Endpoint as _Endpoint, ExtraOpenApiKwargsTypedDict +from pinecone.openapi_support.asyncio_endpoint import AsyncioEndpoint as _AsyncioEndpoint +from pinecone.openapi_support.model_utils import ( # noqa: F401 + date, + datetime, + file_type, + none_type, + validate_and_convert_types, +) +from pinecone.core.openapi.db_data.model.list_namespaces_response import ListNamespacesResponse +from pinecone.core.openapi.db_data.model.namespace_description import NamespaceDescription +from pinecone.core.openapi.db_data.model.rpc_status import RpcStatus + + +class NamespaceOperationsApi: + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def __delete_namespace(self, namespace, **kwargs: ExtraOpenApiKwargsTypedDict): + """Delete a namespace # noqa: E501 + + Delete a namespace from an index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_namespace(namespace, async_req=True) + >>> result = thread.get() + + Args: + namespace (str): The namespace to delete + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + {str: (bool, dict, float, int, list, str, none_type)} + If the method is called asynchronously, returns the request + thread. 
+ """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["namespace"] = namespace + return self.call_with_http_info(**kwargs) + + self.delete_namespace = _Endpoint( + settings={ + "response_type": ({str: (bool, dict, float, int, list, str, none_type)},), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/namespaces/{namespace}", + "operation_id": "delete_namespace", + "http_method": "DELETE", + "servers": None, + }, + params_map={ + "all": ["namespace"], + "required": ["namespace"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"namespace": (str,)}, + "attribute_map": {"namespace": "namespace"}, + "location_map": {"namespace": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__delete_namespace, + ) + + def __describe_namespace(self, namespace, **kwargs: ExtraOpenApiKwargsTypedDict): + """Describe a namespace # noqa: E501 + + Describe a namespace within an index, showing the vector count within the namespace. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.describe_namespace(namespace, async_req=True) + >>> result = thread.get() + + Args: + namespace (str): The namespace to describe + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. 
+ Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + NamespaceDescription + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["namespace"] = namespace + return self.call_with_http_info(**kwargs) + + self.describe_namespace = _Endpoint( + settings={ + "response_type": (NamespaceDescription,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/namespaces/{namespace}", + "operation_id": "describe_namespace", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["namespace"], + "required": ["namespace"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"namespace": (str,)}, + "attribute_map": {"namespace": "namespace"}, + "location_map": {"namespace": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__describe_namespace, + ) + + def __list_namespaces_operation(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """Get list of all namespaces # noqa: E501 + + Get a list of all namespaces within an index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_namespaces_operation(async_req=True) + >>> result = thread.get() + + + Keyword Args: + limit (int): Max number namespaces to return per page. [optional] + pagination_token (str): Pagination token to continue a previous listing operation. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. 
+ _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + ListNamespacesResponse + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_namespaces_operation = _Endpoint( + settings={ + "response_type": (ListNamespacesResponse,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/namespaces", + "operation_id": "list_namespaces_operation", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["limit", "pagination_token"], + "required": [], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"limit": (int,), "pagination_token": (str,)}, + "attribute_map": {"limit": "limit", "pagination_token": "paginationToken"}, + "location_map": {"limit": "query", "pagination_token": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_namespaces_operation, + ) + + +class AsyncioNamespaceOperationsApi: + """NOTE: This class is @generated using OpenAPI + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = AsyncioApiClient() + self.api_client = api_client + + async def __delete_namespace(self, namespace, **kwargs): + """Delete a namespace # noqa: E501 + + Delete a namespace from an index. # noqa: E501 + + + Args: + namespace (str): The namespace to delete + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
+ + Returns: + {str: (bool, dict, float, int, list, str, none_type)} + """ + self._process_openapi_kwargs(kwargs) + kwargs["namespace"] = namespace + return await self.call_with_http_info(**kwargs) + + self.delete_namespace = _AsyncioEndpoint( + settings={ + "response_type": ({str: (bool, dict, float, int, list, str, none_type)},), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/namespaces/{namespace}", + "operation_id": "delete_namespace", + "http_method": "DELETE", + "servers": None, + }, + params_map={ + "all": ["namespace"], + "required": ["namespace"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"namespace": (str,)}, + "attribute_map": {"namespace": "namespace"}, + "location_map": {"namespace": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__delete_namespace, + ) + + async def __describe_namespace(self, namespace, **kwargs): + """Describe a namespace # noqa: E501 + + Describe a namespace within an index, showing the vector count within the namespace. # noqa: E501 + + + Args: + namespace (str): The namespace to describe + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
+ + Returns: + NamespaceDescription + """ + self._process_openapi_kwargs(kwargs) + kwargs["namespace"] = namespace + return await self.call_with_http_info(**kwargs) + + self.describe_namespace = _AsyncioEndpoint( + settings={ + "response_type": (NamespaceDescription,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/namespaces/{namespace}", + "operation_id": "describe_namespace", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["namespace"], + "required": ["namespace"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"namespace": (str,)}, + "attribute_map": {"namespace": "namespace"}, + "location_map": {"namespace": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__describe_namespace, + ) + + async def __list_namespaces_operation(self, **kwargs): + """Get list of all namespaces # noqa: E501 + + Get a list of all namespaces within an index. # noqa: E501 + + + + Keyword Args: + limit (int): Max number namespaces to return per page. [optional] + pagination_token (str): Pagination token to continue a previous listing operation. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
+ + Returns: + ListNamespacesResponse + """ + self._process_openapi_kwargs(kwargs) + return await self.call_with_http_info(**kwargs) + + self.list_namespaces_operation = _AsyncioEndpoint( + settings={ + "response_type": (ListNamespacesResponse,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/namespaces", + "operation_id": "list_namespaces_operation", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["limit", "pagination_token"], + "required": [], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"limit": (int,), "pagination_token": (str,)}, + "attribute_map": {"limit": "limit", "pagination_token": "paginationToken"}, + "location_map": {"limit": "query", "pagination_token": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_namespaces_operation, + ) diff --git a/pinecone/core/openapi/db_data/api/vector_operations_api.py b/pinecone/core/openapi/db_data/api/vector_operations_api.py index a5a3c70e..25ad63c5 100644 --- a/pinecone/core/openapi/db_data/api/vector_operations_api.py +++ b/pinecone/core/openapi/db_data/api/vector_operations_api.py @@ -55,7 +55,7 @@ def __init__(self, api_client=None) -> None: def __delete_vectors(self, delete_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Delete vectors # noqa: E501 - Delete vectors by id from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/data/delete-data). # noqa: E501 + Delete vectors by id from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/manage-data/delete-data). # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True @@ -195,7 +195,7 @@ def __describe_index_stats( def __fetch_vectors(self, ids, **kwargs: ExtraOpenApiKwargsTypedDict): """Fetch vectors # noqa: E501 - Look up and return vectors by ID from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/data/fetch-data). # noqa: E501 + Look up and return vectors by ID from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/manage-data/fetch-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -265,7 +265,7 @@ def __fetch_vectors(self, ids, **kwargs: ExtraOpenApiKwargsTypedDict): def __list_vectors(self, **kwargs: ExtraOpenApiKwargsTypedDict): """List vector IDs # noqa: E501 - List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. Returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 + List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. Returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). 
If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/manage-data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -350,7 +350,7 @@ def __list_vectors(self, **kwargs: ExtraOpenApiKwargsTypedDict): def __query_vectors(self, query_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Search with a vector # noqa: E501 - Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Query data](https://docs.pinecone.io/guides/data/query-data). # noqa: E501 + Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/semantic-search). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -421,7 +421,7 @@ def __search_records_namespace( ): """Search with text # noqa: E501 - Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). Searching with a query vector or record ID is supported for all indexes. 
For guidance and examples, see [Query data](https://docs.pinecone.io/guides/data/query-data). # noqa: E501 + Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). Searching with a query vector or record ID is supported for all indexes. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/semantic-search). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -495,7 +495,7 @@ def __search_records_namespace( def __update_vector(self, update_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Update a vector # noqa: E501 - Update a vector in a namespace. If a value is included, it will overwrite the previous value. If a `set_metadata` is included, the values of the fields specified in it will be added or overwrite the previous value. For guidance and examples, see [Update data](https://docs.pinecone.io/guides/data/update-data). # noqa: E501 + Update a vector in a namespace. If a value is included, it will overwrite the previous value. If a `set_metadata` is included, the values of the fields specified in it will be added or overwrite the previous value. For guidance and examples, see [Update data](https://docs.pinecone.io/guides/manage-data/update-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -566,7 +566,7 @@ def __upsert_records_namespace( ): """Upsert text # noqa: E501 - Upsert text into a namespace. Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. 
Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/data/upsert-data#upsert-text). # noqa: E501 + Upsert text into a namespace. Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data#upsert-text). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -637,7 +637,7 @@ def __upsert_records_namespace( def __upsert_vectors(self, upsert_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Upsert vectors # noqa: E501 - Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/data/upsert-data#upsert-vectors). # noqa: E501 + Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data#upsert-vectors). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -718,7 +718,7 @@ def __init__(self, api_client=None) -> None: async def __delete_vectors(self, delete_request, **kwargs): """Delete vectors # noqa: E501 - Delete vectors by id from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/data/delete-data). # noqa: E501 + Delete vectors by id from a single namespace. 
For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/manage-data/delete-data). # noqa: E501 Args: @@ -842,7 +842,7 @@ async def __describe_index_stats(self, describe_index_stats_request, **kwargs): async def __fetch_vectors(self, ids, **kwargs): """Fetch vectors # noqa: E501 - Look up and return vectors by ID from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/data/fetch-data). # noqa: E501 + Look up and return vectors by ID from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/manage-data/fetch-data). # noqa: E501 Args: @@ -905,7 +905,7 @@ async def __fetch_vectors(self, ids, **kwargs): async def __list_vectors(self, **kwargs): """List vector IDs # noqa: E501 - List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. Returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 + List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. Returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. 
Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/manage-data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 @@ -983,7 +983,7 @@ async def __list_vectors(self, **kwargs): async def __query_vectors(self, query_request, **kwargs): """Search with a vector # noqa: E501 - Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Query data](https://docs.pinecone.io/guides/data/query-data). # noqa: E501 + Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/semantic-search). # noqa: E501 Args: @@ -1045,7 +1045,7 @@ async def __query_vectors(self, query_request, **kwargs): async def __search_records_namespace(self, namespace, search_records_request, **kwargs): """Search with text # noqa: E501 - Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). Searching with a query vector or record ID is supported for all indexes. For guidance and examples, see [Query data](https://docs.pinecone.io/guides/data/query-data). # noqa: E501 + Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. 
Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). Searching with a query vector or record ID is supported for all indexes. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/semantic-search). # noqa: E501 Args: @@ -1112,7 +1112,7 @@ async def __search_records_namespace(self, namespace, search_records_request, ** async def __update_vector(self, update_request, **kwargs): """Update a vector # noqa: E501 - Update a vector in a namespace. If a value is included, it will overwrite the previous value. If a `set_metadata` is included, the values of the fields specified in it will be added or overwrite the previous value. For guidance and examples, see [Update data](https://docs.pinecone.io/guides/data/update-data). # noqa: E501 + Update a vector in a namespace. If a value is included, it will overwrite the previous value. If a `set_metadata` is included, the values of the fields specified in it will be added or overwrite the previous value. For guidance and examples, see [Update data](https://docs.pinecone.io/guides/manage-data/update-data). # noqa: E501 Args: @@ -1174,7 +1174,7 @@ async def __update_vector(self, update_request, **kwargs): async def __upsert_records_namespace(self, namespace, upsert_record, **kwargs): """Upsert text # noqa: E501 - Upsert text into a namespace. Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/data/upsert-data#upsert-text). # noqa: E501 + Upsert text into a namespace. 
Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data#upsert-text). # noqa: E501 Args: @@ -1238,7 +1238,7 @@ async def __upsert_records_namespace(self, namespace, upsert_record, **kwargs): async def __upsert_vectors(self, upsert_request, **kwargs): """Upsert vectors # noqa: E501 - Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/data/upsert-data#upsert-vectors). # noqa: E501 + Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data#upsert-vectors). 
# noqa: E501 Args: diff --git a/pinecone/core/openapi/db_data/apis/__init__.py b/pinecone/core/openapi/db_data/apis/__init__.py index 3d7112fd..a5caa981 100644 --- a/pinecone/core/openapi/db_data/apis/__init__.py +++ b/pinecone/core/openapi/db_data/apis/__init__.py @@ -14,4 +14,5 @@ # Import APIs into API package: from pinecone.core.openapi.db_data.api.bulk_operations_api import BulkOperationsApi +from pinecone.core.openapi.db_data.api.namespace_operations_api import NamespaceOperationsApi from pinecone.core.openapi.db_data.api.vector_operations_api import VectorOperationsApi diff --git a/pinecone/core/openapi/db_data/model/delete_request.py b/pinecone/core/openapi/db_data/model/delete_request.py index bfb15b87..48855458 100644 --- a/pinecone/core/openapi/db_data/model/delete_request.py +++ b/pinecone/core/openapi/db_data/model/delete_request.py @@ -144,7 +144,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ids ([str]): Vectors to delete. [optional] # noqa: E501 delete_all (bool): This indicates that all vectors in the index namespace should be deleted. [optional] if omitted the server will use the default value of False. # noqa: E501 namespace (str): The namespace to delete vectors from, if applicable. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). Serverless indexes do not support delete by metadata. Instead, you can use the `list` operation to fetch the vector IDs based on their common ID prefix and then delete the records by ID. [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. 
This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support delete by metadata. Instead, you can use the `list` operation to fetch the vector IDs based on their common ID prefix and then delete the records by ID. [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -231,7 +231,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ids ([str]): Vectors to delete. [optional] # noqa: E501 delete_all (bool): This indicates that all vectors in the index namespace should be deleted. [optional] if omitted the server will use the default value of False. # noqa: E501 namespace (str): The namespace to delete vectors from, if applicable. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). Serverless indexes do not support delete by metadata. Instead, you can use the `list` operation to fetch the vector IDs based on their common ID prefix and then delete the records by ID. [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support delete by metadata. Instead, you can use the `list` operation to fetch the vector IDs based on their common ID prefix and then delete the records by ID. 
[optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) diff --git a/pinecone/core/openapi/db_data/model/describe_index_stats_request.py b/pinecone/core/openapi/db_data/model/describe_index_stats_request.py index e024d5f3..3ea3bb6d 100644 --- a/pinecone/core/openapi/db_data/model/describe_index_stats_request.py +++ b/pinecone/core/openapi/db_data/model/describe_index_stats_request.py @@ -135,7 +135,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -219,7 +219,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. 
[optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) diff --git a/pinecone/core/openapi/db_data/model/list_namespaces_response.py b/pinecone/core/openapi/db_data/model/list_namespaces_response.py new file mode 100644 index 00000000..3d75a727 --- /dev/null +++ b/pinecone/core/openapi/db_data/model/list_namespaces_response.py @@ -0,0 +1,274 @@ +""" +Pinecone Data Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.db_data.model.namespace_description import NamespaceDescription + from pinecone.core.openapi.db_data.model.pagination import Pagination + + globals()["NamespaceDescription"] = NamespaceDescription + globals()["Pagination"] = Pagination + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="ListNamespacesResponse") + + +class ListNamespacesResponse(ModelNormal): + """NOTE: This class is @generated 
using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ """ + lazy_import() + return { + "namespaces": ([NamespaceDescription],), # noqa: E501 + "pagination": (Pagination,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "namespaces": "namespaces", # noqa: E501 + "pagination": "pagination", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """ListNamespacesResponse - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + namespaces ([NamespaceDescription]): The list of namespaces belonging to this index. [optional] # noqa: E501 + pagination (Pagination): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """ListNamespacesResponse - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + namespaces ([NamespaceDescription]): The list of namespaces belonging to this index. 
[optional] # noqa: E501 + pagination (Pagination): [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_data/model/namespace_description.py b/pinecone/core/openapi/db_data/model/namespace_description.py new file mode 100644 index 00000000..099b14b2 --- /dev/null +++ b/pinecone/core/openapi/db_data/model/namespace_description.py @@ -0,0 +1,264 @@ +""" +Pinecone Data Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="NamespaceDescription") + + +class NamespaceDescription(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "name": (str,), # noqa: E501 + "record_count": (int,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "name": "name", # noqa: E501 + "record_count": "record_count", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """NamespaceDescription - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. 
snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): The name of the namespace. [optional] # noqa: E501 + record_count (int): The total amount of records within the namespace. [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """NamespaceDescription - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. 
+ When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): The name of the namespace. [optional] # noqa: E501 + record_count (int): The total amount of records within the namespace. [optional] # noqa: E501 + """ + + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/openapi/db_data/model/query_request.py b/pinecone/core/openapi/db_data/model/query_request.py index 7aa460c0..f1aaf07e 100644 --- a/pinecone/core/openapi/db_data/model/query_request.py +++ b/pinecone/core/openapi/db_data/model/query_request.py @@ -170,7 +170,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) namespace (str): The namespace to query. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 include_values (bool): Indicates whether vector values are included in the response. [optional] if omitted the server will use the default value of False. # noqa: E501 include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. [optional] if omitted the server will use the default value of False. # noqa: E501 queries ([QueryVector]): DEPRECATED. Use `vector` or `id` instead. [optional] # noqa: E501 @@ -265,7 +265,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) namespace (str): The namespace to query. 
[optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 include_values (bool): Indicates whether vector values are included in the response. [optional] if omitted the server will use the default value of False. # noqa: E501 include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. [optional] if omitted the server will use the default value of False. # noqa: E501 queries ([QueryVector]): DEPRECATED. Use `vector` or `id` instead. [optional] # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/search_records_request_query.py b/pinecone/core/openapi/db_data/model/search_records_request_query.py index 6898e200..caa7ee40 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request_query.py +++ b/pinecone/core/openapi/db_data/model/search_records_request_query.py @@ -154,7 +154,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. 
See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 inputs ({str: (bool, dict, float, int, list, str, none_type)}): [optional] # noqa: E501 vector (SearchRecordsVector): [optional] # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. [optional] # noqa: E501 @@ -245,7 +245,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 inputs ({str: (bool, dict, float, int, list, str, none_type)}): [optional] # noqa: E501 vector (SearchRecordsVector): [optional] # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. 
[optional] # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/search_records_request_rerank.py b/pinecone/core/openapi/db_data/model/search_records_request_rerank.py index 3138c601..2684894a 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request_rerank.py +++ b/pinecone/core/openapi/db_data/model/search_records_request_rerank.py @@ -113,8 +113,8 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: """SearchRecordsRequestRerank - a model defined in OpenAPI Args: - model (str): The name of the [reranking model](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) to use. - rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models). + model (str): The name of the [reranking model](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) to use. + rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/search/rerank-results#reranking-models). Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -148,7 +148,7 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: through its discriminator because we passed in _visited_composed_classes = (Animal,) top_n (int): The number of top results to return after reranking. Defaults to top_k. [optional] # noqa: E501 - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) for available model parameters. 
[optional] # noqa: E501 + parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 query (str): The query to rerank documents against. If a specific rerank query is specified, it overwrites the query input that was provided at the top level. [optional] # noqa: E501 """ @@ -205,8 +205,8 @@ def __init__(self, model, rank_fields, *args, **kwargs) -> None: # noqa: E501 """SearchRecordsRequestRerank - a model defined in OpenAPI Args: - model (str): The name of the [reranking model](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) to use. - rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models). + model (str): The name of the [reranking model](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) to use. + rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/search/rerank-results#reranking-models). Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -240,7 +240,7 @@ def __init__(self, model, rank_fields, *args, **kwargs) -> None: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) top_n (int): The number of top results to return after reranking. Defaults to top_k. [optional] # noqa: E501 - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. 
Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) for available model parameters. [optional] # noqa: E501 + parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 query (str): The query to rerank documents against. If a specific rerank query is specified, it overwrites the query input that was provided at the top level. [optional] # noqa: E501 """ diff --git a/pinecone/core/openapi/db_data/model/start_import_request.py b/pinecone/core/openapi/db_data/model/start_import_request.py index 1a4a9629..e0829047 100644 --- a/pinecone/core/openapi/db_data/model/start_import_request.py +++ b/pinecone/core/openapi/db_data/model/start_import_request.py @@ -120,7 +120,7 @@ def _from_openapi_data(cls: Type[T], uri, *args, **kwargs) -> T: # noqa: E501 """StartImportRequest - a model defined in OpenAPI Args: - uri (str): The [URI prefix](https://docs.pinecone.io/guides/data/understanding-imports#directory-structure) under which the data to import is available. All data within this prefix will be listed then imported into the target index. Currently only `s3://` URIs are supported. + uri (str): The [URI prefix](https://docs.pinecone.io/guides/index-data/import-data#prepare-your-data) under which the data to import is available. All data within this prefix will be listed then imported into the target index. Currently only `s3://` URIs are supported. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -209,7 +209,7 @@ def __init__(self, uri, *args, **kwargs) -> None: # noqa: E501 """StartImportRequest - a model defined in OpenAPI Args: - uri (str): The [URI prefix](https://docs.pinecone.io/guides/data/understanding-imports#directory-structure) under which the data to import is available. 
All data within this prefix will be listed then imported into the target index. Currently only `s3://` URIs are supported. + uri (str): The [URI prefix](https://docs.pinecone.io/guides/index-data/import-data#prepare-your-data) under which the data to import is available. All data within this prefix will be listed then imported into the target index. Currently only `s3://` URIs are supported. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/pinecone/core/openapi/db_data/models/__init__.py b/pinecone/core/openapi/db_data/models/__init__.py index b5fa9fa0..34e9a6d8 100644 --- a/pinecone/core/openapi/db_data/models/__init__.py +++ b/pinecone/core/openapi/db_data/models/__init__.py @@ -20,7 +20,9 @@ from pinecone.core.openapi.db_data.model.index_description import IndexDescription from pinecone.core.openapi.db_data.model.list_imports_response import ListImportsResponse from pinecone.core.openapi.db_data.model.list_item import ListItem +from pinecone.core.openapi.db_data.model.list_namespaces_response import ListNamespacesResponse from pinecone.core.openapi.db_data.model.list_response import ListResponse +from pinecone.core.openapi.db_data.model.namespace_description import NamespaceDescription from pinecone.core.openapi.db_data.model.namespace_summary import NamespaceSummary from pinecone.core.openapi.db_data.model.pagination import Pagination from pinecone.core.openapi.db_data.model.protobuf_any import ProtobufAny diff --git a/pinecone/core/openapi/inference/api/inference_api.py b/pinecone/core/openapi/inference/api/inference_api.py index 3c9ec25b..a19557f8 100644 --- a/pinecone/core/openapi/inference/api/inference_api.py +++ b/pinecone/core/openapi/inference/api/inference_api.py @@ -46,7 +46,7 @@ def __init__(self, api_client=None) -> None: def __embed(self, **kwargs: ExtraOpenApiKwargsTypedDict): """Generate vectors # noqa: E501 - Generate vector embeddings for input data. 
This endpoint uses [Pinecone Inference](https://docs.pinecone.io/guides/inference/understanding-inference). For guidance and examples, see [Embed data](https://docs.pinecone.io/guides/inference/generate-embeddings). # noqa: E501 + Generate vector embeddings for input data. This endpoint uses [Pinecone Inference](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -253,7 +253,7 @@ def __list_models(self, **kwargs: ExtraOpenApiKwargsTypedDict): def __rerank(self, **kwargs: ExtraOpenApiKwargsTypedDict): """Rerank documents # noqa: E501 - Rerank documents according to their relevance to a query. For guidance and examples, see [Rerank documents](https://docs.pinecone.io/guides/inference/rerank). # noqa: E501 + Rerank documents according to their relevance to a query. For guidance and examples, see [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -332,7 +332,7 @@ def __init__(self, api_client=None) -> None: async def __embed(self, **kwargs): """Generate vectors # noqa: E501 - Generate vector embeddings for input data. This endpoint uses [Pinecone Inference](https://docs.pinecone.io/guides/inference/understanding-inference). For guidance and examples, see [Embed data](https://docs.pinecone.io/guides/inference/generate-embeddings). # noqa: E501 + Generate vector embeddings for input data. This endpoint uses [Pinecone Inference](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding). # noqa: E501 @@ -518,7 +518,7 @@ async def __list_models(self, **kwargs): async def __rerank(self, **kwargs): """Rerank documents # noqa: E501 - Rerank documents according to their relevance to a query. 
For guidance and examples, see [Rerank documents](https://docs.pinecone.io/guides/inference/rerank). # noqa: E501 + Rerank documents according to their relevance to a query. For guidance and examples, see [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/embed_request.py b/pinecone/core/openapi/inference/model/embed_request.py index 58e31290..ba7e786b 100644 --- a/pinecone/core/openapi/inference/model/embed_request.py +++ b/pinecone/core/openapi/inference/model/embed_request.py @@ -117,7 +117,7 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no """EmbedRequest - a model defined in OpenAPI Args: - model (str): The [model](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) to use for embedding generation. + model (str): The [model](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) to use for embedding generation. inputs ([EmbedRequestInputs]): List of inputs to generate embeddings for. Keyword Args: @@ -151,7 +151,7 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) for available model parameters. [optional] # noqa: E501 + parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) for available model parameters. 
[optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -207,7 +207,7 @@ def __init__(self, model, inputs, *args, **kwargs) -> None: # noqa: E501 """EmbedRequest - a model defined in OpenAPI Args: - model (str): The [model](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) to use for embedding generation. + model (str): The [model](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) to use for embedding generation. inputs ([EmbedRequestInputs]): List of inputs to generate embeddings for. Keyword Args: @@ -241,7 +241,7 @@ def __init__(self, model, inputs, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) for available model parameters. [optional] # noqa: E501 + parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) for available model parameters. 
[optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) diff --git a/pinecone/core/openapi/inference/model/model_info.py b/pinecone/core/openapi/inference/model/model_info.py index 2ce733cc..6daa37b3 100644 --- a/pinecone/core/openapi/inference/model/model_info.py +++ b/pinecone/core/openapi/inference/model/model_info.py @@ -77,9 +77,9 @@ class ModelInfo(ModelNormal): } validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { - ("dimension",): {"inclusive_maximum": 20000, "inclusive_minimum": 1}, - ("sequence_length",): {"inclusive_minimum": 1}, - ("batch_size",): {"inclusive_minimum": 1}, + ("default_dimension",): {"inclusive_maximum": 20000, "inclusive_minimum": 1}, + ("max_sequence_length",): {"inclusive_minimum": 1}, + ("max_batch_size",): {"inclusive_minimum": 1}, } @cached_class_property @@ -105,15 +105,17 @@ def openapi_types(cls): """ lazy_import() return { - "name": (str,), # noqa: E501 + "model": (str,), # noqa: E501 "short_description": (str,), # noqa: E501 "type": (str,), # noqa: E501 "supported_parameters": ([ModelInfoSupportedParameter],), # noqa: E501 "vector_type": (str,), # noqa: E501 - "dimension": (int,), # noqa: E501 + "default_dimension": (int,), # noqa: E501 "modality": (str,), # noqa: E501 - "sequence_length": (int,), # noqa: E501 - "batch_size": (int,), # noqa: E501 + "max_sequence_length": (int,), # noqa: E501 + "max_batch_size": (int,), # noqa: E501 + "provider_name": (str,), # noqa: E501 + "supported_dimensions": ([int],), # noqa: E501 "supported_metrics": (ModelInfoSupportedMetrics,), # noqa: E501 } @@ -122,15 +124,17 @@ def discriminator(cls): return None attribute_map: Dict[str, str] = { - "name": "name", # noqa: E501 + "model": "model", # noqa: E501 "short_description": "short_description", # noqa: E501 "type": "type", # noqa: E501 "supported_parameters": "supported_parameters", # noqa: E501 "vector_type": "vector_type", # noqa: E501 - "dimension": "dimension", # noqa: E501 + "default_dimension": 
"default_dimension", # noqa: E501 "modality": "modality", # noqa: E501 - "sequence_length": "sequence_length", # noqa: E501 - "batch_size": "batch_size", # noqa: E501 + "max_sequence_length": "max_sequence_length", # noqa: E501 + "max_batch_size": "max_batch_size", # noqa: E501 + "provider_name": "provider_name", # noqa: E501 + "supported_dimensions": "supported_dimensions", # noqa: E501 "supported_metrics": "supported_metrics", # noqa: E501 } @@ -141,12 +145,12 @@ def discriminator(cls): @classmethod @convert_js_args_to_python_args def _from_openapi_data( - cls: Type[T], name, short_description, type, supported_parameters, *args, **kwargs + cls: Type[T], model, short_description, type, supported_parameters, *args, **kwargs ) -> T: # noqa: E501 """ModelInfo - a model defined in OpenAPI Args: - name (str): The name of the model. + model (str): The name of the model. short_description (str): A summary of the model. type (str): The type of model (e.g. 'embed' or 'rerank'). supported_parameters ([ModelInfoSupportedParameter]): @@ -183,10 +187,12 @@ def _from_openapi_data( through its discriminator because we passed in _visited_composed_classes = (Animal,) vector_type (str): Whether the embedding model produces 'dense' or 'sparse' embeddings. [optional] # noqa: E501 - dimension (int): The embedding model dimension (applies to dense embedding models only). [optional] # noqa: E501 + default_dimension (int): The default embedding model dimension (applies to dense embedding models only). [optional] # noqa: E501 modality (str): The modality of the model (e.g. 'text'). [optional] # noqa: E501 - sequence_length (int): The maximum tokens per sequence supported by the model. [optional] # noqa: E501 - batch_size (int): The maximum batch size (number of sequences) supported by the model. [optional] # noqa: E501 + max_sequence_length (int): The maximum tokens per sequence supported by the model. 
[optional] # noqa: E501 + max_batch_size (int): The maximum batch size (number of sequences) supported by the model. [optional] # noqa: E501 + provider_name (str): The name of the provider of the model. [optional] # noqa: E501 + supported_dimensions ([int]): The list of supported dimensions for the model (applies to dense embedding models only). [optional] # noqa: E501 supported_metrics (ModelInfoSupportedMetrics): [optional] # noqa: E501 """ @@ -213,7 +219,7 @@ def _from_openapi_data( self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - self.name = name + self.model = model self.short_description = short_description self.type = type self.supported_parameters = supported_parameters @@ -242,12 +248,12 @@ def _from_openapi_data( @convert_js_args_to_python_args def __init__( - self, name, short_description, type, supported_parameters, *args, **kwargs + self, model, short_description, type, supported_parameters, *args, **kwargs ) -> None: # noqa: E501 """ModelInfo - a model defined in OpenAPI Args: - name (str): The name of the model. + model (str): The name of the model. short_description (str): A summary of the model. type (str): The type of model (e.g. 'embed' or 'rerank'). supported_parameters ([ModelInfoSupportedParameter]): @@ -284,10 +290,12 @@ def __init__( through its discriminator because we passed in _visited_composed_classes = (Animal,) vector_type (str): Whether the embedding model produces 'dense' or 'sparse' embeddings. [optional] # noqa: E501 - dimension (int): The embedding model dimension (applies to dense embedding models only). [optional] # noqa: E501 + default_dimension (int): The default embedding model dimension (applies to dense embedding models only). [optional] # noqa: E501 modality (str): The modality of the model (e.g. 'text'). [optional] # noqa: E501 - sequence_length (int): The maximum tokens per sequence supported by the model. 
[optional] # noqa: E501 - batch_size (int): The maximum batch size (number of sequences) supported by the model. [optional] # noqa: E501 + max_sequence_length (int): The maximum tokens per sequence supported by the model. [optional] # noqa: E501 + max_batch_size (int): The maximum batch size (number of sequences) supported by the model. [optional] # noqa: E501 + provider_name (str): The name of the provider of the model. [optional] # noqa: E501 + supported_dimensions ([int]): The list of supported dimensions for the model (applies to dense embedding models only). [optional] # noqa: E501 supported_metrics (ModelInfoSupportedMetrics): [optional] # noqa: E501 """ @@ -312,7 +320,7 @@ def __init__( self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - self.name = name + self.model = model self.short_description = short_description self.type = type self.supported_parameters = supported_parameters diff --git a/pinecone/core/openapi/inference/model/rerank_request.py b/pinecone/core/openapi/inference/model/rerank_request.py index 5727a4f7..d8ce884b 100644 --- a/pinecone/core/openapi/inference/model/rerank_request.py +++ b/pinecone/core/openapi/inference/model/rerank_request.py @@ -125,7 +125,7 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - """RerankRequest - a model defined in OpenAPI Args: - model (str): The [model](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) to use for reranking. + model (str): The [model](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) to use for reranking. query (str): The query to rerank documents against. documents ([Document]): The documents to rerank. @@ -162,8 +162,8 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - _visited_composed_classes = (Animal,) top_n (int): The number of results to return sorted by relevance. Defaults to the number of inputs. 
[optional] # noqa: E501 return_documents (bool): Whether to return the documents in the response. [optional] if omitted the server will use the default value of True. # noqa: E501 - rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models). [optional] if omitted the server will use the default value of ["text"]. # noqa: E501 - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) for available model parameters. [optional] # noqa: E501 + rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/search/rerank-results#reranking-models). [optional] if omitted the server will use the default value of ["text"]. # noqa: E501 + parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -220,7 +220,7 @@ def __init__(self, model, query, documents, *args, **kwargs) -> None: # noqa: E """RerankRequest - a model defined in OpenAPI Args: - model (str): The [model](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) to use for reranking. + model (str): The [model](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) to use for reranking. query (str): The query to rerank documents against. documents ([Document]): The documents to rerank. 
@@ -257,8 +257,8 @@ def __init__(self, model, query, documents, *args, **kwargs) -> None: # noqa: E _visited_composed_classes = (Animal,) top_n (int): The number of results to return sorted by relevance. Defaults to the number of inputs. [optional] # noqa: E501 return_documents (bool): Whether to return the documents in the response. [optional] if omitted the server will use the default value of True. # noqa: E501 - rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models). [optional] if omitted the server will use the default value of ["text"]. # noqa: E501 - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) for available model parameters. [optional] # noqa: E501 + rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/search/rerank-results#reranking-models). [optional] if omitted the server will use the default value of ["text"]. # noqa: E501 + parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. 
[optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) diff --git a/pinecone/db_control/db_control.py b/pinecone/db_control/db_control.py index 69aef889..0f11c209 100644 --- a/pinecone/db_control/db_control.py +++ b/pinecone/db_control/db_control.py @@ -14,10 +14,15 @@ if TYPE_CHECKING: from .resources.sync.index import IndexResource from .resources.sync.collection import CollectionResource + from .resources.sync.restore_job import RestoreJobResource + from .resources.sync.backup import BackupResource + from pinecone.config import Config, OpenApiConfiguration class DBControl: - def __init__(self, config, openapi_config, pool_threads): + def __init__( + self, config: "Config", openapi_config: "OpenApiConfiguration", pool_threads: int + ) -> None: self._config = config """ @private """ @@ -43,6 +48,12 @@ def __init__(self, config, openapi_config, pool_threads): self._collection_resource: Optional["CollectionResource"] = None """ @private """ + self._restore_job_resource: Optional["RestoreJobResource"] = None + """ @private """ + + self._backup_resource: Optional["BackupResource"] = None + """ @private """ + @property def index(self) -> "IndexResource": if self._index_resource is None: @@ -58,3 +69,19 @@ def collection(self) -> "CollectionResource": self._collection_resource = CollectionResource(self._index_api) return self._collection_resource + + @property + def restore_job(self) -> "RestoreJobResource": + if self._restore_job_resource is None: + from .resources.sync.restore_job import RestoreJobResource + + self._restore_job_resource = RestoreJobResource(self._index_api) + return self._restore_job_resource + + @property + def backup(self) -> "BackupResource": + if self._backup_resource is None: + from .resources.sync.backup import BackupResource + + self._backup_resource = BackupResource(self._index_api) + return self._backup_resource diff --git a/pinecone/db_control/db_control_asyncio.py b/pinecone/db_control/db_control_asyncio.py index 
91e3f179..7ae2196a 100644 --- a/pinecone/db_control/db_control_asyncio.py +++ b/pinecone/db_control/db_control_asyncio.py @@ -14,10 +14,13 @@ if TYPE_CHECKING: from .resources.asyncio.index import IndexResourceAsyncio from .resources.asyncio.collection import CollectionResourceAsyncio + from .resources.asyncio.restore_job import RestoreJobResourceAsyncio + from .resources.asyncio.backup import BackupResourceAsyncio + from pinecone.config import Config, OpenApiConfiguration class DBControlAsyncio: - def __init__(self, config, openapi_config): + def __init__(self, config: "Config", openapi_config: "OpenApiConfiguration") -> None: self._config = config """ @private """ @@ -39,6 +42,12 @@ def __init__(self, config, openapi_config): self._collection_resource: Optional["CollectionResourceAsyncio"] = None """ @private """ + self._restore_job_resource: Optional["RestoreJobResourceAsyncio"] = None + """ @private """ + + self._backup_resource: Optional["BackupResourceAsyncio"] = None + """ @private """ + @property def index(self) -> "IndexResourceAsyncio": if self._index_resource is None: @@ -56,3 +65,19 @@ def collection(self) -> "CollectionResourceAsyncio": self._collection_resource = CollectionResourceAsyncio(self._index_api) return self._collection_resource + + @property + def restore_job(self) -> "RestoreJobResourceAsyncio": + if self._restore_job_resource is None: + from .resources.asyncio.restore_job import RestoreJobResourceAsyncio + + self._restore_job_resource = RestoreJobResourceAsyncio(self._index_api) + return self._restore_job_resource + + @property + def backup(self) -> "BackupResourceAsyncio": + if self._backup_resource is None: + from .resources.asyncio.backup import BackupResourceAsyncio + + self._backup_resource = BackupResourceAsyncio(self._index_api) + return self._backup_resource diff --git a/pinecone/db_control/models/__init__.py b/pinecone/db_control/models/__init__.py index 34003bfe..66568de3 100644 --- a/pinecone/db_control/models/__init__.py +++ 
b/pinecone/db_control/models/__init__.py @@ -6,6 +6,11 @@ from .collection_list import CollectionList from .index_model import IndexModel from ...inference.models.index_embed import IndexEmbed +from .backup_model import BackupModel +from .backup_list import BackupList +from .restore_job_model import RestoreJobModel +from .restore_job_list import RestoreJobList + __all__ = [ "CollectionDescription", @@ -17,4 +22,8 @@ "CollectionList", "IndexModel", "IndexEmbed", + "BackupModel", + "BackupList", + "RestoreJobModel", + "RestoreJobList", ] diff --git a/pinecone/db_control/models/backup_list.py b/pinecone/db_control/models/backup_list.py new file mode 100644 index 00000000..fe21c077 --- /dev/null +++ b/pinecone/db_control/models/backup_list.py @@ -0,0 +1,49 @@ +import json +from pinecone.core.openapi.db_control.model.backup_list import BackupList as OpenAPIBackupList +from .backup_model import BackupModel +from typing import List + + +class BackupList: + def __init__(self, backup_list: OpenAPIBackupList): + self._backup_list = backup_list + self._backups = [BackupModel(b) for b in self._backup_list.data] + + def names(self) -> List[str]: + return [i.name for i in self._backups] + + def __getitem__(self, key): + if isinstance(key, int): + return self._backups[key] + elif key == "data": + return self._backups + else: + # pagination and any other keys added in the future + return self._backup_list[key] + + def __getattr__(self, attr): + if attr == "data": + return self._backups + else: + # pagination and any other keys added in the future + return getattr(self._backup_list, attr) + + def __len__(self): + return len(self._backups) + + def __iter__(self): + return iter(self._backups) + + def __str__(self): + return str(self._backups) + + def __repr__(self): + raw_dict = self._backup_list.to_dict() + raw_dict["data"] = [i.to_dict() for i in self._backups] + + # Remove keys with value None + for key, value in list(raw_dict.items()): + if value is None: + del raw_dict[key] + + 
return json.dumps(raw_dict, indent=4) diff --git a/pinecone/db_control/models/backup_model.py b/pinecone/db_control/models/backup_model.py new file mode 100644 index 00000000..0d49d33e --- /dev/null +++ b/pinecone/db_control/models/backup_model.py @@ -0,0 +1,23 @@ +import json +from pinecone.core.openapi.db_control.model.backup_model import BackupModel as OpenAPIBackupModel +from pinecone.utils.repr_overrides import custom_serializer + + +class BackupModel: + def __init__(self, backup: OpenAPIBackupModel): + self._backup = backup + + def __str__(self): + return str(self._backup) + + def __getattr__(self, attr): + return getattr(self._backup, attr) + + def __getitem__(self, key): + return self.__getattr__(key) + + def __repr__(self): + return json.dumps(self.to_dict(), indent=4, default=custom_serializer) + + def to_dict(self): + return self._backup.to_dict() diff --git a/pinecone/db_control/models/index_model.py b/pinecone/db_control/models/index_model.py index 75ba1f30..a268df57 100644 --- a/pinecone/db_control/models/index_model.py +++ b/pinecone/db_control/models/index_model.py @@ -1,4 +1,6 @@ from pinecone.core.openapi.db_control.model.index_model import IndexModel as OpenAPIIndexModel +import json +from pinecone.utils.repr_overrides import custom_serializer class IndexModel: @@ -15,5 +17,8 @@ def __getattr__(self, attr): def __getitem__(self, key): return self.__getattr__(key) + def __repr__(self): + return json.dumps(self.to_dict(), indent=4, default=custom_serializer) + def to_dict(self): return self.index.to_dict() diff --git a/pinecone/db_control/models/restore_job_list.py b/pinecone/db_control/models/restore_job_list.py new file mode 100644 index 00000000..7c80aa96 --- /dev/null +++ b/pinecone/db_control/models/restore_job_list.py @@ -0,0 +1,50 @@ +import json +from pinecone.core.openapi.db_control.model.restore_job_list import ( + RestoreJobList as OpenAPIRestoreJobList, +) +from .restore_job_model import RestoreJobModel + +from datetime import datetime 
+ + +def custom_serializer(obj): + if isinstance(obj, datetime): + return obj.isoformat() + else: + return str(obj) + + +class RestoreJobList: + def __init__(self, restore_job_list: OpenAPIRestoreJobList): + self._restore_job_list = restore_job_list + self._restore_jobs = [RestoreJobModel(r) for r in self._restore_job_list.data] + + def __getitem__(self, key): + if isinstance(key, int): + return self._restore_jobs[key] + elif key == "data": + return self._restore_jobs + else: + # pagination and any other keys added in the future + return self._restore_job_list[key] + + def __getattr__(self, attr): + if attr == "data": + return self._restore_jobs + else: + # pagination and any other keys added in the future + return getattr(self._restore_job_list, attr) + + def __len__(self): + return len(self._restore_jobs) + + def __iter__(self): + return iter(self._restore_jobs) + + def __str__(self): + return str(self._restore_jobs) + + def __repr__(self): + return json.dumps( + [i.to_dict() for i in self._restore_jobs], indent=4, default=custom_serializer + ) diff --git a/pinecone/db_control/models/restore_job_model.py b/pinecone/db_control/models/restore_job_model.py new file mode 100644 index 00000000..1dc6902d --- /dev/null +++ b/pinecone/db_control/models/restore_job_model.py @@ -0,0 +1,25 @@ +import json +from pinecone.core.openapi.db_control.model.restore_job_model import ( + RestoreJobModel as OpenAPIRestoreJobModel, +) +from pinecone.utils.repr_overrides import custom_serializer + + +class RestoreJobModel: + def __init__(self, restore_job: OpenAPIRestoreJobModel): + self.restore_job = restore_job + + def __str__(self): + return str(self.restore_job) + + def __getattr__(self, attr): + return getattr(self.restore_job, attr) + + def __getitem__(self, key): + return self.__getattr__(key) + + def __repr__(self): + return json.dumps(self.to_dict(), indent=4, default=custom_serializer) + + def to_dict(self): + return self.restore_job.to_dict() diff --git 
a/pinecone/db_control/repr_overrides.py b/pinecone/db_control/repr_overrides.py index 714b8dfb..ce6e9611 100644 --- a/pinecone/db_control/repr_overrides.py +++ b/pinecone/db_control/repr_overrides.py @@ -1,5 +1,4 @@ -from pinecone.utils import install_json_repr_override -from pinecone.db_control.models.index_model import IndexModel +from pinecone.utils.repr_overrides import install_json_repr_override from pinecone.core.openapi.db_control.model.collection_model import CollectionModel @@ -12,5 +11,5 @@ def install_repr_overrides(): from pprint.pformat seems better for data plane objects such as lists of query results. """ - for model in [IndexModel, CollectionModel]: + for model in [CollectionModel]: install_json_repr_override(model) diff --git a/pinecone/db_control/request_factory.py b/pinecone/db_control/request_factory.py index 719f71a1..a5d298ca 100644 --- a/pinecone/db_control/request_factory.py +++ b/pinecone/db_control/request_factory.py @@ -29,7 +29,9 @@ ) from pinecone.core.openapi.db_control.model.pod_spec import PodSpec as PodSpecModel from pinecone.core.openapi.db_control.model.pod_spec_metadata_config import PodSpecMetadataConfig - +from pinecone.core.openapi.db_control.model.create_index_from_backup_request import ( + CreateIndexFromBackupRequest, +) from pinecone.db_control.models import ServerlessSpec, PodSpec, IndexModel, IndexEmbed from pinecone.db_control.enums import ( @@ -211,6 +213,21 @@ def create_index_for_model_request( return CreateIndexForModelRequest(**args) + @staticmethod + def create_index_from_backup_request( + name: str, + deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + tags: Optional[Dict[str, str]] = None, + ) -> CreateIndexFromBackupRequest: + if deletion_protection is not None: + dp = PineconeDBControlRequestFactory.__parse_deletion_protection(deletion_protection) + else: + dp = None + + tags_obj = PineconeDBControlRequestFactory.__parse_tags(tags) + + return 
CreateIndexFromBackupRequest(name=name, deletion_protection=dp, tags=tags_obj) + @staticmethod def configure_index_request( description: IndexModel, diff --git a/pinecone/db_control/resources/asyncio/backup.py b/pinecone/db_control/resources/asyncio/backup.py new file mode 100644 index 00000000..391da1e1 --- /dev/null +++ b/pinecone/db_control/resources/asyncio/backup.py @@ -0,0 +1,93 @@ +from typing import Optional + +from pinecone.core.openapi.db_control.api.manage_indexes_api import AsyncioManageIndexesApi +from pinecone.core.openapi.db_control.model.create_backup_request import CreateBackupRequest +from pinecone.db_control.models import BackupModel, BackupList +from pinecone.utils import parse_non_empty_args, require_kwargs + + +class BackupResourceAsyncio: + def __init__(self, index_api: AsyncioManageIndexesApi): + self._index_api = index_api + """ @private """ + + @require_kwargs + async def list( + self, + *, + index_name: Optional[str] = None, + limit: Optional[int] = 10, + pagination_token: Optional[str] = None, + ) -> BackupList: + """ + List backups for an index or for the project. + + Args: + index_name (str): The name of the index to list backups for. + limit (int): The maximum number of backups to return. + pagination_token (str): The pagination token to use for the next page of backups. + """ + if index_name is not None: + args = parse_non_empty_args( + [ + ("index_name", index_name), + ("limit", limit), + ("pagination_token", pagination_token), + ] + ) + result = await self._index_api.list_index_backups(**args) + return BackupList(result) + else: + args = parse_non_empty_args([("limit", limit), ("pagination_token", pagination_token)]) + result = await self._index_api.list_project_backups(**args) + return BackupList(result) + + @require_kwargs + async def create( + self, *, index_name: str, backup_name: str, description: str = "" + ) -> BackupModel: + """ + Create a backup for an index. 
+ + Args: + index_name (str): The name of the index to create a backup for. + backup_name (str): The name of the backup to create. + description (str): The description of the backup. + + Returns: + BackupModel: The created backup. + """ + req = CreateBackupRequest(name=backup_name, description=description) + result = await self._index_api.create_backup( + index_name=index_name, create_backup_request=req + ) + return BackupModel(result) + + @require_kwargs + async def describe(self, *, backup_id: str) -> BackupModel: + """ + Describe a backup. + + Args: + backup_id (str): The ID of the backup to describe. + + Returns: + BackupModel: The described backup. + """ + result = await self._index_api.describe_backup(backup_id=backup_id) + return BackupModel(result) + + @require_kwargs + async def get(self, *, backup_id: str) -> BackupModel: + """Alias for describe""" + return await self.describe(backup_id=backup_id) + + @require_kwargs + async def delete(self, *, backup_id: str) -> None: + """ + Delete a backup. + + Args: + backup_id (str): The ID of the backup to delete. 
+ """ + return await self._index_api.delete_backup(backup_id=backup_id) diff --git a/pinecone/db_control/resources/asyncio/index.py b/pinecone/db_control/resources/asyncio/index.py index 2d93ae01..b48ff99c 100644 --- a/pinecone/db_control/resources/asyncio/index.py +++ b/pinecone/db_control/resources/asyncio/index.py @@ -27,8 +27,8 @@ class IndexResourceAsyncio: def __init__(self, index_api, config): - self.index_api = index_api - self.config = config + self._index_api = index_api + self._config = config async def create( self, @@ -50,7 +50,7 @@ async def create( vector_type=vector_type, tags=tags, ) - resp = await self.index_api.create_index(create_index_request=req) + resp = await self._index_api.create_index(create_index_request=req) if timeout == -1: return IndexModel(resp) @@ -74,12 +74,28 @@ async def create_for_model( tags=tags, deletion_protection=deletion_protection, ) - resp = await self.index_api.create_index_for_model(req) + resp = await self._index_api.create_index_for_model(req) if timeout == -1: return IndexModel(resp) return await self.__poll_describe_index_until_ready(name, timeout) + async def create_from_backup( + self, + name: str, + backup_id: str, + deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + tags: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, + ) -> IndexModel: + req = PineconeDBControlRequestFactory.create_index_from_backup_request( + name=name, deletion_protection=deletion_protection, tags=tags + ) + await self._index_api.create_index_from_backup_operation( + backup_id=backup_id, create_index_from_backup_request=req + ) + return await self.__poll_describe_index_until_ready(name, timeout) + async def __poll_describe_index_until_ready(self, name: str, timeout: Optional[int] = None): description = None @@ -119,7 +135,7 @@ async def is_ready() -> bool: return description async def delete(self, name: str, timeout: Optional[int] = None): - await 
self.index_api.delete_index(name) + await self._index_api.delete_index(name) if timeout == -1: return @@ -141,11 +157,11 @@ async def delete(self, name: str, timeout: Optional[int] = None): ) async def list(self) -> IndexList: - response = await self.index_api.list_indexes() + response = await self._index_api.list_indexes() return IndexList(response) async def describe(self, name: str) -> IndexModel: - description = await self.index_api.describe_index(name) + description = await self._index_api.describe_index(name) return IndexModel(description) async def has(self, name: str) -> bool: @@ -172,4 +188,4 @@ async def configure( deletion_protection=deletion_protection, tags=tags, ) - await self.index_api.configure_index(name, configure_index_request=req) + await self._index_api.configure_index(name, configure_index_request=req) diff --git a/pinecone/db_control/resources/asyncio/restore_job.py b/pinecone/db_control/resources/asyncio/restore_job.py new file mode 100644 index 00000000..397a5050 --- /dev/null +++ b/pinecone/db_control/resources/asyncio/restore_job.py @@ -0,0 +1,56 @@ +from typing import Optional + +from pinecone.core.openapi.db_control.api.manage_indexes_api import AsyncioManageIndexesApi +from pinecone.db_control.models import RestoreJobModel, RestoreJobList +from pinecone.utils import parse_non_empty_args, require_kwargs + + +class RestoreJobResourceAsyncio: + def __init__(self, index_api: AsyncioManageIndexesApi): + self._index_api = index_api + """ @private """ + + @require_kwargs + async def get(self, *, job_id: str) -> RestoreJobModel: + """ + Get a restore job by ID. + + Args: + job_id (str): The ID of the restore job to get. + + Returns: + RestoreJobModel: The restore job. + """ + job = await self._index_api.describe_restore_job(job_id=job_id) + return RestoreJobModel(job) + + @require_kwargs + async def describe(self, *, job_id: str) -> RestoreJobModel: + """ + Get a restore job by ID. Alias for get. 
+ + Args: + job_id (str): The ID of the restore job to get. + + Returns: + RestoreJobModel: The restore job. + """ + return await self.get(job_id=job_id) + + @require_kwargs + async def list( + self, *, limit: Optional[int] = 10, pagination_token: Optional[str] = None + ) -> RestoreJobList: + """ + List all restore jobs. + + Args: + limit (int): The maximum number of restore jobs to return. + pagination_token (str): The pagination token to use for the next page of restore jobs. + + Returns: + List[RestoreJobModel]: The list of restore jobs. + """ + args = parse_non_empty_args([("limit", limit), ("pagination_token", pagination_token)]) + jobs = await self._index_api.list_restore_jobs(**args) + return RestoreJobList(jobs) diff --git a/pinecone/db_control/resources/sync/backup.py b/pinecone/db_control/resources/sync/backup.py new file mode 100644 index 00000000..123b33fb --- /dev/null +++ b/pinecone/db_control/resources/sync/backup.py @@ -0,0 +1,87 @@ +from typing import Optional + +from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi +from pinecone.core.openapi.db_control.model.create_backup_request import CreateBackupRequest +from pinecone.db_control.models import BackupModel, BackupList +from pinecone.utils import parse_non_empty_args, require_kwargs + + +class BackupResource: + def __init__(self, index_api: ManageIndexesApi): + self._index_api = index_api + """ @private """ + + @require_kwargs + def list( + self, + *, + index_name: Optional[str] = None, + limit: Optional[int] = 10, + pagination_token: Optional[str] = None, + ) -> BackupList: + """ + List backups for an index or for the project. + + Args: + index_name (str): The name of the index to list backups for. + limit (int): The maximum number of backups to return. + pagination_token (str): The pagination token to use for the next page of backups. 
+ """ + if index_name is not None: + args = parse_non_empty_args( + [ + ("index_name", index_name), + ("limit", limit), + ("pagination_token", pagination_token), + ] + ) + return BackupList(self._index_api.list_index_backups(**args)) + else: + args = parse_non_empty_args([("limit", limit), ("pagination_token", pagination_token)]) + return BackupList(self._index_api.list_project_backups(**args)) + + @require_kwargs + def create(self, *, index_name: str, backup_name: str, description: str = "") -> BackupModel: + """ + Create a backup for an index. + + Args: + index_name (str): The name of the index to create a backup for. + backup_name (str): The name of the backup to create. + description (str): The description of the backup. + + Returns: + BackupModel: The created backup. + """ + req = CreateBackupRequest(name=backup_name, description=description) + return BackupModel( + self._index_api.create_backup(index_name=index_name, create_backup_request=req) + ) + + @require_kwargs + def describe(self, *, backup_id: str) -> BackupModel: + """ + Describe a backup. + + Args: + backup_id (str): The ID of the backup to describe. + + Returns: + BackupModel: The described backup. + """ + return BackupModel(self._index_api.describe_backup(backup_id=backup_id)) + + @require_kwargs + def get(self, *, backup_id: str) -> BackupModel: + """Alias for describe""" + return self.describe(backup_id=backup_id) + + @require_kwargs + def delete(self, *, backup_id: str) -> None: + """ + Delete a backup. + + Args: + backup_id (str): The ID of the backup to delete. 
+ """ + return self._index_api.delete_backup(backup_id=backup_id) diff --git a/pinecone/db_control/resources/sync/index.py b/pinecone/db_control/resources/sync/index.py index 6ecf4cd2..d5e7d6e2 100644 --- a/pinecone/db_control/resources/sync/index.py +++ b/pinecone/db_control/resources/sync/index.py @@ -5,7 +5,7 @@ from pinecone.db_control.index_host_store import IndexHostStore from pinecone.db_control.models import ServerlessSpec, PodSpec, IndexModel, IndexList, IndexEmbed -from pinecone.utils import docslinks +from pinecone.utils import docslinks, require_kwargs from pinecone.db_control.enums import ( Metric, @@ -86,6 +86,41 @@ def create_for_model( return IndexModel(resp) return self.__poll_describe_index_until_ready(name, timeout) + @require_kwargs + def create_from_backup( + self, + *, + name: str, + backup_id: str, + deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + tags: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, + ) -> IndexModel: + """ + Create an index from a backup. + + Args: + name (str): The name of the index to create. + backup_id (str): The ID of the backup to create the index from. + deletion_protection (DeletionProtection): The deletion protection to use for the index. + tags (Dict[str, str]): The tags to use for the index. + timeout (int): The number of seconds to wait for the index to be ready. If -1, the function will return without polling for the index status to be ready. If None, the function will poll indefinitely for the index to be ready. + + Returns: + IndexModel: The created index. + """ + req = PineconeDBControlRequestFactory.create_index_from_backup_request( + name=name, deletion_protection=deletion_protection, tags=tags + ) + resp = self._index_api.create_index_from_backup_operation( + backup_id=backup_id, create_index_from_backup_request=req + ) + logger.info(f"Creating index from backup. 
Response: {resp}") + + if timeout == -1: + return self.describe(name=name) + return self.__poll_describe_index_until_ready(name, timeout) + def __poll_describe_index_until_ready(self, name: str, timeout: Optional[int] = None): description = None diff --git a/pinecone/db_control/resources/sync/restore_job.py b/pinecone/db_control/resources/sync/restore_job.py new file mode 100644 index 00000000..b314bc53 --- /dev/null +++ b/pinecone/db_control/resources/sync/restore_job.py @@ -0,0 +1,56 @@ +from typing import Optional + +from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi +from pinecone.db_control.models import RestoreJobModel, RestoreJobList +from pinecone.utils import parse_non_empty_args, require_kwargs + + +class RestoreJobResource: + def __init__(self, index_api: ManageIndexesApi): + self._index_api = index_api + """ @private """ + + @require_kwargs + def get(self, *, job_id: str) -> RestoreJobModel: + """ + Get a restore job by ID. + + Args: + job_id (str): The ID of the restore job to get. + + Returns: + RestoreJobModel: The restore job. + """ + job = self._index_api.describe_restore_job(job_id=job_id) + return RestoreJobModel(job) + + @require_kwargs + def describe(self, *, job_id: str) -> RestoreJobModel: + """ + Get a restore job by ID. Alias for get. + + Args: + job_id (str): The ID of the restore job to get. + + Returns: + RestoreJobModel: The restore job. + """ + return self.get(job_id=job_id) + + @require_kwargs + def list( + self, *, limit: Optional[int] = 10, pagination_token: Optional[str] = None + ) -> RestoreJobList: + """ + List all restore jobs. + + Args: + limit (int): The maximum number of restore jobs to return. + pagination_token (str): The pagination token to use for the next page of restore jobs. + + Returns: + List[RestoreJobModel]: The list of restore jobs. 
+ """ + args = parse_non_empty_args([("limit", limit), ("pagination_token", pagination_token)]) + jobs = self._index_api.list_restore_jobs(**args) + return RestoreJobList(jobs) diff --git a/pinecone/legacy_pinecone_interface.py b/pinecone/legacy_pinecone_interface.py index 0b097261..cb896022 100644 --- a/pinecone/legacy_pinecone_interface.py +++ b/pinecone/legacy_pinecone_interface.py @@ -10,6 +10,10 @@ CollectionList, IndexModel, IndexEmbed, + BackupModel, + BackupList, + RestoreJobModel, + RestoreJobList, ) from pinecone.db_control.enums import ( Metric, @@ -297,6 +301,36 @@ def create_index( """ pass + @abstractmethod + def create_index_from_backup( + self, + *, + name: str, + backup_id: str, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + tags: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, + ) -> "IndexModel": + """ + Create an index from a backup. + + Call `list_backups` to get a list of backups for your project. + + :param name: The name of the index to create. + :type name: str + :param backup_id: The ID of the backup to restore. + :type backup_id: str + :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. This setting can be changed with `configure_index`. + :type deletion_protection: Optional[Literal["enabled", "disabled"]] + :param tags: Tags are key-value pairs you can attach to indexes to better understand, organize, and identify your resources. Some example use cases include tagging indexes with the name of the model that generated the embeddings, the date the index was created, or the purpose of the index. + :type tags: Optional[Dict[str, str]] + :param timeout: Specify the number of seconds to wait until index is ready to receive data. If None, wait indefinitely; if >=0, time out after this many seconds; + if -1, return immediately and do not wait. + :return: A description of the index that was created. 
+ :rtype: IndexModel + """ + pass + @abstractmethod def create_index_for_model( self, @@ -701,6 +735,77 @@ def describe_collection(self, name: str): """ pass + @abstractmethod + def create_backup( + self, *, index_name: str, backup_name: str, description: str = "" + ) -> "BackupModel": + """Create a backup of an index. + + Args: + index_name (str): The name of the index to backup. + backup_name (str): The name to give the backup. + description (str): Optional description of the backup. + """ + pass + + @abstractmethod + def list_backups( + self, + *, + index_name: Optional[str] = None, + limit: Optional[int] = 10, + pagination_token: Optional[str] = None, + ) -> "BackupList": + """List backups. + + If index_name is provided, the backups will be filtered by index. If no index_name is provided, all backups in the project will be returned. + + Args: + index_name (str): The name of the index to list backups for. + limit (int): The maximum number of backups to return. + pagination_token (str): The pagination token to use for pagination. + """ + pass + + @abstractmethod + def describe_backup(self, *, backup_id: str) -> "BackupModel": + """Describe a backup. + + Args: + backup_id (str): The ID of the backup to describe. + """ + pass + + @abstractmethod + def delete_backup(self, *, backup_id: str) -> None: + """Delete a backup. + + Args: + backup_id (str): The ID of the backup to delete. + """ + pass + + @abstractmethod + def list_restore_jobs( + self, *, limit: Optional[int] = 10, pagination_token: Optional[str] = None + ) -> "RestoreJobList": + """List restore jobs. + + Args: + limit (int): The maximum number of restore jobs to return. + pagination_token (str): The pagination token to use for pagination. + """ + pass + + @abstractmethod + def describe_restore_job(self, *, job_id: str) -> "RestoreJobModel": + """Describe a restore job. + + Args: + job_id (str): The ID of the restore job to describe.
+ """ + pass + @abstractmethod def Index(self, name: str = "", host: str = "", **kwargs): """ diff --git a/pinecone/openapi_support/api_version.py b/pinecone/openapi_support/api_version.py index 4879ae90..de57ca38 100644 --- a/pinecone/openapi_support/api_version.py +++ b/pinecone/openapi_support/api_version.py @@ -2,4 +2,4 @@ # Do not edit this file manually. API_VERSION = "2025-04" -APIS_REPO_SHA = "483b3885439a51ef831b820bfa621e2c9515834f" +APIS_REPO_SHA = "4b1c83b3b6669e6596151a575c284ee2cf4977a7" diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py index e5bc112a..ae854129 100644 --- a/pinecone/pinecone.py +++ b/pinecone/pinecone.py @@ -7,7 +7,7 @@ from .legacy_pinecone_interface import LegacyPineconeDBControlInterface -from pinecone.utils import normalize_host, PluginAware, docslinks +from pinecone.utils import normalize_host, PluginAware, docslinks, require_kwargs from .langchain_import_warnings import _build_langchain_attribute_error_message logger = logging.getLogger(__name__) @@ -41,6 +41,10 @@ IndexList, CollectionList, IndexEmbed, + BackupModel, + BackupList, + RestoreJobModel, + RestoreJobList, ) @@ -212,6 +216,24 @@ def create_index_for_model( timeout=timeout, ) + @require_kwargs + def create_index_from_backup( + self, + *, + name: str, + backup_id: str, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + tags: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, + ) -> "IndexModel": + return self.db.index.create_from_backup( + name=name, + backup_id=backup_id, + deletion_protection=deletion_protection, + tags=tags, + timeout=timeout, + ) + def delete_index(self, name: str, timeout: Optional[int] = None): return self.db.index.delete(name=name, timeout=timeout) @@ -252,6 +274,44 @@ def delete_collection(self, name: str) -> None: def describe_collection(self, name: str): return self.db.collection.describe(name=name) + @require_kwargs + def create_backup( + self, *, index_name: str, backup_name: str, 
description: str = "" + ) -> "BackupModel": + return self.db.backup.create( + index_name=index_name, backup_name=backup_name, description=description + ) + + @require_kwargs + def list_backups( + self, + *, + index_name: Optional[str] = None, + limit: Optional[int] = 10, + pagination_token: Optional[str] = None, + ) -> "BackupList": + return self.db.backup.list( + index_name=index_name, limit=limit, pagination_token=pagination_token + ) + + @require_kwargs + def describe_backup(self, *, backup_id: str) -> "BackupModel": + return self.db.backup.describe(backup_id=backup_id) + + @require_kwargs + def delete_backup(self, *, backup_id: str) -> None: + return self.db.backup.delete(backup_id=backup_id) + + @require_kwargs + def list_restore_jobs( + self, *, limit: Optional[int] = 10, pagination_token: Optional[str] = None + ) -> "RestoreJobList": + return self.db.restore_job.list(limit=limit, pagination_token=pagination_token) + + @require_kwargs + def describe_restore_job(self, *, job_id: str) -> "RestoreJobModel": + return self.db.restore_job.describe(job_id=job_id) + @staticmethod def from_texts(*args, **kwargs): """@private""" diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index 278039e6..124ac854 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -4,7 +4,7 @@ from pinecone.config import PineconeConfig, ConfigBuilder -from pinecone.utils import normalize_host +from pinecone.utils import normalize_host, require_kwargs from pinecone.utils import docslinks from .pinecone_interface_asyncio import PineconeAsyncioDBControlInterface @@ -30,8 +30,12 @@ IndexList, CollectionList, IndexEmbed, + BackupModel, + BackupList, + RestoreJobModel, + RestoreJobList, ) - from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi + from pinecone.core.openapi.db_control.api.manage_indexes_api import AsyncioManageIndexesApi from pinecone.db_control.index_host_store import IndexHostStore logger = 
logging.getLogger(__name__) @@ -179,7 +183,7 @@ def index_host_store(self) -> "IndexHostStore": return self.db.index._index_host_store @property - def index_api(self) -> "ManageIndexesApi": + def index_api(self) -> "AsyncioManageIndexesApi": """@private""" warnings.warn( "The `index_api` property is deprecated. This warning will become an error in a future version of the Pinecone Python SDK.", @@ -231,6 +235,24 @@ async def create_index_for_model( timeout=timeout, ) + @require_kwargs + async def create_index_from_backup( + self, + *, + name: str, + backup_id: str, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + tags: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, + ) -> "IndexModel": + return await self.db.index.create_from_backup( + name=name, + backup_id=backup_id, + deletion_protection=deletion_protection, + tags=tags, + timeout=timeout, + ) + async def delete_index(self, name: str, timeout: Optional[int] = None): return await self.db.index.delete(name=name, timeout=timeout) @@ -271,6 +293,44 @@ async def delete_collection(self, name: str): async def describe_collection(self, name: str): return await self.db.collection.describe(name=name) + @require_kwargs + async def create_backup( + self, *, index_name: str, backup_name: str, description: str = "" + ) -> "BackupModel": + return await self.db.backup.create( + index_name=index_name, backup_name=backup_name, description=description + ) + + @require_kwargs + async def list_backups( + self, + *, + index_name: Optional[str] = None, + limit: Optional[int] = 10, + pagination_token: Optional[str] = None, + ) -> "BackupList": + return await self.db.backup.list( + index_name=index_name, limit=limit, pagination_token=pagination_token + ) + + @require_kwargs + async def describe_backup(self, *, backup_id: str) -> "BackupModel": + return await self.db.backup.describe(backup_id=backup_id) + + @require_kwargs + async def delete_backup(self, *, backup_id: str) -> None: + 
return await self.db.backup.delete(backup_id=backup_id) + + @require_kwargs + async def list_restore_jobs( + self, *, limit: Optional[int] = 10, pagination_token: Optional[str] = None + ) -> "RestoreJobList": + return await self.db.restore_job.list(limit=limit, pagination_token=pagination_token) + + @require_kwargs + async def describe_restore_job(self, *, job_id: str) -> "RestoreJobModel": + return await self.db.restore_job.describe(job_id=job_id) + def IndexAsyncio(self, host: str, **kwargs) -> "_IndexAsyncio": from pinecone.db_data import _IndexAsyncio diff --git a/pinecone/pinecone_interface_asyncio.py b/pinecone/pinecone_interface_asyncio.py index 31d1feba..6dfd953c 100644 --- a/pinecone/pinecone_interface_asyncio.py +++ b/pinecone/pinecone_interface_asyncio.py @@ -14,6 +14,10 @@ CollectionList, IndexModel, IndexEmbed, + BackupModel, + BackupList, + RestoreJobModel, + RestoreJobList, ) from pinecone.db_control.enums import ( Metric, @@ -482,6 +486,36 @@ async def main(): """ pass + @abstractmethod + def create_index_from_backup( + self, + *, + name: str, + backup_id: str, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + tags: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, + ) -> "IndexModel": + """ + Create an index from a backup. + + Call `list_backups` to get a list of backups for your project. + + :param name: The name of the index to create. + :type name: str + :param backup_id: The ID of the backup to restore. + :type backup_id: str + :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. This setting can be changed with `configure_index`. + :type deletion_protection: Optional[Literal["enabled", "disabled"]] + :param tags: Tags are key-value pairs you can attach to indexes to better understand, organize, and identify your resources. 
Some example use cases include tagging indexes with the name of the model that generated the embeddings, the date the index was created, or the purpose of the index. + :type tags: Optional[Dict[str, str]] + :param timeout: Specify the number of seconds to wait until index is ready to receive data. If None, wait indefinitely; if >=0, time out after this many seconds; + if -1, return immediately and do not wait. + :return: A description of the index that was created. + :rtype: IndexModel + """ + pass + @abstractmethod async def delete_index(self, name: str, timeout: Optional[int] = None): """ @@ -772,6 +806,77 @@ async def main(): """ pass + @abstractmethod + async def create_backup( + self, *, index_name: str, backup_name: str, description: str = "" + ) -> "BackupModel": + """Create a backup of an index. + + Args: + index_name (str): The name of the index to backup. + backup_name (str): The name to give the backup. + description (str): Optional description of the backup. + """ + pass + + @abstractmethod + async def list_backups( + self, + *, + index_name: Optional[str] = None, + limit: Optional[int] = 10, + pagination_token: Optional[str] = None, + ) -> "BackupList": + """List backups. + + If index_name is provided, the backups will be filtered by index. If no index_name is provided, all backups in the project will be returned. + + Args: + index_name (str): The name of the index to list backups for. + limit (int): The maximum number of backups to return. + pagination_token (str): The pagination token to use for pagination. + """ + pass + + @abstractmethod + async def describe_backup(self, *, backup_id: str) -> "BackupModel": + """Describe a backup. + + Args: + backup_id (str): The ID of the backup to describe. + """ + pass + + @abstractmethod + async def delete_backup(self, *, backup_id: str) -> None: + """Delete a backup. + + Args: + backup_id (str): The ID of the backup to delete.
+ """ + pass + + @abstractmethod + async def list_restore_jobs( + self, *, limit: Optional[int] = 10, pagination_token: Optional[str] = None + ) -> "RestoreJobList": + """List restore jobs. + + Args: + limit (int): The maximum number of restore jobs to return. + pagination_token (str): The pagination token to use for pagination. + """ + pass + + @abstractmethod + async def describe_restore_job(self, *, job_id: str) -> "RestoreJobModel": + """Describe a restore job. + + Args: + job_id (str): The ID of the restore job to describe. + """ + pass + @abstractmethod async def create_collection(self, name: str, source: str): """Create a collection from a pod-based index diff --git a/pinecone/scripts/repl.py b/pinecone/scripts/repl.py new file mode 100644 index 00000000..55f80c5c --- /dev/null +++ b/pinecone/scripts/repl.py @@ -0,0 +1,52 @@ +import code +import logging + + +def setup_logging(): + # Create a custom formatter + formatter = logging.Formatter( + fmt="%(asctime)s | %(levelname)-8s | %(message)s", datefmt="%Y-%m-%d %H:%M:%S" + ) + + # Create and configure the console handler + console_handler = logging.StreamHandler() + console_handler.setFormatter(formatter) + + # Configure the root logger + root_logger = logging.getLogger() + root_logger.setLevel(logging.INFO) + root_logger.addHandler(console_handler) + + return root_logger + + +def main(): + # Set up logging + logger = setup_logging() + logger.info("Initializing environment...") + + # You can add any setup code here, such as: + # - Setting environment variables + # - Importing commonly used modules + # - Loading configuration files + + # Start the interactive REPL + banner = """ + Welcome to the custom Python REPL! + Your initialization steps have been completed. 
+ """ + + # Create a custom namespace with any pre-loaded variables + namespace = { + "__name__": "__main__", + "__doc__": None, + "logger": logger, # Make logger available in REPL + # Add any other variables you want to have available in the REPL + } + + # Start the interactive console + code.interact(banner=banner, local=namespace) + + +if __name__ == "__main__": + main() diff --git a/pinecone/utils/__init__.py b/pinecone/utils/__init__.py index 3d72b5d6..33d286d8 100644 --- a/pinecone/utils/__init__.py +++ b/pinecone/utils/__init__.py @@ -17,6 +17,7 @@ from .error_handling import validate_and_convert_errors from .plugin_aware import PluginAware from .filter_dict import filter_dict +from .require_kwargs import require_kwargs __all__ = [ "PluginAware", @@ -36,4 +37,5 @@ "validate_and_convert_errors", "convert_enum_to_string", "filter_dict", + "require_kwargs", ] diff --git a/pinecone/utils/require_kwargs.py b/pinecone/utils/require_kwargs.py new file mode 100644 index 00000000..9321f468 --- /dev/null +++ b/pinecone/utils/require_kwargs.py @@ -0,0 +1,16 @@ +import functools +import inspect + + +def require_kwargs(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + if len(args) > 1: # First arg is self + param_names = list(inspect.signature(func).parameters.keys())[1:] # Skip self + raise TypeError( + f"{func.__name__}() requires keyword arguments. 
" + f"Please use {func.__name__}({', '.join(f'{name}=value' for name in param_names)})" + ) + return func(*args, **kwargs) + + return wrapper diff --git a/tests/integration/control/backup/__init__.py b/tests/integration/control/backup/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control/backup/conftest.py b/tests/integration/control/backup/conftest.py new file mode 100644 index 00000000..9798da27 --- /dev/null +++ b/tests/integration/control/backup/conftest.py @@ -0,0 +1,168 @@ +import pytest +import uuid +import time +import logging +import dotenv +from pinecone import Pinecone, NotFoundException, PineconeApiException +from ...helpers import generate_index_name, get_environment_var, index_tags as index_tags_helper + +dotenv.load_dotenv() + +logger = logging.getLogger(__name__) +""" @private """ + +# Generate a unique ID for the entire test run +RUN_ID = str(uuid.uuid4()) + + +@pytest.fixture() +def index_tags(request): + return index_tags_helper(request, RUN_ID) + + +@pytest.fixture() +def pc(): + api_key = get_environment_var("PINECONE_API_KEY") + return Pinecone( + api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} + ) + + +@pytest.fixture() +def serverless_cloud(): + return get_environment_var("SERVERLESS_CLOUD", "aws") + + +@pytest.fixture() +def serverless_region(): + return get_environment_var("SERVERLESS_REGION", "us-west-2") + + +@pytest.fixture() +def create_sl_index_params(index_name, serverless_cloud, serverless_region, index_tags): + spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} + return dict(name=index_name, dimension=10, metric="cosine", spec=spec, tags=index_tags) + + +@pytest.fixture() +def index_name(request): + test_name = request.node.name + return generate_index_name(test_name) + + +@pytest.fixture() +def ready_sl_index(pc, index_name, create_sl_index_params): + create_sl_index_params["timeout"] = None + 
pc.create_index(**create_sl_index_params) + yield index_name + pc.db.index.delete(name=index_name, timeout=-1) + + +def delete_with_retry(pc, index_name, retries=0, sleep_interval=5): + logger.debug( + "Deleting index " + + index_name + + ", retry " + + str(retries) + + ", next sleep interval " + + str(sleep_interval) + ) + try: + pc.db.index.delete(name=index_name, timeout=-1) + except NotFoundException: + pass + except PineconeApiException as e: + if e.error.code == "PRECONDITON_FAILED": + if retries > 5: + raise Exception("Unable to delete index " + index_name) + time.sleep(sleep_interval) + delete_with_retry(pc, index_name, retries + 1, sleep_interval * 2) + else: + logger.error(e.__class__) + logger.error(e) + raise Exception("Unable to delete index " + index_name) + except Exception as e: + logger.error(e.__class__) + logger.error(e) + raise Exception("Unable to delete index " + index_name) + + +@pytest.fixture(autouse=True) +def cleanup(pc, index_name): + yield + + try: + desc = pc.db.index.describe(name=index_name) + if desc.deletion_protection == "enabled": + logger.info(f"Disabling deletion protection for index: {index_name}") + pc.db.index.configure(name=index_name, deletion_protection="disabled") + logger.debug("Attempting to delete index with name: " + index_name) + pc.db.index.delete(name=index_name, timeout=-1) + except Exception: + pass + + for backup in pc.db.backup.list(): + logger.debug(f"Deleting backup: {backup.name}") + try: + pc.db.backup.delete(backup_id=backup.backup_id) + except Exception as e: + logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") + + +def pytest_sessionfinish(session, exitstatus): + """ + Hook that runs after all tests have completed. + This is a good place to clean up any resources that were created during the test session. 
+ """ + logger.info("Running final cleanup after all tests...") + + try: + pc = Pinecone() + indexes = pc.db.index.list() + test_indexes = [ + idx for idx in indexes if idx.tags is not None and idx.tags.get("test-run") == RUN_ID + ] + + logger.info(f"Indexes to delete: {[idx.name for idx in test_indexes]}") + + for idx in test_indexes: + if idx.deletion_protection == "enabled": + logger.info(f"Disabling deletion protection for index: {idx.name}") + pc.db.index.configure(name=idx.name, deletion_protection="disabled") + # Wait for index to be updated with status ready + logger.info(f"Waiting for index {idx.name} to be ready...") + timeout = 60 + while True and timeout > 0: + is_ready = pc.db.index.describe(name=idx.name).ready + if is_ready: + break + time.sleep(1) + timeout -= 1 + if timeout <= 0: + logger.warning(f"Index {idx.name} did not become ready in time") + else: + logger.info(f"Deletion protection is already disabled for index: {idx.name}") + + for idx in test_indexes: + try: + logger.info(f"Deleting index: {idx.name}") + pc.db.index.delete(name=idx.name, timeout=-1) + except Exception as e: + logger.warning(f"Failed to delete index {idx.name}: {str(e)}") + + backups = pc.db.backup.list() + if len(backups) > 0: + logger.info(f"Deleting {len(backups)} backups") + for backup in backups: + logger.debug(f"Deleting backup: {backup.name}") + try: + pc.db.backup.delete(backup_id=backup.backup_id) + except Exception as e: + logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") + else: + logger.info("No backups to delete") + + except Exception as e: + logger.error(f"Error during final cleanup: {str(e)}") + + logger.info("Final cleanup completed") diff --git a/tests/integration/control/backup/test_backup.py b/tests/integration/control/backup/test_backup.py new file mode 100644 index 00000000..6873c414 --- /dev/null +++ b/tests/integration/control/backup/test_backup.py @@ -0,0 +1,195 @@ +import pytest +import random +from ...helpers import random_string, 
class TestBackups:
    """Integration tests for the backup lifecycle (create / describe / list /
    restore / delete) via both the namespaced ``pc.db.*`` API and the legacy
    flat methods on the client.

    Both test methods previously duplicated the seeding loop and the
    backup-ready polling loop verbatim; those are extracted into private
    helpers so the two API surfaces are exercised by identical logic.
    """

    # Seed volume: BATCH_SIZE * NUM_BATCHES vectors total.
    BATCH_SIZE = 100
    NUM_BATCHES = 10

    def _seed_vectors(self, idx, dimension, namespace):
        """Upsert BATCH_SIZE * NUM_BATCHES random vectors into *namespace*."""
        for _ in range(self.NUM_BATCHES):
            idx.upsert(
                vectors=[
                    {"id": random_string(15), "values": [random.random() for _ in range(dimension)]}
                    for _ in range(self.BATCH_SIZE)
                ],
                namespace=namespace,
            )

    def _wait_for_backup_ready(self, describe_fn, backup_id, max_wait=5 * 60, interval=5):
        """Poll *describe_fn* until the backup reports status 'Ready'.

        Raises if the backup is not ready within *max_wait* seconds.
        """
        while True:
            desc = describe_fn(backup_id=backup_id)
            logger.info(f"Backup description: {desc}")
            if desc.status == "Ready":
                return desc
            if max_wait <= 0:
                raise Exception("Backup did not become ready in time")
            max_wait -= interval
            time.sleep(interval)

    def test_create_backup(self, pc, ready_sl_index, index_tags):
        desc = pc.db.index.describe(name=ready_sl_index)
        dimension = desc.dimension
        expected_count = self.BATCH_SIZE * self.NUM_BATCHES

        # Upsert some sample data into a fresh namespace.
        ns = random_string(10)
        idx = pc.Index(name=ready_sl_index)
        self._seed_vectors(idx, dimension, ns)

        poll_stats_for_namespace(idx=idx, namespace=ns, expected_count=expected_count)
        logger.debug("Sleeping for 180 seconds to ensure vectors are indexed")
        time.sleep(180)

        index_stats = idx.describe_index_stats()
        logger.debug(f"Index stats for index {ready_sl_index}: {index_stats}")

        backup_name = "backup-" + random_string(10)
        backup = pc.db.backup.create(backup_name=backup_name, index_name=ready_sl_index)
        assert backup.backup_id is not None
        assert backup.name == backup_name
        assert backup.source_index_name == ready_sl_index

        # Describe the backup
        backup_desc = pc.db.backup.describe(backup_id=backup.backup_id)
        assert backup_desc.name == backup_name
        assert backup_desc.backup_id == backup.backup_id
        assert backup_desc.source_index_name == ready_sl_index
        logger.info(f"Backup description: {backup_desc}")

        # Wait for the backup to be ready before proceeding
        self._wait_for_backup_ready(pc.db.backup.describe, backup.backup_id)

        # Verify that the backup shows in list
        backups_list = pc.db.backup.list(index_name=ready_sl_index)
        assert len(backups_list) >= 1
        assert any(b.name == backup_name for b in backups_list)
        assert any(b.backup_id == backup.backup_id for b in backups_list)
        assert any(b.source_index_name == ready_sl_index for b in backups_list)

        # Create index from backup
        new_index_name = "from-backup-" + random_string(10)
        new_index = pc.db.index.create_from_backup(
            name=new_index_name, backup_id=backup.backup_id, tags=index_tags
        )
        assert new_index.name == new_index_name
        assert new_index.tags is not None
        assert new_index.dimension == desc.dimension
        assert new_index.metric == desc.metric

        # Can list restore jobs
        restore_jobs = pc.db.restore_job.list(index_name=new_index_name)
        assert len(restore_jobs) == 1

        # Verify that the new index has the same data as the original index
        new_idx = pc.Index(name=new_index_name)
        stats = new_idx.describe_index_stats()
        logger.info(f"New index stats: {stats}")
        assert stats.namespaces[ns].vector_count == expected_count

        # Delete the new index, then the backup.
        pc.db.index.delete(name=new_index_name)
        pc.db.backup.delete(backup_id=backup.backup_id)

        # Verify that the backup is deleted
        with pytest.raises(Exception):
            pc.db.backup.describe(backup_id=backup.backup_id)

        # Verify that no backups remain
        backup_list = pc.db.backup.list()
        assert len(backup_list) == 0

    def test_create_backup_legacy_syntax(self, pc, ready_sl_index, index_tags):
        """Same flow as test_create_backup via the legacy flat client methods."""
        desc = pc.describe_index(name=ready_sl_index)
        dimension = desc.dimension
        expected_count = self.BATCH_SIZE * self.NUM_BATCHES

        # Upsert some sample data into a fresh namespace.
        ns = random_string(10)
        idx = pc.Index(name=ready_sl_index)
        self._seed_vectors(idx, dimension, ns)

        poll_stats_for_namespace(idx=idx, namespace=ns, expected_count=expected_count)
        logger.debug("Sleeping for 180 seconds to ensure vectors are indexed")
        time.sleep(180)

        index_stats = idx.describe_index_stats()
        logger.debug(f"Index stats for index {ready_sl_index}: {index_stats}")

        backup_name = "backup-" + random_string(10)
        backup = pc.create_backup(backup_name=backup_name, index_name=ready_sl_index)
        assert backup.backup_id is not None
        assert backup.name == backup_name
        assert backup.source_index_name == ready_sl_index

        # Describe the backup
        backup_desc = pc.describe_backup(backup_id=backup.backup_id)
        assert backup_desc.name == backup_name
        assert backup_desc.backup_id == backup.backup_id
        assert backup_desc.source_index_name == ready_sl_index
        logger.info(f"Backup description: {backup_desc}")

        # Wait for the backup to be ready before proceeding
        self._wait_for_backup_ready(pc.describe_backup, backup.backup_id)

        # Verify that the backup shows in list
        backups_list = pc.list_backups(index_name=ready_sl_index)
        assert len(backups_list) >= 1
        assert any(b.name == backup_name for b in backups_list)
        assert any(b.backup_id == backup.backup_id for b in backups_list)
        assert any(b.source_index_name == ready_sl_index for b in backups_list)

        # Create index from backup
        new_index_name = "from-backup-" + random_string(10)
        new_index = pc.create_index_from_backup(
            name=new_index_name, backup_id=backup.backup_id, tags=index_tags
        )
        assert new_index.name == new_index_name
        assert new_index.tags is not None
        assert new_index.dimension == desc.dimension
        assert new_index.metric == desc.metric

        # Can list restore jobs
        restore_jobs = pc.list_restore_jobs(index_name=new_index_name)
        assert len(restore_jobs) == 1

        # Verify that the new index has the same data as the original index
        new_idx = pc.Index(name=new_index_name)
        stats = new_idx.describe_index_stats()
        logger.info(f"New index stats: {stats}")
        assert stats.namespaces[ns].vector_count == expected_count

        # Delete the new index, then the backup.
        pc.delete_index(name=new_index_name)
        pc.delete_backup(backup_id=backup.backup_id)

        # Verify that the backup is deleted
        with pytest.raises(Exception):
            pc.describe_backup(backup_id=backup.backup_id)

        # Verify that no backups remain for this index
        backup_list = pc.list_backups(index_name=ready_sl_index)
        assert len(backup_list) == 0
@pytest.fixture()
def index_name(request):
    """Generate a unique, test-scoped index name from the current test's name."""
    test_name = request.node.name
    return generate_index_name(test_name)


@pytest.fixture()
def ready_sl_index(pc, index_name, create_sl_index_params):
    """Create a serverless index and block until ready; delete it on teardown."""
    create_sl_index_params["timeout"] = None
    pc.create_index(**create_sl_index_params)
    yield index_name
    pc.db.index.delete(name=index_name, timeout=-1)


def delete_with_retry(pc, index_name, retries=0, sleep_interval=5):
    """Delete *index_name*, retrying with exponential backoff while the API
    reports a precondition failure (index still settling).

    Gives up and raises after 5 retries or on any other API error.

    NOTE(review): the code string "PRECONDITON_FAILED" (missing second 'I') is
    used consistently across these conftests — confirm it matches the error
    code the API actually returns.
    """
    # Lazy %-args avoid building the message when DEBUG is disabled.
    logger.debug(
        "Deleting index %s, retry %s, next sleep interval %s",
        index_name,
        retries,
        sleep_interval,
    )
    try:
        pc.db.index.delete(name=index_name, timeout=-1)
    except NotFoundException:
        # Already gone; nothing to do.
        pass
    except PineconeApiException as e:
        if e.error.code == "PRECONDITON_FAILED":
            if retries > 5:
                raise Exception("Unable to delete index " + index_name)
            time.sleep(sleep_interval)
            delete_with_retry(pc, index_name, retries + 1, sleep_interval * 2)
        else:
            logger.error(e.__class__)
            logger.error(e)
            raise Exception("Unable to delete index " + index_name)
    except Exception as e:
        logger.error(e.__class__)
        logger.error(e)
        raise Exception("Unable to delete index " + index_name)


@pytest.fixture(autouse=True)
def cleanup(pc, index_name):
    """Best-effort teardown: delete the test index (disabling deletion
    protection first if needed) and any backups left in the project."""
    yield

    try:
        desc = pc.db.index.describe(name=index_name)
        if desc.deletion_protection == "enabled":
            logger.info(f"Disabling deletion protection for index: {index_name}")
            pc.db.index.configure(name=index_name, deletion_protection="disabled")
        logger.debug("Attempting to delete index with name: " + index_name)
        pc.db.index.delete(name=index_name, timeout=-1)
    except Exception:
        # Deliberately best-effort: the index may not exist for every test.
        pass

    for backup in pc.db.backup.list():
        logger.debug(f"Deleting backup: {backup.name}")
        try:
            pc.db.backup.delete(backup_id=backup.backup_id)
        except Exception as e:
            logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}")


def pytest_sessionfinish(session, exitstatus):
    """
    Hook that runs after all tests have completed.
    This is a good place to clean up any resources that were created during the test session.
    """
    logger.info("Running final cleanup after all tests...")

    try:
        pc = Pinecone()
        indexes = pc.db.index.list()
        # Only touch indexes tagged with this run's RUN_ID.
        test_indexes = [
            idx for idx in indexes if idx.tags is not None and idx.tags.get("test-run") == RUN_ID
        ]

        logger.info(f"Indexes to delete: {[idx.name for idx in test_indexes]}")

        for idx in test_indexes:
            if idx.deletion_protection == "enabled":
                logger.info(f"Disabling deletion protection for index: {idx.name}")
                pc.db.index.configure(name=idx.name, deletion_protection="disabled")
                # Wait for index to be updated with status ready.
                logger.info(f"Waiting for index {idx.name} to be ready...")
                timeout = 60
                # Idiom fix: "while True and timeout > 0" is just "while timeout > 0".
                while timeout > 0:
                    if pc.db.index.describe(name=idx.name).ready:
                        break
                    time.sleep(1)
                    timeout -= 1
                if timeout <= 0:
                    logger.warning(f"Index {idx.name} did not become ready in time")
            else:
                logger.info(f"Deletion protection is already disabled for index: {idx.name}")

        for idx in test_indexes:
            try:
                logger.info(f"Deleting index: {idx.name}")
                pc.db.index.delete(name=idx.name, timeout=-1)
            except Exception as e:
                logger.warning(f"Failed to delete index {idx.name}: {str(e)}")

        backups = pc.db.backup.list()
        if len(backups) > 0:
            logger.info(f"Deleting {len(backups)} backups")
            for backup in backups:
                logger.debug(f"Deleting backup: {backup.name}")
                try:
                    pc.db.backup.delete(backup_id=backup.backup_id)
                except Exception as e:
                    logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}")
        else:
            logger.info("No backups to delete")

    except Exception as e:
        logger.error(f"Error during final cleanup: {str(e)}")

    logger.info("Final cleanup completed")
class TestRestoreJobDescribe:
    """Exercise restore-job describe() via the namespaced and legacy APIs."""

    def test_describe_restore_job(self, pc: Pinecone):
        # Grab an existing job id from the listing (the backup tests create them).
        listing = pc.db.restore_job.list()
        assert len(listing.data) >= 1

        job_id = listing.data[0].restore_job_id
        job = pc.db.restore_job.describe(job_id=job_id)
        logger.debug(f"Restore job: {job}")

        assert job.restore_job_id == job_id
        assert job.backup_id is not None
        # Every field of the description carries its documented type.
        for attr, expected_type in [
            ("status", str),
            ("backup_id", str),
            ("completed_at", datetime),
            ("created_at", datetime),
            ("percent_complete", float),
            ("target_index_id", str),
            ("target_index_name", str),
        ]:
            assert isinstance(getattr(job, attr), expected_type)

    def test_describe_restore_job_legacy_syntax(self, pc: Pinecone):
        listing = pc.list_restore_jobs()
        assert len(listing.data) >= 1

        job_id = listing.data[0].restore_job_id
        job = pc.describe_restore_job(job_id=job_id)
        logger.debug(f"Restore job: {job}")

    def test_describe_restore_job_with_invalid_job_id(self, pc: Pinecone):
        with pytest.raises(PineconeApiException):
            pc.db.restore_job.describe(job_id="invalid")
class TestRestoreJobList:
    """List restore jobs, with and without pagination, via both API surfaces."""

    def test_list_restore_jobs_no_arguments(self, pc: Pinecone):
        result = pc.db.restore_job.list()
        assert result.data is not None
        logger.debug(f"Restore jobs count: {len(result.data)}")

        # This assumes the backup test has been run at least once
        # in the same project.
        assert len(result.data) >= 1

    def test_list_restore_jobs_with_optional_arguments(self, pc: Pinecone):
        first_page = pc.db.restore_job.list(limit=2)
        assert first_page.data is not None
        logger.debug(f"Restore jobs count: {len(first_page.data)}")
        assert len(first_page.data) <= 2

        # Pagination can only be exercised when a full page came back.
        if len(first_page.data) == 2:
            logger.debug(f"Restore jobs pagination: {first_page.pagination}")
            assert first_page.pagination is not None
            assert first_page.pagination.next is not None

            second_page = pc.db.restore_job.list(
                limit=2, pagination_token=first_page.pagination.next
            )
            assert second_page.data is not None
            assert len(second_page.data) <= 2

    def test_list_restore_jobs_legacy_syntax(self, pc: Pinecone):
        first_page = pc.list_restore_jobs(limit=2)
        assert first_page.data is not None
        logger.debug(f"Restore jobs count: {len(first_page.data)}")
        assert len(first_page.data) <= 2

        if len(first_page.data) == 2:
            logger.debug(f"Restore jobs pagination: {first_page.pagination}")
            assert first_page.pagination is not None
            assert first_page.pagination.next is not None

            second_page = pc.list_restore_jobs(
                limit=2, pagination_token=first_page.pagination.next
            )
            assert second_page.data is not None
            assert len(second_page.data) <= 2


class TestRestoreJobListErrors:
    """Invalid arguments should surface as client/API exceptions."""

    def test_list_restore_jobs_with_invalid_limit(self, pc: Pinecone):
        with pytest.raises(PineconeApiValueError):
            pc.db.restore_job.list(limit=-1)

    def test_list_restore_jobs_with_invalid_pagination_token(self, pc: Pinecone):
        with pytest.raises(PineconeApiException):
            pc.db.restore_job.list(pagination_token="invalid")
logger = logging.getLogger(__name__)


async def poll_for_freshness(asyncio_idx, target_namespace, target_vector_count):
    """Poll index stats until *target_namespace* holds at least
    *target_vector_count* vectors (or the whole index does, when the
    namespace is ""). Raises TimeoutError after 3 minutes."""
    max_wait_time = 60 * 3  # 3 minutes
    time_waited = 0
    wait_per_iteration = 5

    while True:
        stats = await asyncio_idx.describe_index_stats()
        logger.debug(
            "Polling for freshness on index %s. Current vector count: %s. Waiting for: %s",
            asyncio_idx,
            stats.total_vector_count,
            target_vector_count,
        )
        if target_namespace == "":
            if stats.total_vector_count >= target_vector_count:
                break
        else:
            if (
                target_namespace in stats.namespaces
                and stats.namespaces[target_namespace].vector_count >= target_vector_count
            ):
                break
        time_waited += wait_per_iteration
        if time_waited >= max_wait_time:
            raise TimeoutError(
                "Timeout waiting for index to have expected vector count of {}".format(
                    target_vector_count
                )
            )
        await asyncio.sleep(wait_per_iteration)

    return stats


async def wait_until(
    condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]],
    timeout: Optional[float] = 10.0,
    interval: float = 0.1,
) -> None:
    """
    Waits asynchronously until the given (async or sync) condition returns True or times out.

    Args:
        condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over.
        timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely.
        interval: Time in seconds between checks of the condition.

    Raises:
        asyncio.TimeoutError: If the condition is not met within the timeout period.
    """
    start_time = asyncio.get_event_loop().time()

    while True:
        result = await condition() if asyncio.iscoroutinefunction(condition) else condition()
        if result:
            return

        if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout:
            raise asyncio.TimeoutError("Condition not met within the timeout period.")

        # BUG FIX: the original computed remaining_time = None when timeout
        # was None and then fed it to a %.2f placeholder, which makes the
        # logging call fail to format (the message is never emitted).
        if timeout is not None:
            remaining_time = (start_time + timeout) - asyncio.get_event_loop().time()
            logger.debug(
                "Condition not met yet. Waiting for %.2f seconds. Timeout in %.2f seconds.",
                interval,
                remaining_time,
            )
        else:
            logger.debug("Condition not met yet. Waiting for %.2f seconds.", interval)
        await asyncio.sleep(interval)
def delete_with_retry(client, index_name, retries=0, sleep_interval=5):
    """Delete *index_name*, retrying with exponential backoff while the API
    reports a precondition failure.

    BUG FIX: the original did ``raise "Unable to delete index " + index_name``;
    raising a str is a TypeError in Python 3, so the intended error never
    surfaced. A real Exception is raised now, matching the sync conftest.

    NOTE(review): the code string "PRECONDITON_FAILED" (missing second 'I') is
    used consistently across these conftests — confirm it matches the API.
    """
    logger.info(
        f"Deleting index {index_name}, retry {retries}, next sleep interval {sleep_interval}"
    )
    try:
        client.delete_index(index_name, -1)
    except NotFoundException:
        # Already gone; nothing to do.
        pass
    except PineconeApiException as e:
        if e.error.code == "PRECONDITON_FAILED":
            if retries > 5:
                raise Exception("Unable to delete index " + index_name)
            time.sleep(sleep_interval)
            delete_with_retry(client, index_name, retries + 1, sleep_interval * 2)
        else:
            # Use the module logger instead of bare print() for consistency.
            logger.error(e.__class__)
            logger.error(e)
            raise Exception("Unable to delete index " + index_name)
    except Exception as e:
        logger.warning(f"Failed to delete index: {index_name}: {str(e)}")
        raise Exception("Unable to delete index " + index_name)


@pytest.fixture(autouse=True)
async def cleanup(client, index_name):
    """Best-effort teardown using the sync client: delete the test index and
    any backups left in the project."""
    yield

    try:
        # Consistency fix: the sync conftests in this suite access index
        # operations through ``.db.index`` (see the restore_job conftest);
        # the original used ``client.index.*`` here.
        desc = client.db.index.describe(name=index_name)
        if desc.deletion_protection == "enabled":
            logger.info(f"Disabling deletion protection for index: {index_name}")
            client.db.index.configure(name=index_name, deletion_protection="disabled")
        logger.debug("Attempting to delete index with name: " + index_name)
        client.db.index.delete(name=index_name, timeout=-1)
    except Exception as e:
        logger.warning(f"Failed to delete index: {index_name}: {str(e)}")

    for backup in client.db.backup.list():
        logger.debug(f"Deleting backup: {backup.name}")
        try:
            client.db.backup.delete(backup_id=backup.backup_id)
        except Exception as e:
            logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}")
@pytest.mark.asyncio
class TestBackups:
    """Async integration tests for the backup lifecycle via PineconeAsyncio,
    using both the namespaced ``pc.db.*`` API and the legacy flat methods.

    The seeding loop and the backup-ready polling loop were duplicated
    verbatim in both tests; they are extracted into private helpers,
    mirroring the sync TestBackups class.
    """

    BATCH_SIZE = 100
    NUM_BATCHES = 10

    async def _seed_vectors(self, idx, dimension, namespace):
        """Upsert BATCH_SIZE * NUM_BATCHES random vectors into *namespace*."""
        for _ in range(self.NUM_BATCHES):
            await idx.upsert(
                vectors=[
                    {
                        "id": random_string(15),
                        "values": [random.random() for _ in range(dimension)],
                    }
                    for _ in range(self.BATCH_SIZE)
                ],
                namespace=namespace,
            )

    async def _wait_for_backup_ready(self, describe_fn, backup_id, max_wait=5 * 60, interval=5):
        """Poll *describe_fn* until the backup reports status 'Ready'."""
        while True:
            desc = await describe_fn(backup_id=backup_id)
            logger.info(f"Backup description: {desc}")
            if desc.status == "Ready":
                return desc
            if max_wait <= 0:
                raise Exception("Backup did not become ready in time")
            max_wait -= interval
            await asyncio.sleep(interval)

    async def test_create_backup(self, ready_sl_index, index_tags):
        async with PineconeAsyncio() as pc:
            desc = await pc.db.index.describe(name=ready_sl_index)
            dimension = desc.dimension
            expected_count = self.BATCH_SIZE * self.NUM_BATCHES

            # Upsert some sample data into a fresh namespace.
            ns = random_string(10)
            async with pc.IndexAsyncio(host=desc.host) as idx:
                await self._seed_vectors(idx, dimension, ns)

                logger.debug("Sleeping for 180 seconds to ensure vectors are indexed")
                await asyncio.sleep(180)

                index_stats = await idx.describe_index_stats()
                logger.debug(f"Index stats for index {ready_sl_index}: {index_stats}")

            backup_name = "backup-" + random_string(10)
            backup = await pc.db.backup.create(backup_name=backup_name, index_name=ready_sl_index)
            assert backup.backup_id is not None
            assert backup.name == backup_name
            assert backup.source_index_name == ready_sl_index

            # Describe the backup
            backup_desc = await pc.db.backup.describe(backup_id=backup.backup_id)
            assert backup_desc.name == backup_name
            assert backup_desc.backup_id == backup.backup_id
            assert backup_desc.source_index_name == ready_sl_index
            logger.info(f"Backup description: {backup_desc}")

            # Wait for the backup to be ready before proceeding
            await self._wait_for_backup_ready(pc.db.backup.describe, backup.backup_id)

            # Verify that the backup shows in list
            backups_list = await pc.db.backup.list(index_name=ready_sl_index)
            assert len(backups_list) >= 1
            assert any(b.name == backup_name for b in backups_list)
            assert any(b.backup_id == backup.backup_id for b in backups_list)
            assert any(b.source_index_name == ready_sl_index for b in backups_list)

            # Create index from backup
            new_index_name = "from-backup-" + random_string(10)
            new_index = await pc.db.index.create_from_backup(
                name=new_index_name, backup_id=backup.backup_id, tags=index_tags
            )
            assert new_index.name == new_index_name
            assert new_index.tags is not None
            assert new_index.dimension == desc.dimension
            assert new_index.metric == desc.metric

            # Can list restore jobs
            restore_jobs = await pc.db.restore_job.list()
            assert len(restore_jobs) >= 1

            # Verify that the new index has the same data as the original index
            new_desc = await pc.db.index.describe(name=new_index_name)
            async with pc.IndexAsyncio(host=new_desc.host) as new_idx:
                stats = await new_idx.describe_index_stats()
                logger.info(f"New index stats: {stats}")
                assert stats.namespaces[ns].vector_count == expected_count

            # Delete the new index, then the backup.
            await pc.db.index.delete(name=new_index_name)
            await pc.db.backup.delete(backup_id=backup.backup_id)

            # Verify that the backup is deleted
            with pytest.raises(Exception):
                await pc.db.backup.describe(backup_id=backup.backup_id)

            # Verify that no backups remain
            backup_list = await pc.db.backup.list()
            assert len(backup_list) == 0

    async def test_create_backup_legacy_syntax(self, ready_sl_index, index_tags):
        """Same flow as test_create_backup via the legacy flat client methods."""
        async with PineconeAsyncio() as pc:
            desc = await pc.describe_index(name=ready_sl_index)
            dimension = desc.dimension
            expected_count = self.BATCH_SIZE * self.NUM_BATCHES

            # Upsert some sample data into a fresh namespace.
            ns = random_string(10)
            async with pc.IndexAsyncio(host=desc.host) as idx:
                await self._seed_vectors(idx, dimension, ns)

                logger.debug("Sleeping for 180 seconds to ensure vectors are indexed")
                await asyncio.sleep(180)

                index_stats = await idx.describe_index_stats()
                logger.debug(f"Index stats for index {ready_sl_index}: {index_stats}")

            backup_name = "backup-" + random_string(10)
            backup = await pc.create_backup(backup_name=backup_name, index_name=ready_sl_index)
            assert backup.backup_id is not None
            assert backup.name == backup_name
            assert backup.source_index_name == ready_sl_index

            # Describe the backup
            backup_desc = await pc.describe_backup(backup_id=backup.backup_id)
            assert backup_desc.name == backup_name
            assert backup_desc.backup_id == backup.backup_id
            assert backup_desc.source_index_name == ready_sl_index
            logger.info(f"Backup description: {backup_desc}")

            # Wait for the backup to be ready before proceeding
            await self._wait_for_backup_ready(pc.describe_backup, backup.backup_id)

            # Verify that the backup shows in list
            backups_list = await pc.list_backups(index_name=ready_sl_index)
            assert len(backups_list) >= 1
            assert any(b.name == backup_name for b in backups_list)
            assert any(b.backup_id == backup.backup_id for b in backups_list)
            assert any(b.source_index_name == ready_sl_index for b in backups_list)

            # Create index from backup
            new_index_name = "from-backup-" + random_string(10)
            new_index = await pc.create_index_from_backup(
                name=new_index_name, backup_id=backup.backup_id, tags=index_tags
            )
            assert new_index.name == new_index_name
            assert new_index.tags is not None
            assert new_index.dimension == desc.dimension
            assert new_index.metric == desc.metric

            # Can list restore jobs
            restore_jobs = await pc.list_restore_jobs()
            assert len(restore_jobs) >= 1

            # Verify that the new index has the same data as the original index
            new_desc = await pc.db.index.describe(name=new_index_name)
            async with pc.IndexAsyncio(host=new_desc.host) as new_idx:
                stats = await new_idx.describe_index_stats()
                logger.info(f"New index stats: {stats}")
                assert stats.namespaces[ns].vector_count == expected_count

            # Delete the new index, then the backup.
            await pc.delete_index(name=new_index_name)
            await pc.delete_backup(backup_id=backup.backup_id)

            # Verify that the backup is deleted
            with pytest.raises(Exception):
                await pc.describe_backup(backup_id=backup.backup_id)

            # Verify that no backups remain for this index
            backup_list = await pc.list_backups(index_name=ready_sl_index)
            assert len(backup_list) == 0
async def poll_for_freshness(asyncio_idx, target_namespace, target_vector_count):
    """Poll index stats until *target_namespace* holds at least
    *target_vector_count* vectors (or the whole index does, when the
    namespace is ""). Raises TimeoutError after 3 minutes."""
    max_wait_time = 60 * 3  # 3 minutes
    time_waited = 0
    wait_per_iteration = 5

    while True:
        stats = await asyncio_idx.describe_index_stats()
        logger.debug(
            "Polling for freshness on index %s. Current vector count: %s. Waiting for: %s",
            asyncio_idx,
            stats.total_vector_count,
            target_vector_count,
        )
        if target_namespace == "":
            if stats.total_vector_count >= target_vector_count:
                break
        else:
            if (
                target_namespace in stats.namespaces
                and stats.namespaces[target_namespace].vector_count >= target_vector_count
            ):
                break
        time_waited += wait_per_iteration
        if time_waited >= max_wait_time:
            raise TimeoutError(
                "Timeout waiting for index to have expected vector count of {}".format(
                    target_vector_count
                )
            )
        await asyncio.sleep(wait_per_iteration)

    return stats


async def wait_until(
    condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]],
    timeout: Optional[float] = 10.0,
    interval: float = 0.1,
) -> None:
    """
    Waits asynchronously until the given (async or sync) condition returns True or times out.

    Args:
        condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over.
        timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely.
        interval: Time in seconds between checks of the condition.

    Raises:
        asyncio.TimeoutError: If the condition is not met within the timeout period.
    """
    start_time = asyncio.get_event_loop().time()

    while True:
        result = await condition() if asyncio.iscoroutinefunction(condition) else condition()
        if result:
            return

        if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout:
            raise asyncio.TimeoutError("Condition not met within the timeout period.")

        # BUG FIX: the original computed remaining_time = None when timeout
        # was None and then fed it to a %.2f placeholder, which makes the
        # logging call fail to format (the message is never emitted).
        if timeout is not None:
            remaining_time = (start_time + timeout) - asyncio.get_event_loop().time()
            logger.debug(
                "Condition not met yet. Waiting for %.2f seconds. Timeout in %.2f seconds.",
                interval,
                remaining_time,
            )
        else:
            logger.debug("Condition not met yet. Waiting for %.2f seconds.", interval)
        await asyncio.sleep(interval)
def delete_with_retry(client, index_name, retries=0, sleep_interval=5):
    """Delete *index_name*, retrying with exponential backoff while the API
    reports a precondition failure.

    BUG FIX: the original did ``raise "Unable to delete index " + index_name``;
    raising a str is a TypeError in Python 3, so the intended error never
    surfaced. A real Exception is raised now, matching the sync conftest.

    NOTE(review): the code string "PRECONDITON_FAILED" (missing second 'I') is
    used consistently across these conftests — confirm it matches the API.
    """
    logger.info(
        f"Deleting index {index_name}, retry {retries}, next sleep interval {sleep_interval}"
    )
    try:
        client.delete_index(index_name, -1)
    except NotFoundException:
        # Already gone; nothing to do.
        pass
    except PineconeApiException as e:
        if e.error.code == "PRECONDITON_FAILED":
            if retries > 5:
                raise Exception("Unable to delete index " + index_name)
            time.sleep(sleep_interval)
            delete_with_retry(client, index_name, retries + 1, sleep_interval * 2)
        else:
            # Use the module logger instead of bare print() for consistency.
            logger.error(e.__class__)
            logger.error(e)
            raise Exception("Unable to delete index " + index_name)
    except Exception as e:
        logger.warning(f"Failed to delete index: {index_name}: {str(e)}")
        raise Exception("Unable to delete index " + index_name)


@pytest.fixture(autouse=True)
async def cleanup(client, index_name):
    """Best-effort teardown using the sync client: delete the test index and
    any backups left in the project."""
    yield

    try:
        # Consistency fix: the sync conftests in this suite access index
        # operations through ``.db.index``; the original used ``client.index.*``.
        desc = client.db.index.describe(name=index_name)
        if desc.deletion_protection == "enabled":
            logger.info(f"Disabling deletion protection for index: {index_name}")
            client.db.index.configure(name=index_name, deletion_protection="disabled")
        logger.debug("Attempting to delete index with name: " + index_name)
        client.db.index.delete(name=index_name, timeout=-1)
    except Exception as e:
        logger.warning(f"Failed to delete index: {index_name}: {str(e)}")

    for backup in client.db.backup.list():
        logger.debug(f"Deleting backup: {backup.name}")
        try:
            client.db.backup.delete(backup_id=backup.backup_id)
        except Exception as e:
            logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}")
PineconeApiException +import logging +from datetime import datetime + +logger = logging.getLogger(__name__) + + +@pytest.mark.asyncio +class TestRestoreJobDescribe: + async def test_describe_restore_job(self): + async with PineconeAsyncio() as pc: + jobs = await pc.db.restore_job.list() + assert len(jobs.data) >= 1 + + restore_job_id = jobs.data[0].restore_job_id + restore_job = await pc.db.restore_job.describe(job_id=restore_job_id) + logger.debug(f"Restore job: {restore_job}") + + assert restore_job.restore_job_id == restore_job_id + assert restore_job.backup_id is not None + assert isinstance(restore_job.status, str) + assert isinstance(restore_job.backup_id, str) + assert isinstance(restore_job.completed_at, datetime) + assert isinstance(restore_job.created_at, datetime) + assert isinstance(restore_job.percent_complete, float) + assert isinstance(restore_job.target_index_id, str) + assert isinstance(restore_job.target_index_name, str) + + async def test_describe_restore_job_legacy_syntax(self): + async with PineconeAsyncio() as pc: + jobs = await pc.list_restore_jobs() + assert len(jobs.data) >= 1 + + restore_job_id = jobs.data[0].restore_job_id + restore_job = await pc.describe_restore_job(job_id=restore_job_id) + logger.debug(f"Restore job: {restore_job}") + + async def test_describe_restore_job_with_invalid_job_id(self): + async with PineconeAsyncio() as pc: + with pytest.raises(PineconeApiException): + await pc.db.restore_job.describe(job_id="invalid") diff --git a/tests/integration/control_asyncio/restore_job/test_list.py b/tests/integration/control_asyncio/restore_job/test_list.py new file mode 100644 index 00000000..0e0814da --- /dev/null +++ b/tests/integration/control_asyncio/restore_job/test_list.py @@ -0,0 +1,67 @@ +import pytest +import logging +from pinecone import PineconeAsyncio, PineconeApiValueError, PineconeApiException + +logger = logging.getLogger(__name__) + + +@pytest.mark.asyncio +class TestRestoreJobList: + async def 
test_list_restore_jobs_no_arguments(self): + async with PineconeAsyncio() as pc: + restore_jobs = await pc.db.restore_job.list() + assert restore_jobs.data is not None + logger.debug(f"Restore jobs count: {len(restore_jobs.data)}") + + # This assumes the backup test has been run at least once + # in the same project. + assert len(restore_jobs.data) >= 1 + + async def test_list_restore_jobs_with_optional_arguments(self): + async with PineconeAsyncio() as pc: + restore_jobs = await pc.db.restore_job.list(limit=2) + assert restore_jobs.data is not None + logger.debug(f"Restore jobs count: {len(restore_jobs.data)}") + assert len(restore_jobs.data) <= 2 + + if len(restore_jobs.data) == 2: + logger.debug(f"Restore jobs pagination: {restore_jobs.pagination}") + assert restore_jobs.pagination is not None + assert restore_jobs.pagination.next is not None + + next_page = await pc.db.restore_job.list( + limit=2, pagination_token=restore_jobs.pagination.next + ) + assert next_page.data is not None + assert len(next_page.data) <= 2 + + async def test_list_restore_jobs_legacy_syntax(self): + async with PineconeAsyncio() as pc: + restore_jobs = await pc.list_restore_jobs(limit=2) + assert restore_jobs.data is not None + logger.debug(f"Restore jobs count: {len(restore_jobs.data)}") + assert len(restore_jobs.data) <= 2 + + if len(restore_jobs.data) == 2: + logger.debug(f"Restore jobs pagination: {restore_jobs.pagination}") + assert restore_jobs.pagination is not None + assert restore_jobs.pagination.next is not None + + next_page = await pc.list_restore_jobs( + limit=2, pagination_token=restore_jobs.pagination.next + ) + assert next_page.data is not None + assert len(next_page.data) <= 2 + + +@pytest.mark.asyncio +class TestRestoreJobListErrors: + async def test_list_restore_jobs_with_invalid_limit(self): + async with PineconeAsyncio() as pc: + with pytest.raises(PineconeApiValueError): + await pc.db.restore_job.list(limit=-1) + + async def 
test_list_restore_jobs_with_invalid_pagination_token(self): + async with PineconeAsyncio() as pc: + with pytest.raises(PineconeApiException): + await pc.db.restore_job.list(pagination_token="invalid") diff --git a/tests/integration/helpers/helpers.py b/tests/integration/helpers/helpers.py index d9990df4..4dbe7d22 100644 --- a/tests/integration/helpers/helpers.py +++ b/tests/integration/helpers/helpers.py @@ -91,6 +91,9 @@ def poll_stats_for_namespace( raise TimeoutError(f"Timed out waiting for namespace {namespace} to have vectors") else: total_time += delta_t + logger.debug( + f"Found {stats}. Waiting for {expected_count} vectors in namespace {namespace}." + ) time.sleep(delta_t) From 6efd33329e02a8807a4eeeb4af20b85c10bf6f2b Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Wed, 14 May 2025 15:25:41 -0400 Subject: [PATCH 32/48] Fix backup integration tests (#483) ## Problem The cleanup steps in the backup tests were deleting all backups in a project, which creates a problem when multiple jobs in a test matrix are running in parallel and backups may be suddenly deleted out from underneath a running test. ## Solution - Cleanup should only delete backups created with the current RUN_ID. This information is stored in index tags and is visible on the backup resource as well. - Cleanup duplication in conftest setup. It's easier to make these changes in one spot than in 6-8 spots. 
## Type of Change - [x] Bug fix (non-breaking change which fixes an issue) --- .../testing-integration-asyncio.yaml | 2 +- .github/workflows/testing-integration.yaml | 16 +- pinecone/db_control/resources/sync/backup.py | 2 +- tests/integration/control/backup/conftest.py | 168 ------------- .../control/collections/conftest.py | 136 ----------- tests/integration/control/index/conftest.py | 161 ------------- .../control/{backup => resources}/__init__.py | 0 .../backup}/__init__.py | 0 .../{ => resources}/backup/test_backup.py | 20 +- .../collections}/__init__.py | 0 .../{ => resources}/collections/helpers.py | 0 .../collections/test_dense_index.py | 2 +- .../integration/control/resources/conftest.py | 66 ++++++ .../index}/__init__.py | 0 .../{ => resources}/index/test_configure.py | 0 .../{ => resources}/index/test_create.py | 104 ++++----- .../{ => resources}/index/test_delete.py | 0 .../{ => resources}/index/test_describe.py | 20 +- .../control/{ => resources}/index/test_has.py | 8 +- .../{ => resources}/index/test_list.py | 6 +- .../resources/restore_job}/__init__.py | 0 .../restore_job/test_describe.py | 0 .../{ => resources}/restore_job/test_list.py | 0 .../control/restore_job/conftest.py | 168 ------------- .../control_asyncio/backup/conftest.py | 220 ------------------ tests/integration/control_asyncio/conftest.py | 75 ------ .../control_asyncio/index/conftest.py | 206 ---------------- .../{index => resources}/__init__.py | 0 .../backup}/__init__.py | 0 .../{ => resources}/backup/test_backup.py | 6 +- .../control_asyncio/resources/conftest.py | 66 ++++++ .../resources/index/__init__.py | 0 .../resources/index/conftest.py | 18 ++ .../{ => resources}/index/test_create.py | 66 +++--- .../resources/restore_job/__init__.py | 0 .../restore_job/test_describe.py | 0 .../{ => resources}/restore_job/test_list.py | 0 .../control_asyncio/restore_job/conftest.py | 220 ------------------ tests/integration/helpers/__init__.py | 3 + tests/integration/helpers/helpers.py | 150 
+++++++++++- 40 files changed, 429 insertions(+), 1480 deletions(-) delete mode 100644 tests/integration/control/backup/conftest.py delete mode 100644 tests/integration/control/collections/conftest.py delete mode 100644 tests/integration/control/index/conftest.py rename tests/integration/control/{backup => resources}/__init__.py (100%) rename tests/integration/control/{collections => resources/backup}/__init__.py (100%) rename tests/integration/control/{ => resources}/backup/test_backup.py (92%) rename tests/integration/control/{index => resources/collections}/__init__.py (100%) rename tests/integration/control/{ => resources}/collections/helpers.py (100%) rename tests/integration/control/{ => resources}/collections/test_dense_index.py (99%) create mode 100644 tests/integration/control/resources/conftest.py rename tests/integration/control/{restore_job => resources/index}/__init__.py (100%) rename tests/integration/control/{ => resources}/index/test_configure.py (100%) rename tests/integration/control/{ => resources}/index/test_create.py (77%) rename tests/integration/control/{ => resources}/index/test_delete.py (100%) rename tests/integration/control/{ => resources}/index/test_describe.py (65%) rename tests/integration/control/{ => resources}/index/test_has.py (63%) rename tests/integration/control/{ => resources}/index/test_list.py (84%) rename tests/integration/{control_asyncio/backup => control/resources/restore_job}/__init__.py (100%) rename tests/integration/control/{ => resources}/restore_job/test_describe.py (100%) rename tests/integration/control/{ => resources}/restore_job/test_list.py (100%) delete mode 100644 tests/integration/control/restore_job/conftest.py delete mode 100644 tests/integration/control_asyncio/backup/conftest.py delete mode 100644 tests/integration/control_asyncio/index/conftest.py rename tests/integration/control_asyncio/{index => resources}/__init__.py (100%) rename tests/integration/control_asyncio/{restore_job => 
resources/backup}/__init__.py (100%) rename tests/integration/control_asyncio/{ => resources}/backup/test_backup.py (97%) create mode 100644 tests/integration/control_asyncio/resources/conftest.py create mode 100644 tests/integration/control_asyncio/resources/index/__init__.py create mode 100644 tests/integration/control_asyncio/resources/index/conftest.py rename tests/integration/control_asyncio/{ => resources}/index/test_create.py (74%) create mode 100644 tests/integration/control_asyncio/resources/restore_job/__init__.py rename tests/integration/control_asyncio/{ => resources}/restore_job/test_describe.py (100%) rename tests/integration/control_asyncio/{ => resources}/restore_job/test_list.py (100%) delete mode 100644 tests/integration/control_asyncio/restore_job/conftest.py diff --git a/.github/workflows/testing-integration-asyncio.yaml b/.github/workflows/testing-integration-asyncio.yaml index 8c8fab60..eb5e3b91 100644 --- a/.github/workflows/testing-integration-asyncio.yaml +++ b/.github/workflows/testing-integration-asyncio.yaml @@ -51,6 +51,6 @@ jobs: include_asyncio: true include_dev: true - name: 'db_control asyncio' - run: poetry run pytest tests/integration/control_asyncio --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG + run: poetry run pytest tests/integration/control_asyncio/*.py --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' diff --git a/.github/workflows/testing-integration.yaml b/.github/workflows/testing-integration.yaml index fa153f7e..5175d4dc 100644 --- a/.github/workflows/testing-integration.yaml +++ b/.github/workflows/testing-integration.yaml @@ -7,7 +7,7 @@ jobs: name: Reorg tests runs-on: ubuntu-latest env: - PINECONE_DEBUG_CURL: 'true' + PINECONE_DEBUG_CURL: 'false' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client"}' strategy: @@ -15,13 +15,13 @@ jobs: matrix: python_version: [3.9, 
3.12] test_suite: - - tests/integration/control/index - - tests/integration/control/collections - - tests/integration/control/backup - - tests/integration/control/restore_job - - tests/integration/control_asyncio/index - - tests/integration/control_asyncio/backup - - tests/integration/control_asyncio/restore_job + - tests/integration/control/resources/index + - tests/integration/control/resources/collections + - tests/integration/control/resources/backup + - tests/integration/control/resources/restore_job + - tests/integration/control_asyncio/resources/index + - tests/integration/control_asyncio/resources/backup + - tests/integration/control_asyncio/resources/restore_job steps: - uses: actions/checkout@v4 - name: 'Set up Python ${{ matrix.python_version }}' diff --git a/pinecone/db_control/resources/sync/backup.py b/pinecone/db_control/resources/sync/backup.py index 123b33fb..dbc576e9 100644 --- a/pinecone/db_control/resources/sync/backup.py +++ b/pinecone/db_control/resources/sync/backup.py @@ -23,7 +23,7 @@ def list( List backups for an index or for the project. Args: - index_name (str): The name of the index to list backups for. + index_name (str): The name of the index to list backups for. If not provided, list all backups for the project. limit (int): The maximum number of backups to return. pagination_token (str): The pagination token to use for the next page of backups. 
""" diff --git a/tests/integration/control/backup/conftest.py b/tests/integration/control/backup/conftest.py deleted file mode 100644 index 9798da27..00000000 --- a/tests/integration/control/backup/conftest.py +++ /dev/null @@ -1,168 +0,0 @@ -import pytest -import uuid -import time -import logging -import dotenv -from pinecone import Pinecone, NotFoundException, PineconeApiException -from ...helpers import generate_index_name, get_environment_var, index_tags as index_tags_helper - -dotenv.load_dotenv() - -logger = logging.getLogger(__name__) -""" @private """ - -# Generate a unique ID for the entire test run -RUN_ID = str(uuid.uuid4()) - - -@pytest.fixture() -def index_tags(request): - return index_tags_helper(request, RUN_ID) - - -@pytest.fixture() -def pc(): - api_key = get_environment_var("PINECONE_API_KEY") - return Pinecone( - api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} - ) - - -@pytest.fixture() -def serverless_cloud(): - return get_environment_var("SERVERLESS_CLOUD", "aws") - - -@pytest.fixture() -def serverless_region(): - return get_environment_var("SERVERLESS_REGION", "us-west-2") - - -@pytest.fixture() -def create_sl_index_params(index_name, serverless_cloud, serverless_region, index_tags): - spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - return dict(name=index_name, dimension=10, metric="cosine", spec=spec, tags=index_tags) - - -@pytest.fixture() -def index_name(request): - test_name = request.node.name - return generate_index_name(test_name) - - -@pytest.fixture() -def ready_sl_index(pc, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = None - pc.create_index(**create_sl_index_params) - yield index_name - pc.db.index.delete(name=index_name, timeout=-1) - - -def delete_with_retry(pc, index_name, retries=0, sleep_interval=5): - logger.debug( - "Deleting index " - + index_name - + ", retry " - + str(retries) - + ", next sleep interval " - + str(sleep_interval) 
- ) - try: - pc.db.index.delete(name=index_name, timeout=-1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise Exception("Unable to delete index " + index_name) - time.sleep(sleep_interval) - delete_with_retry(pc, index_name, retries + 1, sleep_interval * 2) - else: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - except Exception as e: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - - -@pytest.fixture(autouse=True) -def cleanup(pc, index_name): - yield - - try: - desc = pc.db.index.describe(name=index_name) - if desc.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {index_name}") - pc.db.index.configure(name=index_name, deletion_protection="disabled") - logger.debug("Attempting to delete index with name: " + index_name) - pc.db.index.delete(name=index_name, timeout=-1) - except Exception: - pass - - for backup in pc.db.backup.list(): - logger.debug(f"Deleting backup: {backup.name}") - try: - pc.db.backup.delete(backup_id=backup.backup_id) - except Exception as e: - logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") - - -def pytest_sessionfinish(session, exitstatus): - """ - Hook that runs after all tests have completed. - This is a good place to clean up any resources that were created during the test session. 
- """ - logger.info("Running final cleanup after all tests...") - - try: - pc = Pinecone() - indexes = pc.db.index.list() - test_indexes = [ - idx for idx in indexes if idx.tags is not None and idx.tags.get("test-run") == RUN_ID - ] - - logger.info(f"Indexes to delete: {[idx.name for idx in test_indexes]}") - - for idx in test_indexes: - if idx.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {idx.name}") - pc.db.index.configure(name=idx.name, deletion_protection="disabled") - # Wait for index to be updated with status ready - logger.info(f"Waiting for index {idx.name} to be ready...") - timeout = 60 - while True and timeout > 0: - is_ready = pc.db.index.describe(name=idx.name).ready - if is_ready: - break - time.sleep(1) - timeout -= 1 - if timeout <= 0: - logger.warning(f"Index {idx.name} did not become ready in time") - else: - logger.info(f"Deletion protection is already disabled for index: {idx.name}") - - for idx in test_indexes: - try: - logger.info(f"Deleting index: {idx.name}") - pc.db.index.delete(name=idx.name, timeout=-1) - except Exception as e: - logger.warning(f"Failed to delete index {idx.name}: {str(e)}") - - backups = pc.db.backup.list() - if len(backups) > 0: - logger.info(f"Deleting {len(backups)} backups") - for backup in backups: - logger.debug(f"Deleting backup: {backup.name}") - try: - pc.db.backup.delete(backup_id=backup.backup_id) - except Exception as e: - logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") - else: - logger.info("No backups to delete") - - except Exception as e: - logger.error(f"Error during final cleanup: {str(e)}") - - logger.info("Final cleanup completed") diff --git a/tests/integration/control/collections/conftest.py b/tests/integration/control/collections/conftest.py deleted file mode 100644 index bb592cee..00000000 --- a/tests/integration/control/collections/conftest.py +++ /dev/null @@ -1,136 +0,0 @@ -import pytest -import uuid -import time -import logging 
-import dotenv -import os -from datetime import datetime -from pinecone import Pinecone, NotFoundException, PineconeApiException -from ...helpers import get_environment_var - -dotenv.load_dotenv() - -logger = logging.getLogger(__name__) -""" @private """ - -# Generate a unique ID for the entire test run -RUN_ID = str(uuid.uuid4()) - - -@pytest.fixture() -def index_tags(request): - test_name = request.node.name - if test_name is None: - test_name = "" - else: - test_name = test_name.replace(":", "_").replace("[", "_").replace("]", "_") - - tags = { - "test-suite": "pinecone-python-client", - "test-run": RUN_ID, - "test": test_name, - "created-at": datetime.now().strftime("%Y-%m-%d"), - } - - if os.getenv("USER"): - tags["user"] = os.getenv("USER") - return tags - - -@pytest.fixture() -def pc(): - api_key = get_environment_var("PINECONE_API_KEY") - return Pinecone( - api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} - ) - - -@pytest.fixture() -def pod_environment(): - return get_environment_var("PINECONE_ENVIRONMENT", "us-east1-gcp") - - -def delete_with_retry(pc, index_name, retries=0, sleep_interval=5): - logger.debug( - "Deleting index " - + index_name - + ", retry " - + str(retries) - + ", next sleep interval " - + str(sleep_interval) - ) - try: - pc.db.index.delete(name=index_name, timeout=-1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise Exception("Unable to delete index " + index_name) - time.sleep(sleep_interval) - delete_with_retry(pc, index_name, retries + 1, sleep_interval * 2) - else: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - except Exception as e: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - - -def pytest_sessionfinish(session, exitstatus): - """ - Hook that runs after all tests have completed. 
- This is a good place to clean up any resources that were created during the test session. - """ - logger.info("Running final cleanup after all collection tests...") - - try: - pc = Pinecone() - indexes = pc.db.index.list() - test_indexes = [ - idx for idx in indexes if idx.tags is not None and idx.tags.get("test-run") == RUN_ID - ] - - logger.info(f"Indexes to delete: {[idx.name for idx in test_indexes]}") - - for idx in test_indexes: - if idx.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {idx.name}") - pc.db.index.configure(name=idx.name, deletion_protection="disabled") - # Wait for index to be updated with status ready - logger.info(f"Waiting for index {idx.name} to be ready...") - timeout = 60 - while True and timeout > 0: - is_ready = pc.db.index.describe(name=idx.name).ready - if is_ready: - break - time.sleep(1) - timeout -= 1 - if timeout <= 0: - logger.warning(f"Index {idx.name} did not become ready in time") - else: - logger.info(f"Deletion protection is already disabled for index: {idx.name}") - - for idx in test_indexes: - try: - logger.info(f"Deleting index: {idx.name}") - pc.db.index.delete(name=idx.name, timeout=-1) - except Exception as e: - logger.warning(f"Failed to delete index {idx.name}: {str(e)}") - - collections = pc.db.collection.list() - logger.info(f"Collections to delete: {[col.name for col in collections]}") - - for col in collections: - try: - logger.info(f"Deleting collection: {col.name}") - pc.db.collection.delete(name=col.name) - except Exception as e: - logger.warning(f"Failed to delete collection {col.name}: {str(e)}") - - except Exception as e: - logger.error(f"Error during final cleanup: {str(e)}") - - logger.info("Final cleanup of collections tests completed") diff --git a/tests/integration/control/index/conftest.py b/tests/integration/control/index/conftest.py deleted file mode 100644 index 985c4bb6..00000000 --- a/tests/integration/control/index/conftest.py +++ /dev/null @@ -1,161 
+0,0 @@ -import pytest -import uuid -import time -import logging -import dotenv -from pinecone import Pinecone, NotFoundException, PineconeApiException -from ...helpers import generate_index_name, get_environment_var, index_tags as index_tags_helper - -dotenv.load_dotenv() - -logger = logging.getLogger(__name__) -""" @private """ - -# Generate a unique ID for the entire test run -RUN_ID = str(uuid.uuid4()) - - -@pytest.fixture() -def index_tags(request): - return index_tags_helper(request, RUN_ID) - - -@pytest.fixture() -def pc(): - api_key = get_environment_var("PINECONE_API_KEY") - return Pinecone( - api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} - ) - - -@pytest.fixture() -def pod_environment(): - return get_environment_var("PINECONE_ENVIRONMENT", "us-east1-gcp") - - -@pytest.fixture() -def serverless_cloud(): - return get_environment_var("SERVERLESS_CLOUD", "aws") - - -@pytest.fixture() -def serverless_region(): - return get_environment_var("SERVERLESS_REGION", "us-west-2") - - -@pytest.fixture() -def create_sl_index_params(index_name, serverless_cloud, serverless_region, index_tags): - spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - return dict(name=index_name, dimension=10, metric="cosine", spec=spec, tags=index_tags) - - -@pytest.fixture() -def index_name(request): - test_name = request.node.name - return generate_index_name(test_name) - - -@pytest.fixture() -def ready_sl_index(pc, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = None - pc.create_index(**create_sl_index_params) - yield index_name - pc.db.index.delete(name=index_name, timeout=-1) - - -@pytest.fixture() -def notready_sl_index(pc, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = -1 - pc.create_index(**create_sl_index_params) - yield index_name - - -def delete_with_retry(pc, index_name, retries=0, sleep_interval=5): - logger.debug( - "Deleting index " - + index_name - + ", 
retry " - + str(retries) - + ", next sleep interval " - + str(sleep_interval) - ) - try: - pc.db.index.delete(name=index_name, timeout=-1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise Exception("Unable to delete index " + index_name) - time.sleep(sleep_interval) - delete_with_retry(pc, index_name, retries + 1, sleep_interval * 2) - else: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - except Exception as e: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - - -@pytest.fixture(autouse=True) -def cleanup(pc, index_name): - yield - - try: - desc = pc.db.index.describe(name=index_name) - if desc.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {index_name}") - pc.db.index.configure(name=index_name, deletion_protection="disabled") - logger.debug("Attempting to delete index with name: " + index_name) - pc.db.index.delete(name=index_name, timeout=-1) - except Exception: - pass - - -def pytest_sessionfinish(session, exitstatus): - """ - Hook that runs after all tests have completed. - This is a good place to clean up any resources that were created during the test session. 
- """ - logger.info("Running final cleanup after all tests...") - - try: - pc = Pinecone() - indexes = pc.db.index.list() - test_indexes = [ - idx for idx in indexes if idx.tags is not None and idx.tags.get("test-run") == RUN_ID - ] - - logger.info(f"Indexes to delete: {[idx.name for idx in test_indexes]}") - - for idx in test_indexes: - if idx.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {idx.name}") - pc.db.index.configure(name=idx.name, deletion_protection="disabled") - # Wait for index to be updated with status ready - logger.info(f"Waiting for index {idx.name} to be ready...") - timeout = 60 - while True and timeout > 0: - is_ready = pc.db.index.describe(name=idx.name).ready - if is_ready: - break - time.sleep(1) - timeout -= 1 - if timeout <= 0: - logger.warning(f"Index {idx.name} did not become ready in time") - else: - logger.info(f"Deletion protection is already disabled for index: {idx.name}") - - for idx in test_indexes: - try: - logger.info(f"Deleting index: {idx.name}") - pc.db.index.delete(name=idx.name, timeout=-1) - except Exception as e: - logger.warning(f"Failed to delete index {idx.name}: {str(e)}") - - except Exception as e: - logger.error(f"Error during final cleanup: {str(e)}") - - logger.info("Final cleanup completed") diff --git a/tests/integration/control/backup/__init__.py b/tests/integration/control/resources/__init__.py similarity index 100% rename from tests/integration/control/backup/__init__.py rename to tests/integration/control/resources/__init__.py diff --git a/tests/integration/control/collections/__init__.py b/tests/integration/control/resources/backup/__init__.py similarity index 100% rename from tests/integration/control/collections/__init__.py rename to tests/integration/control/resources/backup/__init__.py diff --git a/tests/integration/control/backup/test_backup.py b/tests/integration/control/resources/backup/test_backup.py similarity index 92% rename from 
tests/integration/control/backup/test_backup.py rename to tests/integration/control/resources/backup/test_backup.py index 6873c414..f61df1e5 100644 --- a/tests/integration/control/backup/test_backup.py +++ b/tests/integration/control/resources/backup/test_backup.py @@ -1,14 +1,15 @@ import pytest import random -from ...helpers import random_string, poll_stats_for_namespace +from ....helpers import random_string, poll_stats_for_namespace import logging import time +from pinecone import Pinecone logger = logging.getLogger(__name__) class TestBackups: - def test_create_backup(self, pc, ready_sl_index, index_tags): + def test_create_backup(self, pc: Pinecone, ready_sl_index, index_tags): desc = pc.db.index.describe(name=ready_sl_index) dimension = desc.dimension @@ -78,8 +79,9 @@ def test_create_backup(self, pc, ready_sl_index, index_tags): assert new_index.metric == desc.metric # Can list restore jobs - restore_jobs = pc.db.restore_job.list(index_name=new_index_name) - assert len(restore_jobs) == 1 + logger.info("Listing restore jobs") + restore_jobs = pc.db.restore_job.list() + assert len(restore_jobs) >= 1, f"Expected at least one restore job, got {len(restore_jobs)}" # Verify that the new index has the same data as the original index new_idx = pc.Index(name=new_index_name) @@ -97,11 +99,7 @@ def test_create_backup(self, pc, ready_sl_index, index_tags): with pytest.raises(Exception): pc.db.backup.describe(backup_id=backup.backup_id) - # Verify that the new index is deleted - backup_list = pc.db.backup.list() - assert len(backup_list) == 0 - - def test_create_backup_legacy_syntax(self, pc, ready_sl_index, index_tags): + def test_create_backup_legacy_syntax(self, pc: Pinecone, ready_sl_index, index_tags): desc = pc.describe_index(name=ready_sl_index) dimension = desc.dimension @@ -171,8 +169,8 @@ def test_create_backup_legacy_syntax(self, pc, ready_sl_index, index_tags): assert new_index.metric == desc.metric # Can list restore jobs - restore_jobs = 
pc.list_restore_jobs(index_name=new_index_name) - assert len(restore_jobs) == 1 + restore_jobs = pc.list_restore_jobs() + assert len(restore_jobs) >= 1 # Verify that the new index has the same data as the original index new_idx = pc.Index(name=new_index_name) diff --git a/tests/integration/control/index/__init__.py b/tests/integration/control/resources/collections/__init__.py similarity index 100% rename from tests/integration/control/index/__init__.py rename to tests/integration/control/resources/collections/__init__.py diff --git a/tests/integration/control/collections/helpers.py b/tests/integration/control/resources/collections/helpers.py similarity index 100% rename from tests/integration/control/collections/helpers.py rename to tests/integration/control/resources/collections/helpers.py diff --git a/tests/integration/control/collections/test_dense_index.py b/tests/integration/control/resources/collections/test_dense_index.py similarity index 99% rename from tests/integration/control/collections/test_dense_index.py rename to tests/integration/control/resources/collections/test_dense_index.py index 58ad0832..6c76a962 100644 --- a/tests/integration/control/collections/test_dense_index.py +++ b/tests/integration/control/resources/collections/test_dense_index.py @@ -1,6 +1,6 @@ import time from pinecone import PodSpec -from ...helpers import generate_index_name, generate_collection_name +from ....helpers import generate_index_name, generate_collection_name import logging from .helpers import attempt_cleanup_collection, attempt_cleanup_index, random_vector diff --git a/tests/integration/control/resources/conftest.py b/tests/integration/control/resources/conftest.py new file mode 100644 index 00000000..93060a66 --- /dev/null +++ b/tests/integration/control/resources/conftest.py @@ -0,0 +1,66 @@ +import os +import pytest +import uuid +import logging +import dotenv +from pinecone import Pinecone, PodIndexEnvironment +from ...helpers import delete_indexes_from_run, 
delete_backups_from_run, default_create_index_params + +dotenv.load_dotenv() + +logger = logging.getLogger(__name__) +""" @private """ + +# Generate a unique ID for the entire test run +RUN_ID = str(uuid.uuid4()) + + +@pytest.fixture() +def pc(): + return Pinecone() + + +@pytest.fixture() +def create_index_params(request): + return default_create_index_params(request, RUN_ID) + + +@pytest.fixture() +def index_name(create_index_params): + return create_index_params["name"] + + +@pytest.fixture() +def index_tags(create_index_params): + return create_index_params["tags"] + + +@pytest.fixture +def pod_environment(): + return os.getenv("POD_ENVIRONMENT", PodIndexEnvironment.US_EAST1_AWS.value) + + +@pytest.fixture() +def ready_sl_index(pc, index_name, create_index_params): + create_index_params["timeout"] = None + pc.create_index(**create_index_params) + yield index_name + pc.db.index.delete(name=index_name, timeout=-1) + + +@pytest.fixture() +def notready_sl_index(pc, index_name, create_index_params): + pc.create_index(**create_index_params, timeout=-1) + yield index_name + + +def pytest_sessionfinish(session, exitstatus): + """ + Hook that runs after all tests have completed. + This is a good place to clean up any resources that were created during the test session. 
+ """ + logger.info("Running final cleanup after all tests...") + + pc = Pinecone() + delete_indexes_from_run(pc, RUN_ID) + delete_backups_from_run(pc, RUN_ID) diff --git a/tests/integration/control/restore_job/__init__.py b/tests/integration/control/resources/index/__init__.py similarity index 100% rename from tests/integration/control/restore_job/__init__.py rename to tests/integration/control/resources/index/__init__.py diff --git a/tests/integration/control/index/test_configure.py b/tests/integration/control/resources/index/test_configure.py similarity index 100% rename from tests/integration/control/index/test_configure.py rename to tests/integration/control/resources/index/test_configure.py diff --git a/tests/integration/control/index/test_create.py b/tests/integration/control/resources/index/test_create.py similarity index 77% rename from tests/integration/control/index/test_create.py rename to tests/integration/control/resources/index/test_create.py index a3aa4406..1591ecd5 100644 --- a/tests/integration/control/index/test_create.py +++ b/tests/integration/control/resources/index/test_create.py @@ -59,10 +59,10 @@ def test_create_infinite_wait(self, pc, index_name): assert resp.metric == "cosine" @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) - def test_create_default_index_with_metric(self, pc, create_sl_index_params, metric): - create_sl_index_params["metric"] = metric - pc.db.index.create(**create_sl_index_params) - desc = pc.db.index.describe(create_sl_index_params["name"]) + def test_create_default_index_with_metric(self, pc, create_index_params, metric): + create_index_params["metric"] = metric + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(create_index_params["name"]) if isinstance(metric, str): assert desc.metric == metric else: @@ -105,19 +105,19 @@ def test_create_with_enum_values( assert desc.tags.to_dict() == tags @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) - def 
test_create_dense_index_with_metric(self, pc, create_sl_index_params, metric): - create_sl_index_params["metric"] = metric - create_sl_index_params["vector_type"] = VectorType.DENSE - pc.db.index.create(**create_sl_index_params) - desc = pc.db.index.describe(create_sl_index_params["name"]) + def test_create_dense_index_with_metric(self, pc, create_index_params, metric): + create_index_params["metric"] = metric + create_index_params["vector_type"] = VectorType.DENSE + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(create_index_params["name"]) assert desc.metric == metric assert desc.vector_type == "dense" - def test_create_with_optional_tags(self, pc, create_sl_index_params): + def test_create_with_optional_tags(self, pc, create_index_params): tags = {"foo": "FOO", "bar": "BAR"} - create_sl_index_params["tags"] = tags - pc.db.index.create(**create_sl_index_params) - desc = pc.db.index.describe(create_sl_index_params["name"]) + create_index_params["tags"] = tags + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(create_index_params["name"]) assert desc.tags.to_dict() == tags @@ -211,76 +211,74 @@ def test_pod_index_does_not_support_sparse_vectors(self, pc, index_name, index_t class TestCreateServerlessIndexApiErrorCases: - def test_create_index_with_invalid_name(self, pc, create_sl_index_params): - create_sl_index_params["name"] = "Invalid-name" + def test_create_index_with_invalid_name(self, pc, create_index_params): + create_index_params["name"] = "Invalid-name" with pytest.raises(PineconeApiException): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) - def test_create_index_invalid_metric(self, pc, create_sl_index_params): - create_sl_index_params["metric"] = "invalid" + def test_create_index_invalid_metric(self, pc, create_index_params): + create_index_params["metric"] = "invalid" with pytest.raises(PineconeApiValueError): - pc.db.index.create(**create_sl_index_params) + 
pc.db.index.create(**create_index_params) - def test_create_index_with_invalid_neg_dimension(self, pc, create_sl_index_params): - create_sl_index_params["dimension"] = -1 + def test_create_index_with_invalid_neg_dimension(self, pc, create_index_params): + create_index_params["dimension"] = -1 with pytest.raises(PineconeApiValueError): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) - def test_create_index_that_already_exists(self, pc, create_sl_index_params): - pc.db.index.create(**create_sl_index_params) + def test_create_index_that_already_exists(self, pc, create_index_params): + pc.db.index.create(**create_index_params) with pytest.raises(PineconeApiException): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) class TestCreateServerlessIndexWithTimeout: - def test_create_index_default_timeout(self, pc, create_sl_index_params): - create_sl_index_params["timeout"] = None - pc.db.index.create(**create_sl_index_params) + def test_create_index_default_timeout(self, pc, create_index_params): + create_index_params["timeout"] = None + pc.db.index.create(**create_index_params) # Waits infinitely for index to be ready - desc = pc.db.index.describe(create_sl_index_params["name"]) + desc = pc.db.index.describe(create_index_params["name"]) assert desc.status.ready == True - def test_create_index_when_timeout_set(self, pc, create_sl_index_params): - create_sl_index_params["timeout"] = ( + def test_create_index_when_timeout_set(self, pc, create_index_params): + create_index_params["timeout"] = ( 1000 # effectively infinite, but different code path from None ) - pc.db.index.create(**create_sl_index_params) - desc = pc.db.index.describe(name=create_sl_index_params["name"]) + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(name=create_index_params["name"]) assert desc.status.ready == True - def test_create_index_with_negative_timeout(self, pc, create_sl_index_params): - 
create_sl_index_params["timeout"] = -1 - pc.db.index.create(**create_sl_index_params) - desc = pc.db.index.describe(create_sl_index_params["name"]) + def test_create_index_with_negative_timeout(self, pc, create_index_params): + create_index_params["timeout"] = -1 + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(create_index_params["name"]) # Returns immediately without waiting for index to be ready assert desc.status.ready in [False, True] class TestCreateIndexTypeErrorCases: - def test_create_index_with_invalid_str_dimension(self, pc, create_sl_index_params): - create_sl_index_params["dimension"] = "10" + def test_create_index_with_invalid_str_dimension(self, pc, create_index_params): + create_index_params["dimension"] = "10" with pytest.raises(PineconeApiTypeError): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) - def test_create_index_with_missing_dimension(self, pc, create_sl_index_params): - del create_sl_index_params["dimension"] + def test_create_index_with_missing_dimension(self, pc, create_index_params): + del create_index_params["dimension"] with pytest.raises(PineconeApiException): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) - def test_create_index_w_incompatible_options(self, pc, create_sl_index_params): - create_sl_index_params["pod_type"] = "p1.x2" - create_sl_index_params["environment"] = "us-east1-gcp" - create_sl_index_params["replicas"] = 2 + def test_create_index_w_incompatible_options(self, pc, create_index_params): + create_index_params["pod_type"] = "p1.x2" + create_index_params["environment"] = "us-east1-gcp" + create_index_params["replicas"] = 2 with pytest.raises(TypeError): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) @pytest.mark.parametrize("required_option", ["name", "spec", "dimension"]) - def test_create_with_missing_required_options( - self, pc, create_sl_index_params, 
required_option - ): - del create_sl_index_params[required_option] + def test_create_with_missing_required_options(self, pc, create_index_params, required_option): + del create_index_params[required_option] with pytest.raises(Exception) as e: - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) assert required_option.lower() in str(e.value).lower() diff --git a/tests/integration/control/index/test_delete.py b/tests/integration/control/resources/index/test_delete.py similarity index 100% rename from tests/integration/control/index/test_delete.py rename to tests/integration/control/resources/index/test_delete.py diff --git a/tests/integration/control/index/test_describe.py b/tests/integration/control/resources/index/test_describe.py similarity index 65% rename from tests/integration/control/index/test_describe.py rename to tests/integration/control/resources/index/test_describe.py index df7f5896..276176bf 100644 --- a/tests/integration/control/index/test_describe.py +++ b/tests/integration/control/resources/index/test_describe.py @@ -2,20 +2,20 @@ class TestDescribeIndex: - def test_describe_index_when_ready(self, pc, ready_sl_index, create_sl_index_params): + def test_describe_index_when_ready(self, pc, ready_sl_index, create_index_params): description = pc.db.index.describe(ready_sl_index) assert isinstance(description, IndexModel) assert description.name == ready_sl_index - assert description.dimension == create_sl_index_params["dimension"] - assert description.metric == create_sl_index_params["metric"] + assert description.dimension == create_index_params["dimension"] + assert description.metric == create_index_params["metric"] assert ( description.spec.serverless["cloud"] - == create_sl_index_params["spec"]["serverless"]["cloud"] + == create_index_params["spec"]["serverless"]["cloud"] ) assert ( description.spec.serverless["region"] - == create_sl_index_params["spec"]["serverless"]["region"] + == 
create_index_params["spec"]["serverless"]["region"] ) assert isinstance(description.host, str) @@ -25,20 +25,20 @@ def test_describe_index_when_ready(self, pc, ready_sl_index, create_sl_index_par assert description.status.state == "Ready" assert description.status.ready == True - def test_describe_index_when_not_ready(self, pc, notready_sl_index, create_sl_index_params): + def test_describe_index_when_not_ready(self, pc, notready_sl_index, create_index_params): description = pc.db.index.describe(notready_sl_index) assert isinstance(description, IndexModel) assert description.name == notready_sl_index - assert description.dimension == create_sl_index_params["dimension"] - assert description.metric == create_sl_index_params["metric"] + assert description.dimension == create_index_params["dimension"] + assert description.metric == create_index_params["metric"] assert ( description.spec.serverless["cloud"] - == create_sl_index_params["spec"]["serverless"]["cloud"] + == create_index_params["spec"]["serverless"]["cloud"] ) assert ( description.spec.serverless["region"] - == create_sl_index_params["spec"]["serverless"]["region"] + == create_index_params["spec"]["serverless"]["region"] ) assert isinstance(description.host, str) diff --git a/tests/integration/control/index/test_has.py b/tests/integration/control/resources/index/test_has.py similarity index 63% rename from tests/integration/control/index/test_has.py rename to tests/integration/control/resources/index/test_has.py index 1a356a99..8f55766f 100644 --- a/tests/integration/control/index/test_has.py +++ b/tests/integration/control/resources/index/test_has.py @@ -1,10 +1,10 @@ -from tests.integration.helpers import random_string +from ....helpers import random_string class TestHasIndex: - def test_index_exists_success(self, pc, create_sl_index_params): - name = create_sl_index_params["name"] - pc.db.index.create(**create_sl_index_params) + def test_index_exists_success(self, pc, create_index_params): + name = 
create_index_params["name"] + pc.db.index.create(**create_index_params) has_index = pc.db.index.has(name) assert has_index == True diff --git a/tests/integration/control/index/test_list.py b/tests/integration/control/resources/index/test_list.py similarity index 84% rename from tests/integration/control/index/test_list.py rename to tests/integration/control/resources/index/test_list.py index 4e217ea5..e45d15b9 100644 --- a/tests/integration/control/index/test_list.py +++ b/tests/integration/control/resources/index/test_list.py @@ -2,7 +2,7 @@ class TestListIndexes: - def test_list_indexes_includes_ready_indexes(self, pc, ready_sl_index, create_sl_index_params): + def test_list_indexes_includes_ready_indexes(self, pc, ready_sl_index, create_index_params): list_response = pc.db.index.list() assert len(list_response.indexes) != 0 assert isinstance(list_response.indexes[0], IndexModel) @@ -11,8 +11,8 @@ def test_list_indexes_includes_ready_indexes(self, pc, ready_sl_index, create_sl 0 ] assert created_index.name == ready_sl_index - assert created_index.dimension == create_sl_index_params["dimension"] - assert created_index.metric == create_sl_index_params["metric"] + assert created_index.dimension == create_index_params["dimension"] + assert created_index.metric == create_index_params["metric"] assert ready_sl_index in created_index.host def test_list_indexes_includes_not_ready_indexes(self, pc, notready_sl_index): diff --git a/tests/integration/control_asyncio/backup/__init__.py b/tests/integration/control/resources/restore_job/__init__.py similarity index 100% rename from tests/integration/control_asyncio/backup/__init__.py rename to tests/integration/control/resources/restore_job/__init__.py diff --git a/tests/integration/control/restore_job/test_describe.py b/tests/integration/control/resources/restore_job/test_describe.py similarity index 100% rename from tests/integration/control/restore_job/test_describe.py rename to 
tests/integration/control/resources/restore_job/test_describe.py diff --git a/tests/integration/control/restore_job/test_list.py b/tests/integration/control/resources/restore_job/test_list.py similarity index 100% rename from tests/integration/control/restore_job/test_list.py rename to tests/integration/control/resources/restore_job/test_list.py diff --git a/tests/integration/control/restore_job/conftest.py b/tests/integration/control/restore_job/conftest.py deleted file mode 100644 index 9798da27..00000000 --- a/tests/integration/control/restore_job/conftest.py +++ /dev/null @@ -1,168 +0,0 @@ -import pytest -import uuid -import time -import logging -import dotenv -from pinecone import Pinecone, NotFoundException, PineconeApiException -from ...helpers import generate_index_name, get_environment_var, index_tags as index_tags_helper - -dotenv.load_dotenv() - -logger = logging.getLogger(__name__) -""" @private """ - -# Generate a unique ID for the entire test run -RUN_ID = str(uuid.uuid4()) - - -@pytest.fixture() -def index_tags(request): - return index_tags_helper(request, RUN_ID) - - -@pytest.fixture() -def pc(): - api_key = get_environment_var("PINECONE_API_KEY") - return Pinecone( - api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} - ) - - -@pytest.fixture() -def serverless_cloud(): - return get_environment_var("SERVERLESS_CLOUD", "aws") - - -@pytest.fixture() -def serverless_region(): - return get_environment_var("SERVERLESS_REGION", "us-west-2") - - -@pytest.fixture() -def create_sl_index_params(index_name, serverless_cloud, serverless_region, index_tags): - spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - return dict(name=index_name, dimension=10, metric="cosine", spec=spec, tags=index_tags) - - -@pytest.fixture() -def index_name(request): - test_name = request.node.name - return generate_index_name(test_name) - - -@pytest.fixture() -def ready_sl_index(pc, index_name, create_sl_index_params): - 
create_sl_index_params["timeout"] = None - pc.create_index(**create_sl_index_params) - yield index_name - pc.db.index.delete(name=index_name, timeout=-1) - - -def delete_with_retry(pc, index_name, retries=0, sleep_interval=5): - logger.debug( - "Deleting index " - + index_name - + ", retry " - + str(retries) - + ", next sleep interval " - + str(sleep_interval) - ) - try: - pc.db.index.delete(name=index_name, timeout=-1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise Exception("Unable to delete index " + index_name) - time.sleep(sleep_interval) - delete_with_retry(pc, index_name, retries + 1, sleep_interval * 2) - else: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - except Exception as e: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - - -@pytest.fixture(autouse=True) -def cleanup(pc, index_name): - yield - - try: - desc = pc.db.index.describe(name=index_name) - if desc.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {index_name}") - pc.db.index.configure(name=index_name, deletion_protection="disabled") - logger.debug("Attempting to delete index with name: " + index_name) - pc.db.index.delete(name=index_name, timeout=-1) - except Exception: - pass - - for backup in pc.db.backup.list(): - logger.debug(f"Deleting backup: {backup.name}") - try: - pc.db.backup.delete(backup_id=backup.backup_id) - except Exception as e: - logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") - - -def pytest_sessionfinish(session, exitstatus): - """ - Hook that runs after all tests have completed. - This is a good place to clean up any resources that were created during the test session. 
- """ - logger.info("Running final cleanup after all tests...") - - try: - pc = Pinecone() - indexes = pc.db.index.list() - test_indexes = [ - idx for idx in indexes if idx.tags is not None and idx.tags.get("test-run") == RUN_ID - ] - - logger.info(f"Indexes to delete: {[idx.name for idx in test_indexes]}") - - for idx in test_indexes: - if idx.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {idx.name}") - pc.db.index.configure(name=idx.name, deletion_protection="disabled") - # Wait for index to be updated with status ready - logger.info(f"Waiting for index {idx.name} to be ready...") - timeout = 60 - while True and timeout > 0: - is_ready = pc.db.index.describe(name=idx.name).ready - if is_ready: - break - time.sleep(1) - timeout -= 1 - if timeout <= 0: - logger.warning(f"Index {idx.name} did not become ready in time") - else: - logger.info(f"Deletion protection is already disabled for index: {idx.name}") - - for idx in test_indexes: - try: - logger.info(f"Deleting index: {idx.name}") - pc.db.index.delete(name=idx.name, timeout=-1) - except Exception as e: - logger.warning(f"Failed to delete index {idx.name}: {str(e)}") - - backups = pc.db.backup.list() - if len(backups) > 0: - logger.info(f"Deleting {len(backups)} backups") - for backup in backups: - logger.debug(f"Deleting backup: {backup.name}") - try: - pc.db.backup.delete(backup_id=backup.backup_id) - except Exception as e: - logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") - else: - logger.info("No backups to delete") - - except Exception as e: - logger.error(f"Error during final cleanup: {str(e)}") - - logger.info("Final cleanup completed") diff --git a/tests/integration/control_asyncio/backup/conftest.py b/tests/integration/control_asyncio/backup/conftest.py deleted file mode 100644 index 3a7a5607..00000000 --- a/tests/integration/control_asyncio/backup/conftest.py +++ /dev/null @@ -1,220 +0,0 @@ -import pytest -import time -import random 
-import asyncio -import uuid -from ...helpers import get_environment_var, generate_index_name, index_tags as index_tags_helper -import logging -from typing import Callable, Optional, Awaitable, Union - -from pinecone import ( - CloudProvider, - AwsRegion, - ServerlessSpec, - PineconeApiException, - NotFoundException, -) - -logger = logging.getLogger(__name__) -""" @private """ - -# Generate a unique ID for the entire test run -RUN_ID = str(uuid.uuid4()) - - -@pytest.fixture() -def index_tags(request): - return index_tags_helper(request, RUN_ID) - - -logger = logging.getLogger(__name__) - - -def build_client(): - from pinecone import PineconeAsyncio - - return PineconeAsyncio() - - -@pytest.fixture(scope="session") -def client(): - # This returns the sync client. Not for use in tests - # but can be used to help with cleanup after test runs - from pinecone import Pinecone - - return Pinecone() - - -async def poll_for_freshness(asyncio_idx, target_namespace, target_vector_count): - max_wait_time = 60 * 3 # 3 minutes - time_waited = 0 - wait_per_iteration = 5 - - while True: - stats = await asyncio_idx.describe_index_stats() - logger.debug( - "Polling for freshness on index %s. Current vector count: %s. 
Waiting for: %s", - asyncio_idx, - stats.total_vector_count, - target_vector_count, - ) - if target_namespace == "": - if stats.total_vector_count >= target_vector_count: - break - else: - if ( - target_namespace in stats.namespaces - and stats.namespaces[target_namespace].vector_count >= target_vector_count - ): - break - time_waited += wait_per_iteration - if time_waited >= max_wait_time: - raise TimeoutError( - "Timeout waiting for index to have expected vector count of {}".format( - target_vector_count - ) - ) - await asyncio.sleep(wait_per_iteration) - - return stats - - -async def wait_until( - condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]], - timeout: Optional[float] = 10.0, - interval: float = 0.1, -) -> None: - """ - Waits asynchronously until the given (async or sync) condition returns True or times out. - - Args: - condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over. - timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely. - interval: Time in seconds between checks of the condition. - - Raises: - asyncio.TimeoutError: If the condition is not met within the timeout period. - """ - start_time = asyncio.get_event_loop().time() - - while True: - result = await condition() if asyncio.iscoroutinefunction(condition) else condition() - if result: - return - - if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: - raise asyncio.TimeoutError("Condition not met within the timeout period.") - - remaining_time = ( - (start_time + timeout) - asyncio.get_event_loop().time() - if timeout is not None - else None - ) - logger.debug( - "Condition not met yet. Waiting for %.2f seconds. 
Timeout in %.2f seconds.", - interval, - remaining_time, - ) - await asyncio.sleep(interval) - - -@pytest.fixture() -def serverless_cloud(): - return get_environment_var("SERVERLESS_CLOUD", "aws") - - -@pytest.fixture() -def serverless_region(): - return get_environment_var("SERVERLESS_REGION", "us-west-2") - - -@pytest.fixture() -def spec1(serverless_cloud, serverless_region): - return {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - - -@pytest.fixture() -def spec2(): - return ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1) - - -@pytest.fixture() -def spec3(): - return {"serverless": {"cloud": CloudProvider.AWS, "region": AwsRegion.US_EAST_1}} - - -@pytest.fixture() -def create_sl_index_params(index_name, serverless_cloud, serverless_region): - spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - return dict(name=index_name, dimension=10, metric="cosine", spec=spec) - - -@pytest.fixture() -def random_vector(): - return [random.uniform(0, 1) for _ in range(10)] - - -@pytest.fixture() -def index_name(request): - test_name = request.node.name - return generate_index_name(test_name) - - -@pytest.fixture() -def ready_sl_index(client, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = None - client.create_index(**create_sl_index_params) - yield index_name - client.delete_index(index_name, -1) - - -@pytest.fixture() -def notready_sl_index(client, index_name, create_sl_index_params): - client.create_index(**create_sl_index_params, timeout=-1) - yield index_name - - -def delete_with_retry(client, index_name, retries=0, sleep_interval=5): - logger.info( - f"Deleting index {index_name}, retry {retries}, next sleep interval {sleep_interval}" - ) - try: - client.delete_index(index_name, -1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise "Unable to delete index " + index_name - 
time.sleep(sleep_interval) - delete_with_retry(client, index_name, retries + 1, sleep_interval * 2) - else: - print(e.__class__) - print(e) - raise "Unable to delete index " + index_name - except Exception as e: - logger.warning(f"Failed to delete index: {index_name}: {str(e)}") - raise "Unable to delete index " + index_name - - -@pytest.fixture(autouse=True) -async def cleanup(client, index_name): - yield - - try: - desc = client.index.describe(name=index_name) - if desc.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {index_name}") - client.index.configure(name=index_name, deletion_protection="disabled") - logger.debug("Attempting to delete index with name: " + index_name) - client.index.delete(name=index_name, timeout=-1) - except Exception as e: - logger.warning(f"Failed to delete index: {index_name}: {str(e)}") - pass - - for backup in client.db.backup.list(): - logger.debug(f"Deleting backup: {backup.name}") - try: - client.db.backup.delete(backup_id=backup.backup_id) - except Exception as e: - logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") diff --git a/tests/integration/control_asyncio/conftest.py b/tests/integration/control_asyncio/conftest.py index 33c2b529..acbcce0b 100644 --- a/tests/integration/control_asyncio/conftest.py +++ b/tests/integration/control_asyncio/conftest.py @@ -1,10 +1,8 @@ import pytest import time import random -import asyncio from ..helpers import get_environment_var, generate_index_name import logging -from typing import Callable, Optional, Awaitable, Union from pinecone import ( CloudProvider, @@ -37,79 +35,6 @@ def build_pc(): return build_client -async def poll_for_freshness(asyncio_idx, target_namespace, target_vector_count): - max_wait_time = 60 * 3 # 3 minutes - time_waited = 0 - wait_per_iteration = 5 - - while True: - stats = await asyncio_idx.describe_index_stats() - logger.debug( - "Polling for freshness on index %s. Current vector count: %s. 
Waiting for: %s", - asyncio_idx, - stats.total_vector_count, - target_vector_count, - ) - if target_namespace == "": - if stats.total_vector_count >= target_vector_count: - break - else: - if ( - target_namespace in stats.namespaces - and stats.namespaces[target_namespace].vector_count >= target_vector_count - ): - break - time_waited += wait_per_iteration - if time_waited >= max_wait_time: - raise TimeoutError( - "Timeout waiting for index to have expected vector count of {}".format( - target_vector_count - ) - ) - await asyncio.sleep(wait_per_iteration) - - return stats - - -async def wait_until( - condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]], - timeout: Optional[float] = 10.0, - interval: float = 0.1, -) -> None: - """ - Waits asynchronously until the given (async or sync) condition returns True or times out. - - Args: - condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over. - timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely. - interval: Time in seconds between checks of the condition. - - Raises: - asyncio.TimeoutError: If the condition is not met within the timeout period. - """ - start_time = asyncio.get_event_loop().time() - - while True: - result = await condition() if asyncio.iscoroutinefunction(condition) else condition() - if result: - return - - if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: - raise asyncio.TimeoutError("Condition not met within the timeout period.") - - remaining_time = ( - (start_time + timeout) - asyncio.get_event_loop().time() - if timeout is not None - else None - ) - logger.debug( - "Condition not met yet. Waiting for %.2f seconds. 
Timeout in %.2f seconds.", - interval, - remaining_time, - ) - await asyncio.sleep(interval) - - @pytest.fixture() def serverless_cloud(): return get_environment_var("SERVERLESS_CLOUD", "aws") diff --git a/tests/integration/control_asyncio/index/conftest.py b/tests/integration/control_asyncio/index/conftest.py deleted file mode 100644 index ea17bc58..00000000 --- a/tests/integration/control_asyncio/index/conftest.py +++ /dev/null @@ -1,206 +0,0 @@ -import pytest -import time -import random -import asyncio -from ...helpers import get_environment_var, generate_index_name -import logging -from typing import Callable, Optional, Awaitable, Union - -from pinecone import ( - CloudProvider, - AwsRegion, - ServerlessSpec, - PineconeApiException, - NotFoundException, -) - -logger = logging.getLogger(__name__) - - -def build_client(): - from pinecone import PineconeAsyncio - - return PineconeAsyncio() - - -@pytest.fixture(scope="session") -def client(): - # This returns the sync client. Not for use in tests - # but can be used to help with cleanup after test runs - from pinecone import Pinecone - - return Pinecone() - - -@pytest.fixture(scope="session") -def build_pc(): - return build_client - - -async def poll_for_freshness(asyncio_idx, target_namespace, target_vector_count): - max_wait_time = 60 * 3 # 3 minutes - time_waited = 0 - wait_per_iteration = 5 - - while True: - stats = await asyncio_idx.describe_index_stats() - logger.debug( - "Polling for freshness on index %s. Current vector count: %s. 
Waiting for: %s", - asyncio_idx, - stats.total_vector_count, - target_vector_count, - ) - if target_namespace == "": - if stats.total_vector_count >= target_vector_count: - break - else: - if ( - target_namespace in stats.namespaces - and stats.namespaces[target_namespace].vector_count >= target_vector_count - ): - break - time_waited += wait_per_iteration - if time_waited >= max_wait_time: - raise TimeoutError( - "Timeout waiting for index to have expected vector count of {}".format( - target_vector_count - ) - ) - await asyncio.sleep(wait_per_iteration) - - return stats - - -async def wait_until( - condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]], - timeout: Optional[float] = 10.0, - interval: float = 0.1, -) -> None: - """ - Waits asynchronously until the given (async or sync) condition returns True or times out. - - Args: - condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over. - timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely. - interval: Time in seconds between checks of the condition. - - Raises: - asyncio.TimeoutError: If the condition is not met within the timeout period. - """ - start_time = asyncio.get_event_loop().time() - - while True: - result = await condition() if asyncio.iscoroutinefunction(condition) else condition() - if result: - return - - if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: - raise asyncio.TimeoutError("Condition not met within the timeout period.") - - remaining_time = ( - (start_time + timeout) - asyncio.get_event_loop().time() - if timeout is not None - else None - ) - logger.debug( - "Condition not met yet. Waiting for %.2f seconds. 
Timeout in %.2f seconds.", - interval, - remaining_time, - ) - await asyncio.sleep(interval) - - -@pytest.fixture() -def serverless_cloud(): - return get_environment_var("SERVERLESS_CLOUD", "aws") - - -@pytest.fixture() -def serverless_region(): - return get_environment_var("SERVERLESS_REGION", "us-west-2") - - -@pytest.fixture() -def spec1(serverless_cloud, serverless_region): - return {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - - -@pytest.fixture() -def spec2(): - return ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1) - - -@pytest.fixture() -def spec3(): - return {"serverless": {"cloud": CloudProvider.AWS, "region": AwsRegion.US_EAST_1}} - - -@pytest.fixture() -def create_sl_index_params(index_name, serverless_cloud, serverless_region): - spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - return dict(name=index_name, dimension=10, metric="cosine", spec=spec) - - -@pytest.fixture() -def random_vector(): - return [random.uniform(0, 1) for _ in range(10)] - - -@pytest.fixture() -def index_name(request): - test_name = request.node.name - return generate_index_name(test_name) - - -@pytest.fixture() -def ready_sl_index(client, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = None - client.create_index(**create_sl_index_params) - yield index_name - client.delete_index(index_name, -1) - - -@pytest.fixture() -def notready_sl_index(client, index_name, create_sl_index_params): - client.create_index(**create_sl_index_params, timeout=-1) - yield index_name - - -def delete_with_retry(client, index_name, retries=0, sleep_interval=5): - print( - "Deleting index " - + index_name - + ", retry " - + str(retries) - + ", next sleep interval " - + str(sleep_interval) - ) - try: - client.delete_index(index_name, -1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise "Unable to delete index " 
+ index_name - time.sleep(sleep_interval) - delete_with_retry(client, index_name, retries + 1, sleep_interval * 2) - else: - print(e.__class__) - print(e) - raise "Unable to delete index " + index_name - except Exception as e: - print(e.__class__) - print(e) - raise "Unable to delete index " + index_name - - -@pytest.fixture(autouse=True) -async def cleanup(client, index_name): - yield - - try: - logger.debug("Attempting to delete index with name: " + index_name) - client.index.delete(name=index_name, timeout=-1) - except Exception: - pass diff --git a/tests/integration/control_asyncio/index/__init__.py b/tests/integration/control_asyncio/resources/__init__.py similarity index 100% rename from tests/integration/control_asyncio/index/__init__.py rename to tests/integration/control_asyncio/resources/__init__.py diff --git a/tests/integration/control_asyncio/restore_job/__init__.py b/tests/integration/control_asyncio/resources/backup/__init__.py similarity index 100% rename from tests/integration/control_asyncio/restore_job/__init__.py rename to tests/integration/control_asyncio/resources/backup/__init__.py diff --git a/tests/integration/control_asyncio/backup/test_backup.py b/tests/integration/control_asyncio/resources/backup/test_backup.py similarity index 97% rename from tests/integration/control_asyncio/backup/test_backup.py rename to tests/integration/control_asyncio/resources/backup/test_backup.py index 47a67c54..76b720f4 100644 --- a/tests/integration/control_asyncio/backup/test_backup.py +++ b/tests/integration/control_asyncio/resources/backup/test_backup.py @@ -1,7 +1,7 @@ import pytest import random import asyncio -from ...helpers import random_string +from ....helpers import random_string import logging from pinecone import PineconeAsyncio @@ -103,10 +103,6 @@ async def test_create_backup(self, ready_sl_index, index_tags): with pytest.raises(Exception): await pc.db.backup.describe(backup_id=backup.backup_id) - # Verify that the new index is deleted - 
backup_list = await pc.db.backup.list() - assert len(backup_list) == 0 - async def test_create_backup_legacy_syntax(self, ready_sl_index, index_tags): async with PineconeAsyncio() as pc: desc = await pc.describe_index(name=ready_sl_index) diff --git a/tests/integration/control_asyncio/resources/conftest.py b/tests/integration/control_asyncio/resources/conftest.py new file mode 100644 index 00000000..f7135575 --- /dev/null +++ b/tests/integration/control_asyncio/resources/conftest.py @@ -0,0 +1,66 @@ +import pytest +import uuid +import logging +import dotenv +import os +from pinecone import Pinecone, PodIndexEnvironment +from ...helpers import delete_indexes_from_run, delete_backups_from_run, default_create_index_params + +dotenv.load_dotenv() + +logger = logging.getLogger(__name__) +""" @private """ + +# Generate a unique ID for the entire test run +RUN_ID = str(uuid.uuid4()) + + +@pytest.fixture() +def pc(): + return Pinecone() + + +@pytest.fixture +def pod_environment(): + return os.getenv("POD_ENVIRONMENT", PodIndexEnvironment.US_EAST1_AWS.value) + + +@pytest.fixture() +def create_index_params(request): + return default_create_index_params(request, RUN_ID) + + +@pytest.fixture() +def index_name(create_index_params): + return create_index_params["name"] + + +@pytest.fixture() +def index_tags(create_index_params): + return create_index_params["tags"] + + +@pytest.fixture() +def ready_sl_index(pc, index_name, create_index_params): + create_index_params["timeout"] = None + pc.create_index(**create_index_params) + yield index_name + pc.db.index.delete(name=index_name, timeout=-1) + + +@pytest.fixture() +def notready_sl_index(pc, index_name, create_index_params): + pc.create_index(**create_index_params, timeout=-1) + yield index_name + + +def pytest_sessionfinish(session, exitstatus): + """ + Hook that runs after all tests have completed. + This is a good place to clean up any resources that were created during the test session. 
+ """ + logger.info("Running final cleanup after all tests...") + + pc = Pinecone() + delete_indexes_from_run(pc, RUN_ID) + delete_backups_from_run(pc, RUN_ID) diff --git a/tests/integration/control_asyncio/resources/index/__init__.py b/tests/integration/control_asyncio/resources/index/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control_asyncio/resources/index/conftest.py b/tests/integration/control_asyncio/resources/index/conftest.py new file mode 100644 index 00000000..de50f077 --- /dev/null +++ b/tests/integration/control_asyncio/resources/index/conftest.py @@ -0,0 +1,18 @@ +import pytest + +from pinecone import CloudProvider, AwsRegion, ServerlessSpec + + +@pytest.fixture() +def spec1(serverless_cloud, serverless_region): + return {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} + + +@pytest.fixture() +def spec2(): + return ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1) + + +@pytest.fixture() +def spec3(): + return {"serverless": {"cloud": CloudProvider.AWS, "region": AwsRegion.US_EAST_1}} diff --git a/tests/integration/control_asyncio/index/test_create.py b/tests/integration/control_asyncio/resources/index/test_create.py similarity index 74% rename from tests/integration/control_asyncio/index/test_create.py rename to tests/integration/control_asyncio/resources/index/test_create.py index b85cfebc..5a64c354 100644 --- a/tests/integration/control_asyncio/index/test_create.py +++ b/tests/integration/control_asyncio/resources/index/test_create.py @@ -49,11 +49,13 @@ async def test_create_infinite_wait(self, index_name, spec1): assert resp.metric == "cosine" @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) - async def test_create_default_index_with_metric(self, index_name, metric, spec1): + async def test_create_default_index_with_metric(self, index_name, metric, spec1, index_tags): pc = PineconeAsyncio() - await pc.db.index.create(name=index_name, 
dimension=10, spec=spec1, metric=metric) - desc = await pc.db.index.describe(index_name) + await pc.db.index.create( + name=index_name, dimension=10, spec=spec1, metric=metric, tags=index_tags + ) + desc = await pc.db.index.describe(name=index_name) if isinstance(metric, str): assert desc.metric == metric else: @@ -62,15 +64,15 @@ async def test_create_default_index_with_metric(self, index_name, metric, spec1) await pc.close() @pytest.mark.parametrize( - "metric_enum,vector_type_enum,dim,tags", + "metric_enum,vector_type_enum,dim", [ - (Metric.COSINE, VectorType.DENSE, 10, None), - (Metric.EUCLIDEAN, VectorType.DENSE, 10, {"env": "prod"}), - (Metric.DOTPRODUCT, VectorType.SPARSE, None, {"env": "dev"}), + (Metric.COSINE, VectorType.DENSE, 10), + (Metric.EUCLIDEAN, VectorType.DENSE, 10), + (Metric.DOTPRODUCT, VectorType.SPARSE, None), ], ) async def test_create_with_enum_values_and_tags( - self, index_name, metric_enum, vector_type_enum, dim, tags + self, index_name, metric_enum, vector_type_enum, dim, index_tags ): pc = PineconeAsyncio() args = { @@ -79,14 +81,14 @@ async def test_create_with_enum_values_and_tags( "vector_type": vector_type_enum, "deletion_protection": DeletionProtection.DISABLED, "spec": ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), - "tags": tags, + "tags": index_tags, } if dim is not None: args["dimension"] = dim await pc.db.index.create(**args) - desc = await pc.db.index.describe(index_name) + desc = await pc.db.index.describe(name=index_name) assert desc.metric == metric_enum.value assert desc.vector_type == vector_type_enum.value assert desc.dimension == dim @@ -94,48 +96,57 @@ async def test_create_with_enum_values_and_tags( assert desc.name == index_name assert desc.spec.serverless.cloud == "aws" assert desc.spec.serverless.region == "us-east-1" - if tags: - assert desc.tags.to_dict() == tags + assert desc.tags.to_dict() == index_tags + await pc.db.index.delete(name=index_name) await pc.close() 
@pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) - async def test_create_dense_index_with_metric(self, index_name, spec1, metric): + async def test_create_dense_index_with_metric(self, index_name, spec1, metric, index_tags): pc = PineconeAsyncio() await pc.create_index( - name=index_name, dimension=10, spec=spec1, metric=metric, vector_type=VectorType.DENSE + name=index_name, + dimension=10, + spec=spec1, + metric=metric, + vector_type=VectorType.DENSE, + tags=index_tags, ) - desc = await pc.db.index.describe(index_name) + desc = await pc.db.index.describe(name=index_name) assert desc.metric == metric assert desc.vector_type == "dense" await pc.close() - async def test_create_with_optional_tags(self, index_name, spec1): + async def test_create_with_optional_tags(self, index_name, spec1, index_tags): pc = PineconeAsyncio() - tags = {"foo": "FOO", "bar": "BAR"} - await pc.create_index(name=index_name, dimension=10, spec=spec1, tags=tags) + await pc.create_index(name=index_name, dimension=10, spec=spec1, tags=index_tags) - desc = await pc.db.index.describe(index_name) - assert desc.tags.to_dict() == tags + desc = await pc.db.index.describe(name=index_name) + assert desc.tags.to_dict() == index_tags + await pc.db.index.delete(name=index_name) await pc.close() - async def test_create_sparse_index(self, index_name, spec1): + async def test_create_sparse_index(self, index_name, spec1, index_tags): pc = PineconeAsyncio() await pc.create_index( - name=index_name, spec=spec1, metric=Metric.DOTPRODUCT, vector_type=VectorType.SPARSE + name=index_name, + spec=spec1, + metric=Metric.DOTPRODUCT, + vector_type=VectorType.SPARSE, + tags=index_tags, ) - desc = await pc.db.index.describe(index_name) + desc = await pc.db.index.describe(name=index_name) assert desc.vector_type == "sparse" assert desc.dimension is None - assert desc.vector_type == "sparse" assert desc.metric == "dotproduct" + await pc.db.index.delete(name=index_name) await pc.close() - async def 
test_create_with_deletion_protection(self, index_name, spec1): + async def test_create_with_deletion_protection(self, index_name, spec1, index_tags): pc = PineconeAsyncio() await pc.create_index( @@ -144,9 +155,10 @@ async def test_create_with_deletion_protection(self, index_name, spec1): metric=Metric.DOTPRODUCT, vector_type=VectorType.SPARSE, deletion_protection=DeletionProtection.ENABLED, + tags=index_tags, ) - desc = await pc.db.index.describe(index_name) + desc = await pc.db.index.describe(name=index_name) assert desc.deletion_protection == "enabled" assert desc.metric == "dotproduct" assert desc.vector_type == "sparse" @@ -157,6 +169,6 @@ async def test_create_with_deletion_protection(self, index_name, spec1): await pc.configure_index(index_name, deletion_protection=DeletionProtection.DISABLED) - desc2 = await pc.db.index.describe(index_name) + desc2 = await pc.db.index.describe(name=index_name) assert desc2.deletion_protection == "disabled" await pc.close() diff --git a/tests/integration/control_asyncio/resources/restore_job/__init__.py b/tests/integration/control_asyncio/resources/restore_job/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control_asyncio/restore_job/test_describe.py b/tests/integration/control_asyncio/resources/restore_job/test_describe.py similarity index 100% rename from tests/integration/control_asyncio/restore_job/test_describe.py rename to tests/integration/control_asyncio/resources/restore_job/test_describe.py diff --git a/tests/integration/control_asyncio/restore_job/test_list.py b/tests/integration/control_asyncio/resources/restore_job/test_list.py similarity index 100% rename from tests/integration/control_asyncio/restore_job/test_list.py rename to tests/integration/control_asyncio/resources/restore_job/test_list.py diff --git a/tests/integration/control_asyncio/restore_job/conftest.py b/tests/integration/control_asyncio/restore_job/conftest.py deleted file mode 100644 index 
3a7a5607..00000000 --- a/tests/integration/control_asyncio/restore_job/conftest.py +++ /dev/null @@ -1,220 +0,0 @@ -import pytest -import time -import random -import asyncio -import uuid -from ...helpers import get_environment_var, generate_index_name, index_tags as index_tags_helper -import logging -from typing import Callable, Optional, Awaitable, Union - -from pinecone import ( - CloudProvider, - AwsRegion, - ServerlessSpec, - PineconeApiException, - NotFoundException, -) - -logger = logging.getLogger(__name__) -""" @private """ - -# Generate a unique ID for the entire test run -RUN_ID = str(uuid.uuid4()) - - -@pytest.fixture() -def index_tags(request): - return index_tags_helper(request, RUN_ID) - - -logger = logging.getLogger(__name__) - - -def build_client(): - from pinecone import PineconeAsyncio - - return PineconeAsyncio() - - -@pytest.fixture(scope="session") -def client(): - # This returns the sync client. Not for use in tests - # but can be used to help with cleanup after test runs - from pinecone import Pinecone - - return Pinecone() - - -async def poll_for_freshness(asyncio_idx, target_namespace, target_vector_count): - max_wait_time = 60 * 3 # 3 minutes - time_waited = 0 - wait_per_iteration = 5 - - while True: - stats = await asyncio_idx.describe_index_stats() - logger.debug( - "Polling for freshness on index %s. Current vector count: %s. 
Waiting for: %s", - asyncio_idx, - stats.total_vector_count, - target_vector_count, - ) - if target_namespace == "": - if stats.total_vector_count >= target_vector_count: - break - else: - if ( - target_namespace in stats.namespaces - and stats.namespaces[target_namespace].vector_count >= target_vector_count - ): - break - time_waited += wait_per_iteration - if time_waited >= max_wait_time: - raise TimeoutError( - "Timeout waiting for index to have expected vector count of {}".format( - target_vector_count - ) - ) - await asyncio.sleep(wait_per_iteration) - - return stats - - -async def wait_until( - condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]], - timeout: Optional[float] = 10.0, - interval: float = 0.1, -) -> None: - """ - Waits asynchronously until the given (async or sync) condition returns True or times out. - - Args: - condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over. - timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely. - interval: Time in seconds between checks of the condition. - - Raises: - asyncio.TimeoutError: If the condition is not met within the timeout period. - """ - start_time = asyncio.get_event_loop().time() - - while True: - result = await condition() if asyncio.iscoroutinefunction(condition) else condition() - if result: - return - - if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: - raise asyncio.TimeoutError("Condition not met within the timeout period.") - - remaining_time = ( - (start_time + timeout) - asyncio.get_event_loop().time() - if timeout is not None - else None - ) - logger.debug( - "Condition not met yet. Waiting for %.2f seconds. 
Timeout in %.2f seconds.", - interval, - remaining_time, - ) - await asyncio.sleep(interval) - - -@pytest.fixture() -def serverless_cloud(): - return get_environment_var("SERVERLESS_CLOUD", "aws") - - -@pytest.fixture() -def serverless_region(): - return get_environment_var("SERVERLESS_REGION", "us-west-2") - - -@pytest.fixture() -def spec1(serverless_cloud, serverless_region): - return {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - - -@pytest.fixture() -def spec2(): - return ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1) - - -@pytest.fixture() -def spec3(): - return {"serverless": {"cloud": CloudProvider.AWS, "region": AwsRegion.US_EAST_1}} - - -@pytest.fixture() -def create_sl_index_params(index_name, serverless_cloud, serverless_region): - spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - return dict(name=index_name, dimension=10, metric="cosine", spec=spec) - - -@pytest.fixture() -def random_vector(): - return [random.uniform(0, 1) for _ in range(10)] - - -@pytest.fixture() -def index_name(request): - test_name = request.node.name - return generate_index_name(test_name) - - -@pytest.fixture() -def ready_sl_index(client, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = None - client.create_index(**create_sl_index_params) - yield index_name - client.delete_index(index_name, -1) - - -@pytest.fixture() -def notready_sl_index(client, index_name, create_sl_index_params): - client.create_index(**create_sl_index_params, timeout=-1) - yield index_name - - -def delete_with_retry(client, index_name, retries=0, sleep_interval=5): - logger.info( - f"Deleting index {index_name}, retry {retries}, next sleep interval {sleep_interval}" - ) - try: - client.delete_index(index_name, -1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise "Unable to delete index " + index_name - 
time.sleep(sleep_interval) - delete_with_retry(client, index_name, retries + 1, sleep_interval * 2) - else: - print(e.__class__) - print(e) - raise "Unable to delete index " + index_name - except Exception as e: - logger.warning(f"Failed to delete index: {index_name}: {str(e)}") - raise "Unable to delete index " + index_name - - -@pytest.fixture(autouse=True) -async def cleanup(client, index_name): - yield - - try: - desc = client.index.describe(name=index_name) - if desc.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {index_name}") - client.index.configure(name=index_name, deletion_protection="disabled") - logger.debug("Attempting to delete index with name: " + index_name) - client.index.delete(name=index_name, timeout=-1) - except Exception as e: - logger.warning(f"Failed to delete index: {index_name}: {str(e)}") - pass - - for backup in client.db.backup.list(): - logger.debug(f"Deleting backup: {backup.name}") - try: - client.db.backup.delete(backup_id=backup.backup_id) - except Exception as e: - logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") diff --git a/tests/integration/helpers/__init__.py b/tests/integration/helpers/__init__.py index 3b680b3d..afe12395 100644 --- a/tests/integration/helpers/__init__.py +++ b/tests/integration/helpers/__init__.py @@ -9,4 +9,7 @@ embedding_values, jsonprint, index_tags, + delete_backups_from_run, + delete_indexes_from_run, + default_create_index_params, ) diff --git a/tests/integration/helpers/helpers.py b/tests/integration/helpers/helpers.py index 4dbe7d22..8cb069dd 100644 --- a/tests/integration/helpers/helpers.py +++ b/tests/integration/helpers/helpers.py @@ -4,11 +4,14 @@ import random import string import logging +import uuid +import asyncio from typing import Any from datetime import datetime import json from pinecone.db_data import _Index -from typing import List +from pinecone import Pinecone, NotFoundException, PineconeApiException +from typing import 
List, Callable, Awaitable, Optional, Union logger = logging.getLogger(__name__) @@ -91,8 +94,9 @@ def poll_stats_for_namespace( raise TimeoutError(f"Timed out waiting for namespace {namespace} to have vectors") else: total_time += delta_t + logger.debug(f"Found index stats: {stats}.") logger.debug( - f"Found {stats}. Waiting for {expected_count} vectors in namespace {namespace}." + f"Waiting for {expected_count} vectors in namespace {namespace}. Found {stats.namespaces.get(namespace, {'vector_count': 0})['vector_count']} vectors." ) time.sleep(delta_t) @@ -145,3 +149,145 @@ def index_tags(request, run_id): if os.getenv("USER"): tags["user"] = os.getenv("USER") return tags + + +def delete_backups_from_run(pc: Pinecone, run_id: str): + for backup in pc.db.backup.list(): + if backup.tags is not None and backup.tags.get("test-run") == run_id: + pc.db.backup.delete(backup_id=backup.backup_id) + else: + logger.info(f"Backup {backup.name} is not a test backup from run {run_id}. Skipping.") + + +def delete_indexes_from_run(pc: Pinecone, run_id: str): + indexes_to_delete = [] + + for index in pc.db.index.list(): + if index.tags is not None and index.tags.get("test-run") == run_id: + logger.info(f"Found index {index.name} to delete") + if index.deletion_protection == "enabled": + logger.info(f"Index {index.name} has deletion protection enabled. Disabling...") + pc.update_index(index.name, deletion_protection="disabled") + else: + logger.debug( + f"Index {index.name} has deletion protection disabled. Proceeding to delete." + ) + + indexes_to_delete.append(index.name) + else: + logger.info(f"Index {index.name} is not a test index from run {run_id}. 
Skipping.") + + for index_name in indexes_to_delete: + delete_index_with_retry(client=pc, index_name=index_name, retries=3, sleep_interval=10) + + +def delete_index_with_retry( + client: Pinecone, index_name: str, retries: int = 0, sleep_interval: int = 5 +): + logger.info( + f"Deleting index {index_name}, retry {retries}, next sleep interval {sleep_interval}" + ) + try: + client.delete_index(index_name, -1) + except NotFoundException: + pass + except PineconeApiException as e: + if e.error.code == "PRECONDITON_FAILED": + if retries > 5: + raise "Unable to delete index " + index_name + time.sleep(sleep_interval) + delete_index_with_retry(client, index_name, retries + 1, sleep_interval * 2) + else: + print(e.__class__) + print(e) + raise "Unable to delete index " + index_name + except Exception as e: + logger.warning(f"Failed to delete index: {index_name}: {str(e)}") + raise "Unable to delete index " + index_name + + +async def asyncio_poll_for_freshness(asyncio_idx, target_namespace, target_vector_count): + max_wait_time = 60 * 3 # 3 minutes + time_waited = 0 + wait_per_iteration = 5 + + while True: + stats = await asyncio_idx.describe_index_stats() + logger.debug( + "Polling for freshness on index %s. Current vector count: %s. 
Waiting for: %s", + asyncio_idx, + stats.total_vector_count, + target_vector_count, + ) + if target_namespace == "": + if stats.total_vector_count >= target_vector_count: + break + else: + if ( + target_namespace in stats.namespaces + and stats.namespaces[target_namespace].vector_count >= target_vector_count + ): + break + time_waited += wait_per_iteration + if time_waited >= max_wait_time: + raise TimeoutError( + "Timeout waiting for index to have expected vector count of {}".format( + target_vector_count + ) + ) + await asyncio.sleep(wait_per_iteration) + + return stats + + +async def asyncio_wait_until( + condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]], + timeout: Optional[float] = 10.0, + interval: float = 0.1, +) -> None: + """ + Waits asynchronously until the given (async or sync) condition returns True or times out. + + Args: + condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over. + timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely. + interval: Time in seconds between checks of the condition. + + Raises: + asyncio.TimeoutError: If the condition is not met within the timeout period. + """ + start_time = asyncio.get_event_loop().time() + + while True: + result = await condition() if asyncio.iscoroutinefunction(condition) else condition() + if result: + return + + if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: + raise asyncio.TimeoutError("Condition not met within the timeout period.") + + remaining_time = ( + (start_time + timeout) - asyncio.get_event_loop().time() + if timeout is not None + else None + ) + logger.debug( + "Condition not met yet. Waiting for %.2f seconds. 
Timeout in %.2f seconds.", + interval, + remaining_time, + ) + await asyncio.sleep(interval) + + +def default_create_index_params(request, run_id): + github_actor = os.getenv("GITHUB_ACTOR", None) + user = os.getenv("USER", None) + index_owner = github_actor or user or "unknown" + + index_name = f"{index_owner}-{str(uuid.uuid4())}" + tags = index_tags(request, run_id) + cloud = get_environment_var("SERVERLESS_CLOUD", "aws") + region = get_environment_var("SERVERLESS_REGION", "us-west-2") + + spec = {"serverless": {"cloud": cloud, "region": region}} + return {"name": index_name, "dimension": 10, "metric": "cosine", "spec": spec, "tags": tags} From f5c19e9f12c8bb78b76e51f989a1e7dec4d871ff Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Wed, 14 May 2025 20:31:36 -0400 Subject: [PATCH 33/48] Add BYOC support to `create_index`, `describe_index`, `list_indexes` (#482) ## Problem BYOC requires a new type of spec object, `ByocSpec`. ## Solution - Update legacy-style methods to accept new `ByocSpec` type: - `Pinecone#create_index` - `PineconeAsyncio#create_index` - Update resource classes (accessed from `pc.db.index.create`) - `IndexResource#create` - `AsyncioIndexResource#create` - Update interfaces used for docgen ## Type of Change - [x] New feature (non-breaking change which adds functionality) ## Test Plan - Added a new unit test to ensure the client can properly deserialize API responses (e.g. from describe, list) that include the new byoc spec type into the `IndexModel` object. This is important since I'm currently not set up to do a full integration test of the feature. 
--- pinecone/__init__.py | 1 + pinecone/db_control/models/__init__.py | 2 + pinecone/db_control/models/byoc_spec.py | 12 ++++ pinecone/db_control/request_factory.py | 16 +++-- .../db_control/resources/asyncio/index.py | 11 +++- pinecone/db_control/resources/sync/index.py | 11 +++- pinecone/legacy_pinecone_interface.py | 5 +- pinecone/pinecone.py | 3 +- pinecone/pinecone_asyncio.py | 3 +- pinecone/pinecone_interface_asyncio.py | 15 ++--- tests/unit/db_control/test_index.py | 62 +++++++++++++++++++ .../db_control/test_index_request_factory.py | 62 +++++++++++++++++++ 12 files changed, 181 insertions(+), 22 deletions(-) create mode 100644 pinecone/db_control/models/byoc_spec.py create mode 100644 tests/unit/db_control/test_index.py create mode 100644 tests/unit/db_control/test_index_request_factory.py diff --git a/pinecone/__init__.py b/pinecone/__init__.py index d6d73c95..6a66bb22 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -71,6 +71,7 @@ "IndexList": ("pinecone.db_control.models", "IndexList"), "IndexModel": ("pinecone.db_control.models", "IndexModel"), "IndexEmbed": ("pinecone.db_control.models", "IndexEmbed"), + "ByocSpec": ("pinecone.db_control.models", "ByocSpec"), "ServerlessSpec": ("pinecone.db_control.models", "ServerlessSpec"), "ServerlessSpecDefinition": ("pinecone.db_control.models", "ServerlessSpecDefinition"), "PodSpec": ("pinecone.db_control.models", "PodSpec"), diff --git a/pinecone/db_control/models/__init__.py b/pinecone/db_control/models/__init__.py index 66568de3..cf866f11 100644 --- a/pinecone/db_control/models/__init__.py +++ b/pinecone/db_control/models/__init__.py @@ -2,6 +2,7 @@ from .collection_description import CollectionDescription from .serverless_spec import ServerlessSpec from .pod_spec import PodSpec +from .byoc_spec import ByocSpec from .index_list import IndexList from .collection_list import CollectionList from .index_model import IndexModel @@ -18,6 +19,7 @@ "PodSpecDefinition", "ServerlessSpec", 
"ServerlessSpecDefinition", + "ByocSpec", "IndexList", "CollectionList", "IndexModel", diff --git a/pinecone/db_control/models/byoc_spec.py b/pinecone/db_control/models/byoc_spec.py new file mode 100644 index 00000000..ccbdff4a --- /dev/null +++ b/pinecone/db_control/models/byoc_spec.py @@ -0,0 +1,12 @@ +from dataclasses import dataclass + + +@dataclass(frozen=True) +class ByocSpec: + """ + ByocSpec represents the configuration used to deploy a BYOC (Bring Your Own Cloud) index. + + To learn more about the options for each configuration, please see [Understanding Indexes](https://docs.pinecone.io/docs/indexes) + """ + + environment: str diff --git a/pinecone/db_control/request_factory.py b/pinecone/db_control/request_factory.py index a5d298ca..76fbd6a0 100644 --- a/pinecone/db_control/request_factory.py +++ b/pinecone/db_control/request_factory.py @@ -27,12 +27,13 @@ from pinecone.core.openapi.db_control.model.serverless_spec import ( ServerlessSpec as ServerlessSpecModel, ) +from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec as ByocSpecModel from pinecone.core.openapi.db_control.model.pod_spec import PodSpec as PodSpecModel from pinecone.core.openapi.db_control.model.pod_spec_metadata_config import PodSpecMetadataConfig from pinecone.core.openapi.db_control.model.create_index_from_backup_request import ( CreateIndexFromBackupRequest, ) -from pinecone.db_control.models import ServerlessSpec, PodSpec, IndexModel, IndexEmbed +from pinecone.db_control.models import ServerlessSpec, PodSpec, ByocSpec, IndexModel, IndexEmbed from pinecone.db_control.enums import ( Metric, @@ -76,7 +77,7 @@ def __parse_deletion_protection( raise ValueError("deletion_protection must be either 'enabled' or 'disabled'") @staticmethod - def __parse_index_spec(spec: Union[Dict, ServerlessSpec, PodSpec]) -> IndexSpec: + def __parse_index_spec(spec: Union[Dict, ServerlessSpec, PodSpec, ByocSpec]) -> IndexSpec: if isinstance(spec, dict): if "serverless" in spec: 
spec["serverless"]["cloud"] = convert_enum_to_string(spec["serverless"]["cloud"]) @@ -100,8 +101,10 @@ def __parse_index_spec(spec: Union[Dict, ServerlessSpec, PodSpec]) -> IndexSpec: indexed=args_dict["metadata_config"].get("indexed", None) ) index_spec = IndexSpec(pod=PodSpecModel(**args_dict)) + elif "byoc" in spec: + index_spec = IndexSpec(byoc=ByocSpecModel(**spec["byoc"])) else: - raise ValueError("spec must contain either 'serverless' or 'pod' key") + raise ValueError("spec must contain either 'serverless', 'pod', or 'byoc' key") elif isinstance(spec, ServerlessSpec): index_spec = IndexSpec( serverless=ServerlessSpecModel(cloud=spec.cloud, region=spec.region) @@ -123,15 +126,18 @@ def __parse_index_spec(spec: Union[Dict, ServerlessSpec, PodSpec]) -> IndexSpec: index_spec = IndexSpec( pod=PodSpecModel(environment=spec.environment, pod_type=spec.pod_type, **args_dict) ) + elif isinstance(spec, ByocSpec): + args_dict = parse_non_empty_args([("environment", spec.environment)]) + index_spec = IndexSpec(byoc=ByocSpecModel(**args_dict)) else: - raise TypeError("spec must be of type dict, ServerlessSpec, or PodSpec") + raise TypeError("spec must be of type dict, ServerlessSpec, PodSpec, or ByocSpec") return index_spec @staticmethod def create_index_request( name: str, - spec: Union[Dict, ServerlessSpec, PodSpec], + spec: Union[Dict, ServerlessSpec, PodSpec, ByocSpec], dimension: Optional[int] = None, metric: Optional[Union[Metric, str]] = Metric.COSINE, deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, diff --git a/pinecone/db_control/resources/asyncio/index.py b/pinecone/db_control/resources/asyncio/index.py index b48ff99c..7bb10404 100644 --- a/pinecone/db_control/resources/asyncio/index.py +++ b/pinecone/db_control/resources/asyncio/index.py @@ -3,7 +3,14 @@ from typing import Optional, Dict, Union -from pinecone.db_control.models import ServerlessSpec, PodSpec, IndexModel, IndexList, IndexEmbed +from 
pinecone.db_control.models import ( + ServerlessSpec, + PodSpec, + ByocSpec, + IndexModel, + IndexList, + IndexEmbed, +) from pinecone.utils import docslinks from pinecone.db_control.enums import ( @@ -33,7 +40,7 @@ def __init__(self, index_api, config): async def create( self, name: str, - spec: Union[Dict, ServerlessSpec, PodSpec], + spec: Union[Dict, ServerlessSpec, PodSpec, ByocSpec], dimension: Optional[int] = None, metric: Optional[Union[Metric, str]] = Metric.COSINE, timeout: Optional[int] = None, diff --git a/pinecone/db_control/resources/sync/index.py b/pinecone/db_control/resources/sync/index.py index d5e7d6e2..05068383 100644 --- a/pinecone/db_control/resources/sync/index.py +++ b/pinecone/db_control/resources/sync/index.py @@ -4,7 +4,14 @@ from pinecone.db_control.index_host_store import IndexHostStore -from pinecone.db_control.models import ServerlessSpec, PodSpec, IndexModel, IndexList, IndexEmbed +from pinecone.db_control.models import ( + ServerlessSpec, + PodSpec, + ByocSpec, + IndexModel, + IndexList, + IndexEmbed, +) from pinecone.utils import docslinks, require_kwargs from pinecone.db_control.enums import ( @@ -39,7 +46,7 @@ def __init__(self, index_api, config): def create( self, name: str, - spec: Union[Dict, ServerlessSpec, PodSpec], + spec: Union[Dict, ServerlessSpec, PodSpec, ByocSpec], dimension: Optional[int] = None, metric: Optional[Union[Metric, str]] = Metric.COSINE, timeout: Optional[int] = None, diff --git a/pinecone/legacy_pinecone_interface.py b/pinecone/legacy_pinecone_interface.py index cb896022..ad315d70 100644 --- a/pinecone/legacy_pinecone_interface.py +++ b/pinecone/legacy_pinecone_interface.py @@ -6,6 +6,7 @@ from pinecone.db_control.models import ( ServerlessSpec, PodSpec, + ByocSpec, IndexList, CollectionList, IndexModel, @@ -194,7 +195,7 @@ def __init__( def create_index( self, name: str, - spec: Union[Dict, "ServerlessSpec", "PodSpec"], + spec: Union[Dict, "ServerlessSpec", "PodSpec", "ByocSpec"], dimension: 
Optional[int], metric: Optional[Union["Metric", str]] = "Metric.COSINE", timeout: Optional[int] = None, @@ -214,7 +215,7 @@ def create_index( :type metric: str, optional :param spec: A dictionary containing configurations describing how the index should be deployed. For serverless indexes, specify region and cloud. For pod indexes, specify replicas, shards, pods, pod_type, metadata_config, and source_collection. - Alternatively, use the `ServerlessSpec` or `PodSpec` objects to specify these configurations. + Alternatively, use the `ServerlessSpec`, `PodSpec`, or `ByocSpec` objects to specify these configurations. :type spec: Dict :param dimension: If you are creating an index with `vector_type="dense"` (which is the default), you need to specify `dimension` to indicate the size of your vectors. This should match the dimension of the embeddings you will be inserting. For example, if you are using diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py index ae854129..25d48cd7 100644 --- a/pinecone/pinecone.py +++ b/pinecone/pinecone.py @@ -37,6 +37,7 @@ from pinecone.db_control.models import ( ServerlessSpec, PodSpec, + ByocSpec, IndexModel, IndexList, CollectionList, @@ -177,7 +178,7 @@ def index_api(self) -> "ManageIndexesApi": def create_index( self, name: str, - spec: Union[Dict, "ServerlessSpec", "PodSpec"], + spec: Union[Dict, "ServerlessSpec", "PodSpec", "ByocSpec"], dimension: Optional[int] = None, metric: Optional[Union["Metric", str]] = "cosine", timeout: Optional[int] = None, diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index 124ac854..5133f7bd 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -26,6 +26,7 @@ from pinecone.db_control.models import ( ServerlessSpec, PodSpec, + ByocSpec, IndexModel, IndexList, CollectionList, @@ -195,7 +196,7 @@ def index_api(self) -> "AsyncioManageIndexesApi": async def create_index( self, name: str, - spec: Union[Dict, "ServerlessSpec", "PodSpec"], + spec: 
Union[Dict, "ServerlessSpec", "PodSpec", "ByocSpec"], dimension: Optional[int] = None, metric: Optional[Union["Metric", str]] = "cosine", timeout: Optional[int] = None, diff --git a/pinecone/pinecone_interface_asyncio.py b/pinecone/pinecone_interface_asyncio.py index 6dfd953c..4b8e1cc1 100644 --- a/pinecone/pinecone_interface_asyncio.py +++ b/pinecone/pinecone_interface_asyncio.py @@ -10,6 +10,7 @@ from pinecone.db_control.models import ( ServerlessSpec, PodSpec, + ByocSpec, IndexList, CollectionList, IndexModel, @@ -294,14 +295,12 @@ async def main(): async def create_index( self, name: str, - spec: Union[Dict, "ServerlessSpec", "PodSpec"], + spec: Union[Dict, "ServerlessSpec", "PodSpec", "ByocSpec"], dimension: Optional[int], - metric: Optional[Union["Metric", str]] = "Metric.COSINE", + metric: Optional[Union["Metric", str]] = "cosine", timeout: Optional[int] = None, - deletion_protection: Optional[ - Union["DeletionProtection", str] - ] = "DeletionProtection.DISABLED", - vector_type: Optional[Union["VectorType", str]] = "VectorType.DENSE", + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + vector_type: Optional[Union["VectorType", str]] = "dense", tags: Optional[Dict[str, str]] = None, ): """Creates a Pinecone index. 
@@ -417,9 +416,7 @@ async def create_index_for_model( region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], tags: Optional[Dict[str, str]] = None, - deletion_protection: Optional[ - Union["DeletionProtection", str] - ] = "DeletionProtection.DISABLED", + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", timeout: Optional[int] = None, ) -> "IndexModel": """ diff --git a/tests/unit/db_control/test_index.py b/tests/unit/db_control/test_index.py new file mode 100644 index 00000000..36def11c --- /dev/null +++ b/tests/unit/db_control/test_index.py @@ -0,0 +1,62 @@ +import json + +from pinecone import Config + +from pinecone.db_control.resources.sync.index import IndexResource +from pinecone.openapi_support.api_client import ApiClient +from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi + + +def build_client_w_faked_response(mocker, body: str, status: int = 200): + response = mocker.Mock() + response.headers = {"content-type": "application/json"} + response.status = status + # Parse the JSON string into a dict + response_data = json.loads(body) + response.data = json.dumps(response_data).encode("utf-8") + + api_client = ApiClient() + mock_request = mocker.patch.object( + api_client.rest_client.pool_manager, "request", return_value=response + ) + index_api = ManageIndexesApi(api_client=api_client) + return IndexResource(index_api=index_api, config=Config(api_key="test-api-key")), mock_request + + +class TestIndexResource: + def test_describe_index(self, mocker): + body = """ + { + "name": "test-index", + "description": "test-description", + "dimension": 1024, + "metric": "cosine", + "spec": { + "byoc": { + "environment": "test-environment" + } + }, + "vector_type": "dense", + "status": { + "ready": true, + "state": "Ready" + }, + "host": "test-host.pinecone.io", + "deletion_protection": "disabled", + "tags": { + "test-tag": "test-value" + } + 
} + """ + index_resource, mock_request = build_client_w_faked_response(mocker, body) + + desc = index_resource.describe(name="test-index") + assert desc.name == "test-index" + assert desc.description == "test-description" + assert desc.dimension == 1024 + assert desc.metric == "cosine" + assert desc.spec.byoc.environment == "test-environment" + assert desc.vector_type == "dense" + assert desc.status.ready == True + assert desc.deletion_protection == "disabled" + assert desc.tags["test-tag"] == "test-value" diff --git a/tests/unit/db_control/test_index_request_factory.py b/tests/unit/db_control/test_index_request_factory.py new file mode 100644 index 00000000..777486b5 --- /dev/null +++ b/tests/unit/db_control/test_index_request_factory.py @@ -0,0 +1,62 @@ +from pinecone import ByocSpec, ServerlessSpec +from pinecone.db_control.request_factory import PineconeDBControlRequestFactory + + +class TestIndexRequestFactory: + def test_create_index_request_with_spec_byoc(self): + req = PineconeDBControlRequestFactory.create_index_request( + name="test-index", + metric="cosine", + dimension=1024, + spec=ByocSpec(environment="test-byoc-spec-id"), + ) + assert req.name == "test-index" + assert req.metric == "cosine" + assert req.dimension == 1024 + assert req.spec.byoc.environment == "test-byoc-spec-id" + assert req.vector_type == "dense" + assert req.deletion_protection.value == "disabled" + + def test_create_index_request_with_spec_serverless(self): + req = PineconeDBControlRequestFactory.create_index_request( + name="test-index", + metric="cosine", + dimension=1024, + spec=ServerlessSpec(cloud="aws", region="us-east-1"), + ) + assert req.name == "test-index" + assert req.metric == "cosine" + assert req.dimension == 1024 + assert req.spec.serverless.cloud == "aws" + assert req.spec.serverless.region == "us-east-1" + assert req.vector_type == "dense" + assert req.deletion_protection.value == "disabled" + + def test_create_index_request_with_spec_serverless_dict(self): + req = 
PineconeDBControlRequestFactory.create_index_request( + name="test-index", + metric="cosine", + dimension=1024, + spec={"serverless": {"cloud": "aws", "region": "us-east-1"}}, + ) + assert req.name == "test-index" + assert req.metric == "cosine" + assert req.dimension == 1024 + assert req.spec.serverless.cloud == "aws" + assert req.spec.serverless.region == "us-east-1" + assert req.vector_type == "dense" + assert req.deletion_protection.value == "disabled" + + def test_create_index_request_with_spec_byoc_dict(self): + req = PineconeDBControlRequestFactory.create_index_request( + name="test-index", + metric="cosine", + dimension=1024, + spec={"byoc": {"environment": "test-byoc-spec-id"}}, + ) + assert req.name == "test-index" + assert req.metric == "cosine" + assert req.dimension == 1024 + assert req.spec.byoc.environment == "test-byoc-spec-id" + assert req.vector_type == "dense" + assert req.deletion_protection.value == "disabled" From 71fa605c3dbf8538a1a9bfa0ccd7f43c5e59cee2 Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Wed, 14 May 2025 20:32:09 -0400 Subject: [PATCH 34/48] Add `py.typed` to indicate type information is present (#485) ## Problem The Pinecone SDK does not currently have full coverage on mypy type annotations, but we are adopting them in an incremental fashion. ## Solution Add a `py.typed` file to the package. An empty `py.typed` file is used as a [marker](https://typing.python.org/en/latest/spec/distributing.html#packaging-typed-libraries) to let mypy or other type checkers know there is type information in the package. ## Type of Change - [x] New feature (non-breaking change which adds functionality) ## Test Plan Describe specific steps for validating this change. 
--- pinecone/py.typed | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 pinecone/py.typed diff --git a/pinecone/py.typed b/pinecone/py.typed new file mode 100644 index 00000000..e69de29b From b77ecd125d6d1dc33cf5ceb390dbe36dbf09bc70 Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Wed, 14 May 2025 21:17:56 -0400 Subject: [PATCH 35/48] Cleanup test resources (#486) ## Problem Some test indexes are not being cleaned up properly because the created indexes are not tagged with the test run id. ## Solution Update tests that are missing tags. ## Type of Change - [x] Bug fix (non-breaking change which fixes an issue) --- scripts/repl.py | 57 +++++++++++++++++++ .../control/resources/index/test_create.py | 36 ++++++------ .../resources/index/test_create.py | 24 ++++---- 3 files changed, 87 insertions(+), 30 deletions(-) diff --git a/scripts/repl.py b/scripts/repl.py index cacf3355..49207362 100644 --- a/scripts/repl.py +++ b/scripts/repl.py @@ -2,6 +2,8 @@ import code from pinecone import Pinecone import logging +import os +import time def main(): @@ -15,18 +17,73 @@ def main(): logging.basicConfig( level=logging.DEBUG, format="%(levelname)-8s | %(name)s:%(lineno)d | %(message)s" ) + logger = logging.getLogger(__name__) # Start the interactive REPL banner = """ Welcome to the custom Python REPL! Your initialization steps have been completed. 
+ + Two Pinecone objects are available: + - pc: Interact with the one-offs project + - pcci: Interact with the pinecone-python-client project (CI testing) + + You can use the following functions to clean up the environment: + - delete_all_indexes(pc) + - delete_all_collections(pc) + - delete_all_backups(pc) + - cleanup_all(pc) """ + # In situations where there are a lot of resources, we want to + # slow down the rate of requests + sleep_interval = 30 + + def delete_all_indexes(pc): + for index in pc.db.index.list(): + logger.info(f"Deleting index {index.name}") + try: + if index.deletion_protection == "enabled": + logger.info(f"Disabling deletion protection for index {index.name}") + pc.db.index.configure(name=index.name, deletion_protection="disabled") + pc.db.index.delete(name=index.name) + time.sleep(sleep_interval) + except Exception as e: + logger.error(f"Error deleting index {index.name}: {e}") + + def delete_all_collections(pc): + for collection in pc.db.collection.list(): + logger.info(f"Deleting collection {collection.name}") + try: + pc.db.collection.delete(name=collection.name) + time.sleep(sleep_interval) + except Exception as e: + logger.error(f"Error deleting collection {collection.name}: {e}") + + def delete_all_backups(pc): + for backup in pc.db.backup.list(): + logger.info(f"Deleting backup {backup.name}") + try: + pc.db.backup.delete(backup_id=backup.backup_id) + time.sleep(sleep_interval) + except Exception as e: + logger.error(f"Error deleting backup {backup.name}: {e}") + + def cleanup_all(pc): + delete_all_indexes(pc) + delete_all_collections(pc) + delete_all_backups(pc) + # Create a custom namespace with any pre-loaded variables namespace = { "__name__": "__main__", "__doc__": None, "pc": Pinecone(), + "pcci": Pinecone(api_key=os.environ.get("PINECONE_API_KEY_CI_TESTING")), + "delete_all_indexes": delete_all_indexes, + "delete_all_collections": delete_all_collections, + "delete_all_backups": delete_all_backups, + "cleanup_all": cleanup_all, # 
Add any other variables you want to have available in the REPL } diff --git a/tests/integration/control/resources/index/test_create.py b/tests/integration/control/resources/index/test_create.py index 1591ecd5..4e2578fb 100644 --- a/tests/integration/control/resources/index/test_create.py +++ b/tests/integration/control/resources/index/test_create.py @@ -17,11 +17,12 @@ class TestCreateServerlessIndexHappyPath: - def test_create_index(self, pc: Pinecone, index_name): + def test_create_index(self, pc: Pinecone, index_name, index_tags): resp = pc.db.index.create( name=index_name, dimension=10, spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + tags=index_tags, ) assert resp.name == index_name assert resp.dimension == 10 @@ -36,23 +37,25 @@ def test_create_index(self, pc: Pinecone, index_name): assert desc.deletion_protection == "disabled" # default value assert desc.vector_type == "dense" # default value - def test_create_skip_wait(self, pc, index_name): + def test_create_skip_wait(self, pc, index_name, index_tags): resp = pc.db.index.create( name=index_name, dimension=10, spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), timeout=-1, + tags=index_tags, ) assert resp.name == index_name assert resp.dimension == 10 assert resp.metric == "cosine" - def test_create_infinite_wait(self, pc, index_name): + def test_create_infinite_wait(self, pc, index_name, index_tags): resp = pc.db.index.create( name=index_name, dimension=10, spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), timeout=None, + tags=index_tags, ) assert resp.name == index_name assert resp.dimension == 10 @@ -70,15 +73,15 @@ def test_create_default_index_with_metric(self, pc, create_index_params, metric) assert desc.vector_type == "dense" @pytest.mark.parametrize( - "metric_enum,vector_type_enum,dim,tags", + "metric_enum,vector_type_enum,dim", [ - (Metric.COSINE, VectorType.DENSE, 10, None), - (Metric.EUCLIDEAN, VectorType.DENSE, 10, {"env": 
"prod"}), - (Metric.DOTPRODUCT, VectorType.SPARSE, None, {"env": "dev"}), + (Metric.COSINE, VectorType.DENSE, 10), + (Metric.EUCLIDEAN, VectorType.DENSE, 10), + (Metric.DOTPRODUCT, VectorType.SPARSE, None), ], ) def test_create_with_enum_values( - self, pc, index_name, metric_enum, vector_type_enum, dim, tags + self, pc, index_name, metric_enum, vector_type_enum, dim, index_tags ): args = { "name": index_name, @@ -86,7 +89,7 @@ def test_create_with_enum_values( "vector_type": vector_type_enum, "deletion_protection": DeletionProtection.DISABLED, "spec": ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), - "tags": tags, + "tags": index_tags, } if dim is not None: args["dimension"] = dim @@ -101,8 +104,7 @@ def test_create_with_enum_values( assert desc.name == index_name assert desc.spec.serverless.cloud == "aws" assert desc.spec.serverless.region == "us-east-1" - if tags: - assert desc.tags.to_dict() == tags + assert desc.tags.to_dict() == index_tags @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) def test_create_dense_index_with_metric(self, pc, create_index_params, metric): @@ -113,13 +115,6 @@ def test_create_dense_index_with_metric(self, pc, create_index_params, metric): assert desc.metric == metric assert desc.vector_type == "dense" - def test_create_with_optional_tags(self, pc, create_index_params): - tags = {"foo": "FOO", "bar": "BAR"} - create_index_params["tags"] = tags - pc.db.index.create(**create_index_params) - desc = pc.db.index.describe(create_index_params["name"]) - assert desc.tags.to_dict() == tags - class TestCreatePodIndexHappyPath: def test_create_index_minimal_config( @@ -152,7 +147,7 @@ def test_create_index_with_spec_options( metric="cosine", spec=PodSpec( environment=pod_environment, - pod_type="p1.x2", + pod_type="p1.x1", replicas=2, metadata_config={"indexed": ["foo", "bar"]}, ), @@ -164,9 +159,10 @@ def test_create_index_with_spec_options( assert desc.dimension == 10 assert desc.metric == 
"cosine" assert desc.spec.pod.environment == pod_environment - assert desc.spec.pod.pod_type == "p1.x2" + assert desc.spec.pod.pod_type == "p1.x1" assert desc.spec.pod.replicas == 2 assert desc.spec.pod.metadata_config.indexed == ["foo", "bar"] + assert desc.tags.to_dict() == index_tags def test_create_index_with_deletion_protection( self, pc: Pinecone, index_name, pod_environment, index_tags diff --git a/tests/integration/control_asyncio/resources/index/test_create.py b/tests/integration/control_asyncio/resources/index/test_create.py index 5a64c354..9643e3a5 100644 --- a/tests/integration/control_asyncio/resources/index/test_create.py +++ b/tests/integration/control_asyncio/resources/index/test_create.py @@ -13,11 +13,11 @@ @pytest.mark.asyncio class TestAsyncioCreateIndex: @pytest.mark.parametrize("spec_fixture", ("spec1", "spec2", "spec3")) - async def test_create_index(self, index_name, request, spec_fixture): + async def test_create_index(self, index_name, request, spec_fixture, index_tags): pc = PineconeAsyncio() spec = request.getfixturevalue(spec_fixture) - resp = await pc.db.index.create(name=index_name, dimension=10, spec=spec) + resp = await pc.db.index.create(name=index_name, dimension=10, spec=spec, tags=index_tags) assert resp.name == index_name assert resp.dimension == 10 @@ -33,17 +33,21 @@ async def test_create_index(self, index_name, request, spec_fixture): assert desc.vector_type == "dense" # default value await pc.close() - async def test_create_skip_wait(self, index_name, spec1): + async def test_create_skip_wait(self, index_name, spec1, index_tags): pc = PineconeAsyncio() - resp = await pc.db.index.create(name=index_name, dimension=10, spec=spec1, timeout=-1) + resp = await pc.db.index.create( + name=index_name, dimension=10, spec=spec1, timeout=-1, tags=index_tags + ) assert resp.name == index_name assert resp.dimension == 10 assert resp.metric == "cosine" await pc.close() - async def test_create_infinite_wait(self, index_name, spec1): + 
async def test_create_infinite_wait(self, index_name, spec1, index_tags): async with PineconeAsyncio() as pc: - resp = await pc.db.index.create(name=index_name, dimension=10, spec=spec1, timeout=None) + resp = await pc.db.index.create( + name=index_name, dimension=10, spec=spec1, timeout=None, tags=index_tags + ) assert resp.name == index_name assert resp.dimension == 10 assert resp.metric == "cosine" @@ -104,7 +108,7 @@ async def test_create_with_enum_values_and_tags( async def test_create_dense_index_with_metric(self, index_name, spec1, metric, index_tags): pc = PineconeAsyncio() - await pc.create_index( + await pc.db.index.create( name=index_name, dimension=10, spec=spec1, @@ -121,7 +125,7 @@ async def test_create_dense_index_with_metric(self, index_name, spec1, metric, i async def test_create_with_optional_tags(self, index_name, spec1, index_tags): pc = PineconeAsyncio() - await pc.create_index(name=index_name, dimension=10, spec=spec1, tags=index_tags) + await pc.db.index.create(name=index_name, dimension=10, spec=spec1, tags=index_tags) desc = await pc.db.index.describe(name=index_name) assert desc.tags.to_dict() == index_tags @@ -131,7 +135,7 @@ async def test_create_with_optional_tags(self, index_name, spec1, index_tags): async def test_create_sparse_index(self, index_name, spec1, index_tags): pc = PineconeAsyncio() - await pc.create_index( + await pc.db.index.create( name=index_name, spec=spec1, metric=Metric.DOTPRODUCT, @@ -149,7 +153,7 @@ async def test_create_sparse_index(self, index_name, spec1, index_tags): async def test_create_with_deletion_protection(self, index_name, spec1, index_tags): pc = PineconeAsyncio() - await pc.create_index( + await pc.db.index.create( name=index_name, spec=spec1, metric=Metric.DOTPRODUCT, From 6b0aeaf10d25d5e3d72c500ec5cd7e4fd61721e2 Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Thu, 15 May 2025 09:12:29 -0400 Subject: [PATCH 36/48] Try running tests on preprod (#487) ## Problem Want to isolate the impact of my 
test activities from prod ## Solution Adjust CI test configuration to add an `x-environment` header to requests using the `PINECONE_ADDITIONAL_HEADERS` environment variable, which is picked up by the configuration parsing. ## Type of Change - [x] Infrastructure change (CI configs, etc) ## Test Plan Describe specific steps for validating this change. --- .github/actions/cleanup-all/action.yml | 5 +++++ .github/actions/create-index-legacy/action.yml | 7 ++++++- .github/actions/create-index/action.yml | 6 ++++++ .github/actions/delete-index/action.yml | 7 ++++++- .github/actions/test-data-asyncio/action.yaml | 5 +++++ .github/actions/test-data-plane/action.yaml | 5 +++++ .../actions/test-dependency-asyncio-rest/action.yaml | 5 +++++ .github/actions/test-dependency-grpc/action.yaml | 5 +++++ .github/actions/test-dependency-rest/action.yaml | 5 +++++ .github/workflows/testing-dependency-asyncio.yaml | 1 + .github/workflows/testing-dependency-grpc.yaml | 3 +++ .github/workflows/testing-dependency-rest.yaml | 3 +++ .github/workflows/testing-dependency.yaml | 2 ++ .github/workflows/testing-integration-asyncio.yaml | 2 ++ .github/workflows/testing-integration.yaml | 8 ++++---- scripts/repl.py | 4 ++++ tests/integration/control/pod/conftest.py | 10 ++-------- tests/integration/control/serverless/conftest.py | 5 +---- .../serverless/test_create_index_for_model_errors.py | 1 + .../test_create_index_for_model_errors.py | 1 + 20 files changed, 72 insertions(+), 18 deletions(-) diff --git a/.github/actions/cleanup-all/action.yml b/.github/actions/cleanup-all/action.yml index b6322e88..a62da2c8 100644 --- a/.github/actions/cleanup-all/action.yml +++ b/.github/actions/cleanup-all/action.yml @@ -9,6 +9,10 @@ inputs: description: 'Delete all indexes and collections' required: false default: 'false' + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": 
"preprod-aws-0"}' runs: using: 'composite' @@ -25,3 +29,4 @@ runs: env: PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }} DELETE_ALL: ${{ inputs.DELETE_ALL }} + PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }} diff --git a/.github/actions/create-index-legacy/action.yml b/.github/actions/create-index-legacy/action.yml index 8bfc9500..a22d5679 100644 --- a/.github/actions/create-index-legacy/action.yml +++ b/.github/actions/create-index-legacy/action.yml @@ -23,6 +23,10 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' runs: using: 'composite' @@ -36,7 +40,7 @@ runs: shell: bash run: | pip install pinecone-client==${{ inputs.pinecone_client_version }} - + - name: Create index id: create-index shell: bash @@ -44,6 +48,7 @@ runs: env: PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }} PINECONE_ENVIRONMENT: ${{ inputs.PINECONE_ENVIRONMENT }} + PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }} INDEX_NAME: ${{ inputs.index_name }} DIMENSION: ${{ inputs.dimension }} METRIC: ${{ inputs.metric } diff --git a/.github/actions/create-index/action.yml b/.github/actions/create-index/action.yml index b81dc1b9..95cfc2be 100644 --- a/.github/actions/create-index/action.yml +++ b/.github/actions/create-index/action.yml @@ -25,6 +25,11 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' + outputs: index_name: @@ -48,6 +53,7 @@ runs: run: poetry run python3 scripts/create.py env: PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }} + PINECONE_ADDITIONAL_HEADERS: ${{ 
inputs.PINECONE_ADDITIONAL_HEADERS }} NAME_PREFIX: ${{ inputs.name_prefix }} REGION: ${{ inputs.region }} CLOUD: ${{ inputs.cloud }} diff --git a/.github/actions/delete-index/action.yml b/.github/actions/delete-index/action.yml index 358fb483..9e35c83a 100644 --- a/.github/actions/delete-index/action.yml +++ b/.github/actions/delete-index/action.yml @@ -8,6 +8,10 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' runs: @@ -26,4 +30,5 @@ runs: run: poetry run python3 scripts/delete.py env: PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }} - INDEX_NAME: ${{ inputs.index_name }} \ No newline at end of file + PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }} + INDEX_NAME: ${{ inputs.index_name }} diff --git a/.github/actions/test-data-asyncio/action.yaml b/.github/actions/test-data-asyncio/action.yaml index fb349ca9..e81c7c9a 100644 --- a/.github/actions/test-data-asyncio/action.yaml +++ b/.github/actions/test-data-asyncio/action.yaml @@ -15,6 +15,10 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' python_version: description: 'The version of Python to use' required: false @@ -41,6 +45,7 @@ runs: run: poetry run pytest tests/integration/data_asyncio --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }} + PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }} USE_GRPC: ${{ inputs.use_grpc }} SPEC: ${{ inputs.spec }} FRESHNESS_TIMEOUT_SECONDS: ${{ inputs.freshness_timeout_seconds }} diff --git 
a/.github/actions/test-data-plane/action.yaml b/.github/actions/test-data-plane/action.yaml index 3a1c1204..0cbc3023 100644 --- a/.github/actions/test-data-plane/action.yaml +++ b/.github/actions/test-data-plane/action.yaml @@ -18,6 +18,10 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' python_version: description: 'The version of Python to use' required: false @@ -55,6 +59,7 @@ runs: run: poetry run pytest tests/integration/data --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }} + PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }} USE_GRPC: ${{ inputs.use_grpc }} METRIC: ${{ inputs.metric }} SPEC: ${{ inputs.spec }} diff --git a/.github/actions/test-dependency-asyncio-rest/action.yaml b/.github/actions/test-dependency-asyncio-rest/action.yaml index 192614e1..1efe4a8a 100644 --- a/.github/actions/test-dependency-asyncio-rest/action.yaml +++ b/.github/actions/test-dependency-asyncio-rest/action.yaml @@ -5,6 +5,10 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' index_name: description: 'The name of the index' required: true @@ -43,4 +47,5 @@ runs: command: poetry run pytest tests/dependency/asyncio-rest -s -v env: PINECONE_API_KEY: '${{ inputs.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '${{ inputs.PINECONE_ADDITIONAL_HEADERS }}' INDEX_NAME: '${{ inputs.index_name }}' diff --git a/.github/actions/test-dependency-grpc/action.yaml b/.github/actions/test-dependency-grpc/action.yaml index 08afcb18..4ba6d9ac 100644 --- 
a/.github/actions/test-dependency-grpc/action.yaml +++ b/.github/actions/test-dependency-grpc/action.yaml @@ -5,6 +5,10 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' index_name: description: 'The name of the index' required: true @@ -63,4 +67,5 @@ runs: command: poetry run pytest tests/dependency/grpc -s -v env: PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }} + PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }} INDEX_NAME: ${{ inputs.index_name }} diff --git a/.github/actions/test-dependency-rest/action.yaml b/.github/actions/test-dependency-rest/action.yaml index 0ba24446..a3487cf3 100644 --- a/.github/actions/test-dependency-rest/action.yaml +++ b/.github/actions/test-dependency-rest/action.yaml @@ -5,6 +5,10 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' index_name: description: 'The name of the index' required: true @@ -42,4 +46,5 @@ runs: command: poetry run pytest tests/dependency/rest -s -v env: PINECONE_API_KEY: '${{ inputs.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '${{ inputs.PINECONE_ADDITIONAL_HEADERS }}' INDEX_NAME: '${{ inputs.index_name }}' diff --git a/.github/workflows/testing-dependency-asyncio.yaml b/.github/workflows/testing-dependency-asyncio.yaml index dd0b167b..c1b9e757 100644 --- a/.github/workflows/testing-dependency-asyncio.yaml +++ b/.github/workflows/testing-dependency-asyncio.yaml @@ -27,4 +27,5 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + 
PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' aiohttp_version: '${{ matrix.aiohttp_version }}' diff --git a/.github/workflows/testing-dependency-grpc.yaml b/.github/workflows/testing-dependency-grpc.yaml index 261bf8dd..80be0065 100644 --- a/.github/workflows/testing-dependency-grpc.yaml +++ b/.github/workflows/testing-dependency-grpc.yaml @@ -53,6 +53,7 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' grpcio_version: '${{ matrix.grpcio_version }}' lz4_version: '${{ matrix.lz4_version }}' protobuf_version: '${{ matrix.protobuf_version }}' @@ -86,6 +87,7 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' grpcio_version: '${{ matrix.grpcio_version }}' lz4_version: '${{ matrix.lz4_version }}' protobuf_version: '${{ matrix.protobuf_version }}' @@ -118,6 +120,7 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' grpcio_version: '${{ matrix.grpcio_version }}' lz4_version: '${{ matrix.lz4_version }}' protobuf_version: '${{ matrix.protobuf_version }}' diff --git a/.github/workflows/testing-dependency-rest.yaml b/.github/workflows/testing-dependency-rest.yaml index c3662cff..403b6ee8 100644 --- a/.github/workflows/testing-dependency-rest.yaml +++ b/.github/workflows/testing-dependency-rest.yaml @@ -30,6 +30,7 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name 
}}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' urllib3_version: '${{ matrix.urllib3_version }}' @@ -53,6 +54,7 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' urllib3_version: '${{ matrix.urllib3_version }}' dependency-matrix-rest-313: @@ -75,4 +77,5 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' urllib3_version: '${{ matrix.urllib3_version }}' diff --git a/.github/workflows/testing-dependency.yaml b/.github/workflows/testing-dependency.yaml index 73fe8061..21d613b4 100644 --- a/.github/workflows/testing-dependency.yaml +++ b/.github/workflows/testing-dependency.yaml @@ -19,6 +19,7 @@ jobs: name_prefix: depstest-${{ github.run_number }} dimension: 2 PINECONE_API_KEY: ${{ secrets.PINECONE_API_KEY }} + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' dependency-test-rest: uses: './.github/workflows/testing-dependency-rest.yaml' @@ -55,3 +56,4 @@ jobs: with: index_name: '${{ needs.deps-test-setup.outputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' diff --git a/.github/workflows/testing-integration-asyncio.yaml b/.github/workflows/testing-integration-asyncio.yaml index eb5e3b91..b45f789f 100644 --- a/.github/workflows/testing-integration-asyncio.yaml +++ b/.github/workflows/testing-integration-asyncio.yaml @@ -29,6 +29,7 @@ jobs: run: poetry 
run pytest tests/integration/data_asyncio --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: PINECONE_API_KEY: ${{ secrets.PINECONE_API_KEY }} + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' db-control-asyncio: name: db_control asyncio @@ -54,3 +55,4 @@ jobs: run: poetry run pytest tests/integration/control_asyncio/*.py --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' diff --git a/.github/workflows/testing-integration.yaml b/.github/workflows/testing-integration.yaml index 5175d4dc..0db91346 100644 --- a/.github/workflows/testing-integration.yaml +++ b/.github/workflows/testing-integration.yaml @@ -7,9 +7,8 @@ jobs: name: Reorg tests runs-on: ubuntu-latest env: - PINECONE_DEBUG_CURL: 'false' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' - PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client"}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' strategy: fail-fast: false matrix: @@ -54,8 +53,8 @@ jobs: - name: 'Run integration tests' run: poetry run pytest tests/integration/inference --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: - PINECONE_DEBUG_CURL: 'true' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' dependency-test-asyncio: @@ -84,6 +83,7 @@ jobs: metric: 'cosine' spec: '{ "serverless": { "region": "us-west-2", "cloud": "aws" }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' freshness_timeout_seconds: 600 skip_weird_id_tests: 'true' @@ -146,7 +146,7 @@ jobs: - name: 'Run 
integration tests (REST)' run: poetry run pytest tests/integration/control/serverless --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: - PINECONE_DEBUG_CURL: 'true' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' SERVERLESS_CLOUD: '${{ matrix.testConfig.serverless.cloud }}' SERVERLESS_REGION: '${{ matrix.testConfig.serverless.region }}' diff --git a/scripts/repl.py b/scripts/repl.py index 49207362..88c218b2 100644 --- a/scripts/repl.py +++ b/scripts/repl.py @@ -74,6 +74,10 @@ def cleanup_all(pc): delete_all_collections(pc) delete_all_backups(pc) + # We want to route through preprod by default + if os.environ.get("PINECONE_ADDITIONAL_HEADERS") is None: + os.environ["PINECONE_ADDITIONAL_HEADERS"] = '{"x-environment": "preprod-aws-0"}' + # Create a custom namespace with any pre-loaded variables namespace = { "__name__": "__main__", diff --git a/tests/integration/control/pod/conftest.py b/tests/integration/control/pod/conftest.py index dc1418eb..cbfdcc72 100644 --- a/tests/integration/control/pod/conftest.py +++ b/tests/integration/control/pod/conftest.py @@ -7,10 +7,7 @@ @pytest.fixture() def client(): - api_key = get_environment_var("PINECONE_API_KEY") - return Pinecone( - api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} - ) + return Pinecone() @pytest.fixture() @@ -66,10 +63,7 @@ def notready_index(client, index_name, create_index_params): @pytest.fixture(scope="session") def reusable_collection(): - pc = Pinecone( - api_key=get_environment_var("PINECONE_API_KEY"), - additional_headers={"sdk-test-suite": "pinecone-python-client"}, - ) + pc = Pinecone() index_name = generate_index_name("temp-index") dimension = int(get_environment_var("DIMENSION")) print(f"Creating index {index_name} to prepare a collection...") diff --git a/tests/integration/control/serverless/conftest.py 
b/tests/integration/control/serverless/conftest.py index d32d26fc..d1d880d2 100644 --- a/tests/integration/control/serverless/conftest.py +++ b/tests/integration/control/serverless/conftest.py @@ -11,10 +11,7 @@ @pytest.fixture() def client(): - api_key = get_environment_var("PINECONE_API_KEY") - return Pinecone( - api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} - ) + return Pinecone() @pytest.fixture() diff --git a/tests/integration/control/serverless/test_create_index_for_model_errors.py b/tests/integration/control/serverless/test_create_index_for_model_errors.py index c08c581d..0fa372d5 100644 --- a/tests/integration/control/serverless/test_create_index_for_model_errors.py +++ b/tests/integration/control/serverless/test_create_index_for_model_errors.py @@ -40,6 +40,7 @@ def test_invalid_cloud(self, client, index_name): ) assert "Invalid value for `cloud`" in str(e.value) + @pytest.mark.skip(reason="This seems to not raise an error in preprod-aws-0") def test_invalid_region(self, client, index_name): with pytest.raises(PineconeApiException) as e: client.create_index_for_model( diff --git a/tests/integration/control_asyncio/test_create_index_for_model_errors.py b/tests/integration/control_asyncio/test_create_index_for_model_errors.py index 36f93c7b..2d104a25 100644 --- a/tests/integration/control_asyncio/test_create_index_for_model_errors.py +++ b/tests/integration/control_asyncio/test_create_index_for_model_errors.py @@ -48,6 +48,7 @@ async def test_invalid_cloud(self, index_name): assert "Invalid value for `cloud`" in str(e.value) await pc.close() + @pytest.mark.skip(reason="This seems to not raise an error in preprod-aws-0") async def test_invalid_region(self, index_name): pc = PineconeAsyncio() From 75d4bd3143de0de459f75b50927b51c4968ab165 Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Thu, 15 May 2025 14:04:07 -0400 Subject: [PATCH 37/48] Use `@require_kwargs` decorator on new methods (#480) ## Problem Python methods can 
be invoked with both positional and keyword arguments, however the keyword argument form has a number of benefits such as: - The keyword argument label acts as a minor form of documentation - Keyword arguments can be passed in any order, whereas positional arguments must be passed in a specific fixed order. This order flexibility works well when there are large numbers of optional parameters - If a function takes several parameters of the same type (e.g. str), it's very easy to accidentally pass them in the wrong order since you won't run into a type-related error letting you know you've done something wrong. Keyword args don't have this problem. - With positional arguments you must pass arguments with no default value before those that have a default value; this means you can't add a new default value without having to shuffle your argument order which creates a breaking change out of what should be a benign UX improvement. Keyword args do not have this limitation. ## Solution I recently implemented a decorator called @require_kwargs that will give an informative error if a caller attempts to pass values as positional arguments. This PR is a follow-up to apply that decorator to all methods that are newly added in the upcoming release. Adding this decorator would be a breaking change for existing methods, so for now I will hold off on doing that. ## Type of Change - [x] New feature (non-breaking change which adds functionality) ## Testing ```python >>> pc.db.index.describe('foofoo') Traceback (most recent call last): File "<stdin>", line 1, in <module> File "/Users/jhamon/workspace/pinecone-python-client/pinecone/utils/require_kwargs.py", line 10, in wrapper raise TypeError( TypeError: describe() requires keyword arguments. 
Please use describe(name=value) >>> pc.db.index.describe(name='foofoo') { "name": "foofoo", "metric": "cosine", "host": "foofoo-dojoi3u.svc.aped-4627-b74a.pinecone.io", "spec": { "serverless": { "cloud": "aws", "region": "us-east-1" } }, "status": { "ready": true, "state": "Ready" }, "vector_type": "dense", "dimension": 2, "deletion_protection": "disabled", "tags": null } ``` --- .../resources/asyncio/collection.py | 16 ++++++------- .../db_control/resources/asyncio/index.py | 24 ++++++++++++++----- .../db_control/resources/sync/collection.py | 11 ++++++--- pinecone/db_control/resources/sync/index.py | 20 ++++++++++++---- pinecone/legacy_pinecone_interface.py | 1 + pinecone/pinecone_asyncio.py | 3 +-- .../control/resources/index/test_create.py | 10 ++++---- .../control/resources/index/test_describe.py | 4 ++-- .../control/resources/index/test_has.py | 6 ++--- .../control/serverless/test_describe_index.py | 2 +- .../control_asyncio/test_describe_index.py | 2 +- 11 files changed, 63 insertions(+), 36 deletions(-) diff --git a/pinecone/db_control/resources/asyncio/collection.py b/pinecone/db_control/resources/asyncio/collection.py index 33c1f3d0..e7d98a66 100644 --- a/pinecone/db_control/resources/asyncio/collection.py +++ b/pinecone/db_control/resources/asyncio/collection.py @@ -1,32 +1,32 @@ import logging -from typing import TYPE_CHECKING - from pinecone.db_control.models import CollectionList from pinecone.db_control.request_factory import PineconeDBControlRequestFactory +from pinecone.utils import require_kwargs logger = logging.getLogger(__name__) """ @private """ -if TYPE_CHECKING: - pass - class CollectionResourceAsyncio: def __init__(self, index_api): self.index_api = index_api - async def create(self, name: str, source: str): + @require_kwargs + async def create(self, *, name: str, source: str): req = PineconeDBControlRequestFactory.create_collection_request(name=name, source=source) await self.index_api.create_collection(create_collection_request=req) + 
@require_kwargs async def list(self) -> CollectionList: response = await self.index_api.list_collections() return CollectionList(response) - async def delete(self, name: str): + @require_kwargs + async def delete(self, *, name: str): await self.index_api.delete_collection(name) - async def describe(self, name: str): + @require_kwargs + async def describe(self, *, name: str): return await self.index_api.describe_collection(name).to_dict() diff --git a/pinecone/db_control/resources/asyncio/index.py b/pinecone/db_control/resources/asyncio/index.py index 7bb10404..9816f365 100644 --- a/pinecone/db_control/resources/asyncio/index.py +++ b/pinecone/db_control/resources/asyncio/index.py @@ -26,7 +26,7 @@ from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict from pinecone.db_control.request_factory import PineconeDBControlRequestFactory from pinecone.core.openapi.db_control import API_VERSION - +from pinecone.utils import require_kwargs logger = logging.getLogger(__name__) """ @private """ @@ -37,8 +37,10 @@ def __init__(self, index_api, config): self._index_api = index_api self._config = config + @require_kwargs async def create( self, + *, name: str, spec: Union[Dict, ServerlessSpec, PodSpec, ByocSpec], dimension: Optional[int] = None, @@ -63,8 +65,10 @@ async def create( return IndexModel(resp) return await self.__poll_describe_index_until_ready(name, timeout) + @require_kwargs async def create_for_model( self, + *, name: str, cloud: Union[CloudProvider, str], region: Union[AwsRegion, GcpRegion, AzureRegion, str], @@ -87,8 +91,10 @@ async def create_for_model( return IndexModel(resp) return await self.__poll_describe_index_until_ready(name, timeout) + @require_kwargs async def create_from_backup( self, + *, name: str, backup_id: str, deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, @@ -141,17 +147,18 @@ async def is_ready() -> bool: return description - async def delete(self, name: str, timeout: Optional[int] 
= None): + @require_kwargs + async def delete(self, *, name: str, timeout: Optional[int] = None): await self._index_api.delete_index(name) if timeout == -1: return if timeout is None: - while await self.has(name): + while await self.has(name=name): await asyncio.sleep(5) else: - while await self.has(name) and timeout >= 0: + while await self.has(name=name) and timeout >= 0: await asyncio.sleep(5) timeout -= 5 if timeout and timeout < 0: @@ -163,23 +170,28 @@ async def delete(self, name: str, timeout: Optional[int] = None): ) ) + @require_kwargs async def list(self) -> IndexList: response = await self._index_api.list_indexes() return IndexList(response) - async def describe(self, name: str) -> IndexModel: + @require_kwargs + async def describe(self, *, name: str) -> IndexModel: description = await self._index_api.describe_index(name) return IndexModel(description) - async def has(self, name: str) -> bool: + @require_kwargs + async def has(self, *, name: str) -> bool: available_indexes = await self.list() if name in available_indexes.names(): return True else: return False + @require_kwargs async def configure( self, + *, name: str, replicas: Optional[int] = None, pod_type: Optional[Union[PodType, str]] = None, diff --git a/pinecone/db_control/resources/sync/collection.py b/pinecone/db_control/resources/sync/collection.py index 1d8d11d8..8ed703c2 100644 --- a/pinecone/db_control/resources/sync/collection.py +++ b/pinecone/db_control/resources/sync/collection.py @@ -2,6 +2,7 @@ from pinecone.db_control.models import CollectionList from pinecone.db_control.request_factory import PineconeDBControlRequestFactory +from pinecone.utils import require_kwargs logger = logging.getLogger(__name__) """ @private """ @@ -12,16 +13,20 @@ def __init__(self, index_api): self.index_api = index_api """ @private """ - def create(self, name: str, source: str) -> None: + @require_kwargs + def create(self, *, name: str, source: str) -> None: req = 
PineconeDBControlRequestFactory.create_collection_request(name=name, source=source) self.index_api.create_collection(create_collection_request=req) + @require_kwargs def list(self) -> CollectionList: response = self.index_api.list_collections() return CollectionList(response) - def delete(self, name: str) -> None: + @require_kwargs + def delete(self, *, name: str) -> None: self.index_api.delete_collection(name) - def describe(self, name: str): + @require_kwargs + def describe(self, *, name: str): return self.index_api.describe_collection(name).to_dict() diff --git a/pinecone/db_control/resources/sync/index.py b/pinecone/db_control/resources/sync/index.py index 05068383..11fbeeac 100644 --- a/pinecone/db_control/resources/sync/index.py +++ b/pinecone/db_control/resources/sync/index.py @@ -43,8 +43,10 @@ def __init__(self, index_api, config): self._index_host_store = IndexHostStore() """ @private """ + @require_kwargs def create( self, + *, name: str, spec: Union[Dict, ServerlessSpec, PodSpec, ByocSpec], dimension: Optional[int] = None, @@ -69,8 +71,10 @@ def create( return IndexModel(resp) return self.__poll_describe_index_until_ready(name, timeout) + @require_kwargs def create_for_model( self, + *, name: str, cloud: Union[CloudProvider, str], region: Union[AwsRegion, GcpRegion, AzureRegion, str], @@ -166,7 +170,8 @@ def is_ready() -> bool: return description - def delete(self, name: str, timeout: Optional[int] = None): + @require_kwargs + def delete(self, *, name: str, timeout: Optional[int] = None): self._index_api.delete_index(name) self._index_host_store.delete_host(self._config, name) @@ -174,10 +179,10 @@ def delete(self, name: str, timeout: Optional[int] = None): return if timeout is None: - while self.has(name): + while self.has(name=name): time.sleep(5) else: - while self.has(name) and timeout >= 0: + while self.has(name=name) and timeout >= 0: time.sleep(5) timeout -= 5 if timeout and timeout < 0: @@ -189,11 +194,13 @@ def delete(self, name: str, timeout: 
Optional[int] = None): ) ) + @require_kwargs def list(self) -> IndexList: response = self._index_api.list_indexes() return IndexList(response) - def describe(self, name: str) -> IndexModel: + @require_kwargs + def describe(self, *, name: str) -> IndexModel: api_instance = self._index_api description = api_instance.describe_index(name) host = description.host @@ -201,14 +208,17 @@ def describe(self, name: str) -> IndexModel: return IndexModel(description) - def has(self, name: str) -> bool: + @require_kwargs + def has(self, *, name: str) -> bool: if name in self.list().names(): return True else: return False + @require_kwargs def configure( self, + *, name: str, replicas: Optional[int] = None, pod_type: Optional[Union[PodType, str]] = None, diff --git a/pinecone/legacy_pinecone_interface.py b/pinecone/legacy_pinecone_interface.py index ad315d70..27e893d7 100644 --- a/pinecone/legacy_pinecone_interface.py +++ b/pinecone/legacy_pinecone_interface.py @@ -335,6 +335,7 @@ def create_index_from_backup( @abstractmethod def create_index_for_model( self, + *, name: str, cloud: Union["CloudProvider", str], region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index 5133f7bd..606210a4 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -4,8 +4,7 @@ from pinecone.config import PineconeConfig, ConfigBuilder -from pinecone.utils import normalize_host, require_kwargs -from pinecone.utils import docslinks +from pinecone.utils import normalize_host, require_kwargs, docslinks from .pinecone_interface_asyncio import PineconeAsyncioDBControlInterface from .pinecone import check_realistic_host diff --git a/tests/integration/control/resources/index/test_create.py b/tests/integration/control/resources/index/test_create.py index 4e2578fb..55bf66c2 100644 --- a/tests/integration/control/resources/index/test_create.py +++ b/tests/integration/control/resources/index/test_create.py @@ 
-65,7 +65,7 @@ def test_create_infinite_wait(self, pc, index_name, index_tags): def test_create_default_index_with_metric(self, pc, create_index_params, metric): create_index_params["metric"] = metric pc.db.index.create(**create_index_params) - desc = pc.db.index.describe(create_index_params["name"]) + desc = pc.db.index.describe(name=create_index_params["name"]) if isinstance(metric, str): assert desc.metric == metric else: @@ -96,7 +96,7 @@ def test_create_with_enum_values( pc.db.index.create(**args) - desc = pc.db.index.describe(index_name) + desc = pc.db.index.describe(name=index_name) assert desc.metric == metric_enum.value assert desc.vector_type == vector_type_enum.value assert desc.dimension == dim @@ -111,7 +111,7 @@ def test_create_dense_index_with_metric(self, pc, create_index_params, metric): create_index_params["metric"] = metric create_index_params["vector_type"] = VectorType.DENSE pc.db.index.create(**create_index_params) - desc = pc.db.index.describe(create_index_params["name"]) + desc = pc.db.index.describe(name=create_index_params["name"]) assert desc.metric == metric assert desc.vector_type == "dense" @@ -233,7 +233,7 @@ def test_create_index_default_timeout(self, pc, create_index_params): create_index_params["timeout"] = None pc.db.index.create(**create_index_params) # Waits infinitely for index to be ready - desc = pc.db.index.describe(create_index_params["name"]) + desc = pc.db.index.describe(name=create_index_params["name"]) assert desc.status.ready == True def test_create_index_when_timeout_set(self, pc, create_index_params): @@ -247,7 +247,7 @@ def test_create_index_when_timeout_set(self, pc, create_index_params): def test_create_index_with_negative_timeout(self, pc, create_index_params): create_index_params["timeout"] = -1 pc.db.index.create(**create_index_params) - desc = pc.db.index.describe(create_index_params["name"]) + desc = pc.db.index.describe(name=create_index_params["name"]) # Returns immediately without waiting for index to be 
ready assert desc.status.ready in [False, True] diff --git a/tests/integration/control/resources/index/test_describe.py b/tests/integration/control/resources/index/test_describe.py index 276176bf..eacb412e 100644 --- a/tests/integration/control/resources/index/test_describe.py +++ b/tests/integration/control/resources/index/test_describe.py @@ -3,7 +3,7 @@ class TestDescribeIndex: def test_describe_index_when_ready(self, pc, ready_sl_index, create_index_params): - description = pc.db.index.describe(ready_sl_index) + description = pc.db.index.describe(name=ready_sl_index) assert isinstance(description, IndexModel) assert description.name == ready_sl_index @@ -26,7 +26,7 @@ def test_describe_index_when_ready(self, pc, ready_sl_index, create_index_params assert description.status.ready == True def test_describe_index_when_not_ready(self, pc, notready_sl_index, create_index_params): - description = pc.db.index.describe(notready_sl_index) + description = pc.db.index.describe(name=notready_sl_index) assert isinstance(description, IndexModel) assert description.name == notready_sl_index diff --git a/tests/integration/control/resources/index/test_has.py b/tests/integration/control/resources/index/test_has.py index 8f55766f..62aba165 100644 --- a/tests/integration/control/resources/index/test_has.py +++ b/tests/integration/control/resources/index/test_has.py @@ -5,14 +5,14 @@ class TestHasIndex: def test_index_exists_success(self, pc, create_index_params): name = create_index_params["name"] pc.db.index.create(**create_index_params) - has_index = pc.db.index.has(name) + has_index = pc.db.index.has(name=name) assert has_index == True def test_index_does_not_exist(self, pc): name = random_string(8) - has_index = pc.db.index.has(name) + has_index = pc.db.index.has(name=name) assert has_index == False def test_has_index_with_null_index_name(self, pc): - has_index = pc.db.index.has("") + has_index = pc.db.index.has(name="") assert has_index == False diff --git 
a/tests/integration/control/serverless/test_describe_index.py b/tests/integration/control/serverless/test_describe_index.py index 18d22372..87f0876c 100644 --- a/tests/integration/control/serverless/test_describe_index.py +++ b/tests/integration/control/serverless/test_describe_index.py @@ -26,7 +26,7 @@ def test_describe_index_when_ready(self, client, ready_sl_index, create_sl_index assert description.status.ready == True def test_describe_index_when_not_ready(self, client, notready_sl_index, create_sl_index_params): - description = client.describe_index(notready_sl_index) + description = client.describe_index(name=notready_sl_index) assert isinstance(description, IndexModel) assert description.name == notready_sl_index diff --git a/tests/integration/control_asyncio/test_describe_index.py b/tests/integration/control_asyncio/test_describe_index.py index f9c5614d..e8c4aff3 100644 --- a/tests/integration/control_asyncio/test_describe_index.py +++ b/tests/integration/control_asyncio/test_describe_index.py @@ -31,7 +31,7 @@ async def test_describe_index_when_ready(self, ready_sl_index, create_sl_index_p async def test_describe_index_when_not_ready(self, notready_sl_index, create_sl_index_params): pc = PineconeAsyncio() - description = await pc.describe_index(notready_sl_index) + description = await pc.describe_index(name=notready_sl_index) assert isinstance(description, IndexModel) assert description.name == notready_sl_index From 4582a48d39dacc05d2826ec5118720bac78ddd58 Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Thu, 15 May 2025 14:06:20 -0400 Subject: [PATCH 38/48] Use lazy loading in error decorator, debugging config (#481) ## Problem We want to continue improving our import and initialization times. ## Solution - Refactor the `@validate_and_convert_errors` decorator to use lazy-loading of `urllib3`. We only need to load urllib3 in this decorator at the moment an exception has occurred. 
- Refactor `pinecone.config.openapi_configuration.py` to avoid loading the `http` module unless enabling debug logging. The default http log level is already 0 (disabled) so there's no need to set it in the default case. ## Perf testing ```sh poetry run python3 -X importtime -c "from pinecone import Pinecone; pc = Pinecone(api_key='foo')" 2> main.log ``` Comparing the importtime results before and after this change shows a reduction from 101ms to 65ms, **a savings of 36ms which is about 36%.** Screenshot 2025-05-14 at 4 00 38 AM Screenshot 2025-05-14 at 4 50 58 AM --- pinecone/config/openapi_configuration.py | 43 +++++++++++++++++------- pinecone/utils/error_handling.py | 24 +++++++++---- 2 files changed, 47 insertions(+), 20 deletions(-) diff --git a/pinecone/config/openapi_configuration.py b/pinecone/config/openapi_configuration.py index fce6defc..c3ce79a4 100644 --- a/pinecone/config/openapi_configuration.py +++ b/pinecone/config/openapi_configuration.py @@ -2,7 +2,6 @@ import logging import multiprocessing -from http import client as http_client from pinecone.exceptions import PineconeApiValueError from typing import TypedDict @@ -154,6 +153,7 @@ def __init__( self.logger_file = None """Debug file location """ + # Initialize debug directly without using the property setter self.debug = False """Debug switch """ @@ -288,7 +288,7 @@ def debug(self): :param value: The debug status, True or False. :type: bool """ - return self.__debug + return self._debug @debug.setter def debug(self, value): @@ -297,20 +297,37 @@ def debug(self, value): :param value: The debug status, True or False. 
:type: bool """ - self.__debug = value - if self.__debug: - # if debug status is True, turn on debug logging + if hasattr(self, "_debug"): + previous_debug = self._debug + else: + previous_debug = None + self._debug = value + + def enable_http_logging(): + from http import client as http_client + + http_client.HTTPConnection.debuglevel = 1 + + def disable_http_logging(): + from http import client as http_client + + http_client.HTTPConnection.debuglevel = 0 + + def set_default_log_level(c): + for _, logger in c.logger.items(): + logger.setLevel(logging.WARNING) + + if self._debug: for _, logger in self.logger.items(): logger.setLevel(logging.DEBUG) - # turn on http_client debug - http_client.HTTPConnection.debuglevel = 1 + enable_http_logging() + elif previous_debug is True and self._debug is False: + set_default_log_level(self) + disable_http_logging() else: - # if debug status is False, turn off debug logging, - # setting log level to default `logging.WARNING` - for _, logger in self.logger.items(): - logger.setLevel(logging.WARNING) - # turn off http_client debug - http_client.HTTPConnection.debuglevel = 0 + # On the initial call, we don't need to do anything to http + # logging, since it's not enabled by default. 
+ set_default_log_level(self) @property def logger_format(self): diff --git a/pinecone/utils/error_handling.py b/pinecone/utils/error_handling.py index 5cdaaaf4..c18090eb 100644 --- a/pinecone/utils/error_handling.py +++ b/pinecone/utils/error_handling.py @@ -1,7 +1,11 @@ import inspect from functools import wraps -from urllib3.exceptions import MaxRetryError, ProtocolError + +class ProtocolError(Exception): + """Raised when there is a protocol error in the connection.""" + + pass def validate_and_convert_errors(func): @@ -9,15 +13,21 @@ def validate_and_convert_errors(func): def inner_func(*args, **kwargs): try: return func(*args, **kwargs) - except MaxRetryError as e: - if isinstance(e.reason, ProtocolError): + except Exception as e: + # Lazy import of urllib3 exceptions + from urllib3.exceptions import MaxRetryError, ProtocolError as Urllib3ProtocolError + + if isinstance(e, MaxRetryError): + if isinstance(e.reason, Urllib3ProtocolError): + raise ProtocolError(f"Failed to connect to {e.url}") from e + else: + raise e from e + elif isinstance(e, Urllib3ProtocolError): raise ProtocolError( - f"Failed to connect to {e.url}; did you specify the correct index name?" + "Connection failed. Please verify that the index host is correct and accessible." ) from e else: - raise - except ProtocolError as e: - raise ProtocolError("Failed to connect; did you specify the correct index name?") from e + raise e from e # Override signature sig = inspect.signature(func) From c617bb2c2efbb9291a40c7fd13e2c729197a3716 Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Thu, 15 May 2025 16:02:10 -0400 Subject: [PATCH 39/48] Make resource classes extend `PluginAware` so they can be extended in the future (#484) ## Problem We need these classes to extend `PluginAware` in case we ever want to add functions via plugin in the future. ## Solution Adjust the constructor functions for each of these resource classes to set the properties needed by `PluginAware`. 
## Type of Change - [x] New feature (non-breaking change which adds functionality) --- pinecone/db_control/db_control.py | 38 ++++++-- pinecone/db_control/resources/sync/backup.py | 28 +++++- .../db_control/resources/sync/collection.py | 29 +++++- pinecone/db_control/resources/sync/index.py | 94 +++++++++++-------- .../db_control/resources/sync/restore_job.py | 30 +++++- pinecone/utils/plugin_aware.py | 2 - tests/unit/db_control/test_index.py | 10 +- 7 files changed, 166 insertions(+), 65 deletions(-) diff --git a/pinecone/db_control/db_control.py b/pinecone/db_control/db_control.py index 0f11c209..ec6a412b 100644 --- a/pinecone/db_control/db_control.py +++ b/pinecone/db_control/db_control.py @@ -4,7 +4,7 @@ from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi from pinecone.openapi_support.api_client import ApiClient -from pinecone.utils import setup_openapi_client +from pinecone.utils import setup_openapi_client, PluginAware from pinecone.core.openapi.db_control import API_VERSION @@ -19,11 +19,11 @@ from pinecone.config import Config, OpenApiConfiguration -class DBControl: +class DBControl(PluginAware): def __init__( self, config: "Config", openapi_config: "OpenApiConfiguration", pool_threads: int ) -> None: - self._config = config + self.config = config """ @private """ self._openapi_config = openapi_config @@ -35,7 +35,7 @@ def __init__( self._index_api = setup_openapi_client( api_client_klass=ApiClient, api_klass=ManageIndexesApi, - config=self._config, + config=self.config, openapi_config=self._openapi_config, pool_threads=self._pool_threads, api_version=API_VERSION, @@ -54,12 +54,19 @@ def __init__( self._backup_resource: Optional["BackupResource"] = None """ @private """ + super().__init__() # Initialize PluginAware + @property def index(self) -> "IndexResource": if self._index_resource is None: from .resources.sync.index import IndexResource - self._index_resource = IndexResource(index_api=self._index_api, 
config=self._config) + self._index_resource = IndexResource( + index_api=self._index_api, + config=self.config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) return self._index_resource @property @@ -67,7 +74,12 @@ def collection(self) -> "CollectionResource": if self._collection_resource is None: from .resources.sync.collection import CollectionResource - self._collection_resource = CollectionResource(self._index_api) + self._collection_resource = CollectionResource( + index_api=self._index_api, + config=self.config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) return self._collection_resource @property @@ -75,7 +87,12 @@ def restore_job(self) -> "RestoreJobResource": if self._restore_job_resource is None: from .resources.sync.restore_job import RestoreJobResource - self._restore_job_resource = RestoreJobResource(self._index_api) + self._restore_job_resource = RestoreJobResource( + index_api=self._index_api, + config=self.config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) return self._restore_job_resource @property @@ -83,5 +100,10 @@ def backup(self) -> "BackupResource": if self._backup_resource is None: from .resources.sync.backup import BackupResource - self._backup_resource = BackupResource(self._index_api) + self._backup_resource = BackupResource( + index_api=self._index_api, + config=self.config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) return self._backup_resource diff --git a/pinecone/db_control/resources/sync/backup.py b/pinecone/db_control/resources/sync/backup.py index dbc576e9..8d5d2a0c 100644 --- a/pinecone/db_control/resources/sync/backup.py +++ b/pinecone/db_control/resources/sync/backup.py @@ -1,16 +1,36 @@ -from typing import Optional +from typing import Optional, TYPE_CHECKING from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi from 
pinecone.core.openapi.db_control.model.create_backup_request import CreateBackupRequest from pinecone.db_control.models import BackupModel, BackupList -from pinecone.utils import parse_non_empty_args, require_kwargs +from pinecone.utils import parse_non_empty_args, require_kwargs, PluginAware +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration -class BackupResource: - def __init__(self, index_api: ManageIndexesApi): + +class BackupResource(PluginAware): + def __init__( + self, + index_api: ManageIndexesApi, + config: "Config", + openapi_config: "OpenApiConfiguration", + pool_threads: int, + ): self._index_api = index_api """ @private """ + self.config = config + """ @private """ + + self._openapi_config = openapi_config + """ @private """ + + self._pool_threads = pool_threads + """ @private """ + + super().__init__() # Initialize PluginAware + @require_kwargs def list( self, diff --git a/pinecone/db_control/resources/sync/collection.py b/pinecone/db_control/resources/sync/collection.py index 8ed703c2..950452e6 100644 --- a/pinecone/db_control/resources/sync/collection.py +++ b/pinecone/db_control/resources/sync/collection.py @@ -1,18 +1,41 @@ +from typing import TYPE_CHECKING import logging from pinecone.db_control.models import CollectionList from pinecone.db_control.request_factory import PineconeDBControlRequestFactory -from pinecone.utils import require_kwargs +from pinecone.utils import PluginAware, require_kwargs logger = logging.getLogger(__name__) """ @private """ +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi + from pinecone.config import Config, OpenApiConfiguration -class CollectionResource: - def __init__(self, index_api): + +class CollectionResource(PluginAware): + def __init__( + self, + index_api: "ManageIndexesApi", + config: "Config", + openapi_config: "OpenApiConfiguration", + pool_threads: int, + ): self.index_api = index_api """ @private """ + + self.config = 
config + """ @private """ + + self._openapi_config = openapi_config + """ @private """ + + self._pool_threads = pool_threads + """ @private """ + + super().__init__() # Initialize PluginAware + @require_kwargs def create(self, *, name: str, source: str) -> None: req = PineconeDBControlRequestFactory.create_collection_request(name=name, source=source) diff --git a/pinecone/db_control/resources/sync/index.py b/pinecone/db_control/resources/sync/index.py index 11fbeeac..0e69140c 100644 --- a/pinecone/db_control/resources/sync/index.py +++ b/pinecone/db_control/resources/sync/index.py @@ -1,29 +1,12 @@ import time import logging -from typing import Optional, Dict, Union +from typing import Optional, Dict, Union, TYPE_CHECKING from pinecone.db_control.index_host_store import IndexHostStore -from pinecone.db_control.models import ( - ServerlessSpec, - PodSpec, - ByocSpec, - IndexModel, - IndexList, - IndexEmbed, -) -from pinecone.utils import docslinks, require_kwargs - -from pinecone.db_control.enums import ( - Metric, - VectorType, - DeletionProtection, - PodType, - CloudProvider, - AwsRegion, - GcpRegion, - AzureRegion, -) +from pinecone.db_control.models import IndexModel, IndexList, IndexEmbed +from pinecone.utils import docslinks, require_kwargs, PluginAware + from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict from pinecone.db_control.request_factory import PineconeDBControlRequestFactory from pinecone.core.openapi.db_control import API_VERSION @@ -31,29 +14,58 @@ logger = logging.getLogger(__name__) """ @private """ - -class IndexResource: - def __init__(self, index_api, config): +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration + from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi + from pinecone.db_control.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, + ) + from pinecone.db_control.models import 
ServerlessSpec, PodSpec, ByocSpec, IndexEmbed + + +class IndexResource(PluginAware): + def __init__( + self, + index_api: "ManageIndexesApi", + config: "Config", + openapi_config: "OpenApiConfiguration", + pool_threads: int, + ): self._index_api = index_api """ @private """ - self._config = config + self.config = config + """ @private """ + + self._openapi_config = openapi_config + """ @private """ + + self._pool_threads = pool_threads """ @private """ self._index_host_store = IndexHostStore() """ @private """ + super().__init__() # Initialize PluginAware + @require_kwargs def create( self, *, name: str, - spec: Union[Dict, ServerlessSpec, PodSpec, ByocSpec], + spec: Union[Dict, "ServerlessSpec", "PodSpec", "ByocSpec"], dimension: Optional[int] = None, - metric: Optional[Union[Metric, str]] = Metric.COSINE, + metric: Optional[Union["Metric", str]] = "cosine", timeout: Optional[int] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, - vector_type: Optional[Union[VectorType, str]] = VectorType.DENSE, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + vector_type: Optional[Union["VectorType", str]] = "dense", tags: Optional[Dict[str, str]] = None, ) -> IndexModel: req = PineconeDBControlRequestFactory.create_index_request( @@ -76,11 +88,11 @@ def create_for_model( self, *, name: str, - cloud: Union[CloudProvider, str], - region: Union[AwsRegion, GcpRegion, AzureRegion, str], - embed: Union[IndexEmbed, CreateIndexForModelEmbedTypedDict], + cloud: Union["CloudProvider", str], + region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], + embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], tags: Optional[Dict[str, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", timeout: Optional[int] = None, ) -> IndexModel: req = 
PineconeDBControlRequestFactory.create_index_for_model_request( @@ -103,7 +115,7 @@ def create_from_backup( *, name: str, backup_id: str, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", tags: Optional[Dict[str, str]] = None, timeout: Optional[int] = None, ) -> IndexModel: @@ -171,9 +183,9 @@ def is_ready() -> bool: return description @require_kwargs - def delete(self, *, name: str, timeout: Optional[int] = None): + def delete(self, *, name: str, timeout: Optional[int] = None) -> None: self._index_api.delete_index(name) - self._index_host_store.delete_host(self._config, name) + self._index_host_store.delete_host(self.config, name) if timeout == -1: return @@ -204,7 +216,7 @@ def describe(self, *, name: str) -> IndexModel: api_instance = self._index_api description = api_instance.describe_index(name) host = description.host - self._index_host_store.set_host(self._config, name, host) + self._index_host_store.set_host(self.config, name, host) return IndexModel(description) @@ -221,10 +233,10 @@ def configure( *, name: str, replicas: Optional[int] = None, - pod_type: Optional[Union[PodType, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = None, + pod_type: Optional[Union["PodType", str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = None, tags: Optional[Dict[str, str]] = None, - ): + ) -> None: api_instance = self._index_api description = self.describe(name=name) @@ -240,5 +252,5 @@ def configure( def _get_host(self, name: str) -> str: """@private""" return self._index_host_store.get_host( - api=self._index_api, config=self._config, index_name=name + api=self._index_api, config=self.config, index_name=name ) diff --git a/pinecone/db_control/resources/sync/restore_job.py b/pinecone/db_control/resources/sync/restore_job.py index b314bc53..e47010c8 100644 --- 
a/pinecone/db_control/resources/sync/restore_job.py +++ b/pinecone/db_control/resources/sync/restore_job.py @@ -1,15 +1,35 @@ -from typing import Optional +from typing import Optional, TYPE_CHECKING -from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi from pinecone.db_control.models import RestoreJobModel, RestoreJobList -from pinecone.utils import parse_non_empty_args, require_kwargs +from pinecone.utils import parse_non_empty_args, require_kwargs, PluginAware +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration + from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi -class RestoreJobResource: - def __init__(self, index_api: ManageIndexesApi): + +class RestoreJobResource(PluginAware): + def __init__( + self, + index_api: "ManageIndexesApi", + config: "Config", + openapi_config: "OpenApiConfiguration", + pool_threads: int, + ): self._index_api = index_api """ @private """ + self.config = config + """ @private """ + + self._openapi_config = openapi_config + """ @private """ + + self._pool_threads = pool_threads + """ @private """ + + super().__init__() # Initialize PluginAware + @require_kwargs def get(self, *, job_id: str) -> RestoreJobModel: """ diff --git a/pinecone/utils/plugin_aware.py b/pinecone/utils/plugin_aware.py index 4a27351a..56c54e90 100644 --- a/pinecone/utils/plugin_aware.py +++ b/pinecone/utils/plugin_aware.py @@ -36,8 +36,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: Raises: AttributeError: If required attributes are not set in the subclass. 
""" - logger.debug("PluginAware __init__ called for %s", self.__class__.__name__) - self._plugins_loaded = False """ @private """ diff --git a/tests/unit/db_control/test_index.py b/tests/unit/db_control/test_index.py index 36def11c..5fca4b18 100644 --- a/tests/unit/db_control/test_index.py +++ b/tests/unit/db_control/test_index.py @@ -1,6 +1,6 @@ import json -from pinecone import Config +from pinecone.config import Config, OpenApiConfiguration from pinecone.db_control.resources.sync.index import IndexResource from pinecone.openapi_support.api_client import ApiClient @@ -20,7 +20,13 @@ def build_client_w_faked_response(mocker, body: str, status: int = 200): api_client.rest_client.pool_manager, "request", return_value=response ) index_api = ManageIndexesApi(api_client=api_client) - return IndexResource(index_api=index_api, config=Config(api_key="test-api-key")), mock_request + resource = IndexResource( + index_api=index_api, + config=Config(api_key="test-api-key"), + openapi_config=OpenApiConfiguration(), + pool_threads=1, + ) + return resource, mock_request class TestIndexResource: From 6a12a6328e9e08b67bc71b8b6cd5fbbe96e399bf Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Thu, 15 May 2025 16:02:34 -0400 Subject: [PATCH 40/48] Test assistant plugin installation (#489) ## Problem We need to verify the assistant plugin is able to be installed correctly ## Solution - Add a simple test of one of the read-only functions within the plugin. This verifies the plugin has been installed correctly. 
- Add CI build for plugin tests ## Type of Change - [x] Infrastructure change (CI configs, etc) --- .github/workflows/testing-integration.yaml | 43 +++++++++++++++++----- codegen/apis | 2 +- codegen/python-oas-templates | 2 +- tests/integration/plugins/test_plugins.py | 8 ++++ 4 files changed, 44 insertions(+), 11 deletions(-) create mode 100644 tests/integration/plugins/test_plugins.py diff --git a/.github/workflows/testing-integration.yaml b/.github/workflows/testing-integration.yaml index 0db91346..f71ef3a1 100644 --- a/.github/workflows/testing-integration.yaml +++ b/.github/workflows/testing-integration.yaml @@ -4,7 +4,7 @@ name: "Integration Tests" jobs: reorg: - name: Reorg tests + name: Resource ${{ matrix.test_suite }} runs-on: ubuntu-latest env: PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' @@ -14,13 +14,13 @@ jobs: matrix: python_version: [3.9, 3.12] test_suite: - - tests/integration/control/resources/index - - tests/integration/control/resources/collections - - tests/integration/control/resources/backup - - tests/integration/control/resources/restore_job - - tests/integration/control_asyncio/resources/index - - tests/integration/control_asyncio/resources/backup - - tests/integration/control_asyncio/resources/restore_job + - control/resources/index + - control/resources/collections + - control/resources/backup + - control/resources/restore_job + - control_asyncio/resources/index + - control_asyncio/resources/backup + - control_asyncio/resources/restore_job steps: - uses: actions/checkout@v4 - name: 'Set up Python ${{ matrix.python_version }}' @@ -32,7 +32,7 @@ jobs: with: include_asyncio: true - name: 'Run tests' - run: poetry run pytest ${{ matrix.test_suite }} --retries 2 --retry-delay 35 -s -vv --log-cli-level=DEBUG + run: poetry run pytest tests/integration/${{ matrix.test_suite }} --retries 2 --retry-delay 35 -s -vv --log-cli-level=DEBUG inference: name: Inference tests @@ -56,6 +56,29 @@ jobs: PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY 
}}' PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' + plugins: + name: Plugin installation + runs-on: ubuntu-latest + strategy: + matrix: + python_version: [3.9, 3.12] + steps: + - uses: actions/checkout@v4 + - name: 'Set up Python ${{ matrix.python_version }}' + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python_version }}' + - name: Setup Poetry + uses: ./.github/actions/setup-poetry + with: + include_asyncio: true + - name: 'Run integration tests' + run: poetry run pytest tests/integration/plugins --retries 2 --retry-delay 35 -s -vv --log-cli-level=DEBUG + env: + PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' + + dependency-test-asyncio: uses: './.github/workflows/testing-integration-asyncio.yaml' @@ -67,6 +90,7 @@ jobs: runs-on: ubuntu-latest needs: - inference + - plugins strategy: fail-fast: false matrix: @@ -125,6 +149,7 @@ jobs: runs-on: ubuntu-latest needs: - inference + - plugins strategy: matrix: testConfig: diff --git a/codegen/apis b/codegen/apis index 09015d91..7e21ca9a 160000 --- a/codegen/apis +++ b/codegen/apis @@ -1 +1 @@ -Subproject commit 09015d9106f2578e473f45f55120aafc5c559f2a +Subproject commit 7e21ca9adb6a530ce11909d6209d69551f86e9bd diff --git a/codegen/python-oas-templates b/codegen/python-oas-templates index c7c75f57..2ba53806 160000 --- a/codegen/python-oas-templates +++ b/codegen/python-oas-templates @@ -1 +1 @@ -Subproject commit c7c75f57c6dfd0228a7bead444ea1004c57e0de3 +Subproject commit 2ba53806258cc8ab42ced7e52ba84dce1e977c6d diff --git a/tests/integration/plugins/test_plugins.py b/tests/integration/plugins/test_plugins.py new file mode 100644 index 00000000..e0c36569 --- /dev/null +++ b/tests/integration/plugins/test_plugins.py @@ -0,0 +1,8 @@ +from pinecone import Pinecone + + +class TestAssistantPlugin: + def 
test_assistant_plugin(self): + pc = Pinecone() + pc.assistant.list_assistants() + assert True, "This should pass without errors" From c1688f6edf5710abf64411598f59ff8bf04cbe2f Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Fri, 16 May 2025 11:24:50 -0400 Subject: [PATCH 41/48] Expose new `pc.inference.list_models()` and `pc.inference.get_model()` (#488) ## Problem We need to expose a new endpoint for discovering available inference models ## Solution - Regenerate code off the latest spec - Wire the new method up in the sync and async implementations of Inference - `pc.inference.get_model` - `pc.inference.list_models` - Make some adjustments in model_utils to be less fragile if unexpected values appear in enum fields - Implement new tests for these list_models endpoints. ## Usage ```python from pinecone import Pinecone pc = Pinecone() models = pc.inference.list_models() models[0] # { # "model": "llama-text-embed-v2", # "short_description": "A high performance dense embedding model optimized for multilingual and cross-lingual text question-answering retrieval with support for long documents (up to 2048 tokens) and dynamic embedding size (Matryoshka Embeddings).", # "type": "embed", # "supported_parameters": [ # { # "parameter": "input_type", # "type": "one_of", # "value_type": "string", # "required": true, # "allowed_values": [ # "query", # "passage" # ] # }, # { # "parameter": "truncate", # "type": "one_of", # "value_type": "string", # "required": false, # "default": "END", # "allowed_values": [ # "END", # "NONE", # "START" # ] # }, # { # "parameter": "dimension", # "type": "one_of", # "value_type": "integer", # "required": false, # "default": 1024, # "allowed_values": [ # 384, # 512, # 768, # 1024, # 2048 # ] # } # ], # "vector_type": "dense", # "default_dimension": 1024, # "modality": "text", # "max_sequence_length": 2048, # "max_batch_size": 96, # "provider_name": "NVIDIA", # "supported_metrics": [ # "Cosine", # "DotProduct" # ], # "supported_dimensions": [ # 
384, # 512, # 768, # 1024, # 2048 # ] # } ``` And async ```python import asyncio from pinecone import PineconeAsyncio async def main(): async with PineconeAsyncio() as pc: await pc.inference.list_models() asyncio.run(main()) ``` ## Type of Change - [x] New feature (non-breaking change which adds functionality) --- pinecone/__init__.py | 4 + pinecone/__init__.pyi | 14 +- .../openapi/db_control/model/backup_list.py | 5 + .../openapi/db_control/model/backup_model.py | 5 + .../openapi/db_control/model/byoc_spec.py | 5 + .../db_control/model/collection_list.py | 5 + .../db_control/model/collection_model.py | 5 + .../model/configure_index_request.py | 5 + .../model/configure_index_request_embed.py | 5 + .../model/configure_index_request_spec.py | 5 + .../model/configure_index_request_spec_pod.py | 5 + .../db_control/model/create_backup_request.py | 5 + .../model/create_collection_request.py | 5 + .../model/create_index_for_model_request.py | 5 + .../create_index_for_model_request_embed.py | 5 + .../model/create_index_from_backup_request.py | 5 + .../create_index_from_backup_response.py | 5 + .../db_control/model/create_index_request.py | 5 + .../db_control/model/deletion_protection.py | 5 + .../db_control/model/error_response.py | 5 + .../db_control/model/error_response_error.py | 5 + .../openapi/db_control/model/index_list.py | 5 + .../openapi/db_control/model/index_model.py | 5 + .../db_control/model/index_model_spec.py | 5 + .../db_control/model/index_model_status.py | 5 + .../openapi/db_control/model/index_spec.py | 5 + .../openapi/db_control/model/index_tags.py | 5 + .../db_control/model/model_index_embed.py | 5 + .../db_control/model/pagination_response.py | 5 + .../core/openapi/db_control/model/pod_spec.py | 5 + .../model/pod_spec_metadata_config.py | 5 + .../db_control/model/restore_job_list.py | 5 + .../db_control/model/restore_job_model.py | 5 + .../db_control/model/serverless_spec.py | 5 + .../openapi/db_data/model/delete_request.py | 5 + 
.../model/describe_index_stats_request.py | 5 + .../openapi/db_data/model/fetch_response.py | 5 + pinecone/core/openapi/db_data/model/hit.py | 5 + .../db_data/model/import_error_mode.py | 5 + .../openapi/db_data/model/import_model.py | 5 + .../db_data/model/index_description.py | 5 + .../db_data/model/list_imports_response.py | 5 + .../core/openapi/db_data/model/list_item.py | 5 + .../db_data/model/list_namespaces_response.py | 5 + .../openapi/db_data/model/list_response.py | 5 + .../db_data/model/namespace_description.py | 5 + .../db_data/model/namespace_summary.py | 5 + .../core/openapi/db_data/model/pagination.py | 5 + .../openapi/db_data/model/protobuf_any.py | 5 + .../db_data/model/protobuf_null_value.py | 5 + .../openapi/db_data/model/query_request.py | 5 + .../openapi/db_data/model/query_response.py | 5 + .../openapi/db_data/model/query_vector.py | 5 + .../core/openapi/db_data/model/rpc_status.py | 5 + .../openapi/db_data/model/scored_vector.py | 5 + .../db_data/model/search_records_request.py | 5 + .../model/search_records_request_query.py | 5 + .../model/search_records_request_rerank.py | 5 + .../db_data/model/search_records_response.py | 5 + .../model/search_records_response_result.py | 5 + .../db_data/model/search_records_vector.py | 5 + .../openapi/db_data/model/search_usage.py | 5 + .../openapi/db_data/model/search_vector.py | 5 + .../db_data/model/single_query_results.py | 5 + .../openapi/db_data/model/sparse_values.py | 5 + .../db_data/model/start_import_request.py | 5 + .../db_data/model/start_import_response.py | 5 + .../openapi/db_data/model/update_request.py | 5 + .../openapi/db_data/model/upsert_record.py | 5 + .../openapi/db_data/model/upsert_request.py | 5 + .../openapi/db_data/model/upsert_response.py | 5 + pinecone/core/openapi/db_data/model/usage.py | 5 + pinecone/core/openapi/db_data/model/vector.py | 5 + .../openapi/db_data/model/vector_values.py | 5 + .../openapi/inference/api/inference_api.py | 14 +- 
.../inference/model/dense_embedding.py | 19 +- .../core/openapi/inference/model/document.py | 5 + .../openapi/inference/model/embed_request.py | 5 + .../inference/model/embed_request_inputs.py | 5 + .../core/openapi/inference/model/embedding.py | 11 +- .../inference/model/embeddings_list.py | 5 + .../inference/model/embeddings_list_usage.py | 5 + .../openapi/inference/model/error_response.py | 5 + .../inference/model/error_response_error.py | 5 + .../openapi/inference/model/model_info.py | 10 +- .../inference/model/model_info_list.py | 5 + .../inference/model/model_info_metric.py | 5 + .../model/model_info_supported_metrics.py | 5 + .../model/model_info_supported_parameter.py | 5 + .../inference/model/ranked_document.py | 5 + .../openapi/inference/model/rerank_request.py | 5 + .../openapi/inference/model/rerank_result.py | 5 + .../inference/model/rerank_result_usage.py | 5 + .../inference/model/sparse_embedding.py | 19 +- .../openapi/inference/model/vector_type.py | 284 ------------------ .../core/openapi/inference/models/__init__.py | 1 - pinecone/inference/__init__.py | 1 + pinecone/inference/inference.py | 109 ++++++- pinecone/inference/inference_asyncio.py | 81 ++++- pinecone/inference/models/__init__.py | 4 + pinecone/inference/models/model_info.py | 43 +++ pinecone/inference/models/model_info_list.py | 57 ++++ pinecone/inference/resources/asyncio/model.py | 47 +++ pinecone/inference/resources/sync/model.py | 69 +++++ pinecone/openapi_support/api_version.py | 2 +- pinecone/openapi_support/model_utils.py | 7 +- pinecone/pinecone.py | 8 +- pinecone/pinecone_asyncio.py | 4 +- pinecone/scripts/repl.py | 4 + pinecone/utils/repr_overrides.py | 32 +- scripts/repl.py | 18 +- .../integration/inference/asyncio/__init__.py | 0 .../test_embeddings.py} | 127 +------- .../inference/asyncio/test_models.py | 88 ++++++ .../inference/asyncio/test_rerank.py | 125 ++++++++ tests/integration/inference/sync/__init__.py | 0 .../test_embeddings.py} | 120 +------- 
.../integration/inference/sync/test_models.py | 98 ++++++ .../integration/inference/sync/test_rerank.py | 118 ++++++++ 119 files changed, 1389 insertions(+), 584 deletions(-) delete mode 100644 pinecone/core/openapi/inference/model/vector_type.py create mode 100644 pinecone/inference/models/model_info.py create mode 100644 pinecone/inference/models/model_info_list.py create mode 100644 pinecone/inference/resources/asyncio/model.py create mode 100644 pinecone/inference/resources/sync/model.py create mode 100644 tests/integration/inference/asyncio/__init__.py rename tests/integration/inference/{test_asyncio_inference.py => asyncio/test_embeddings.py} (51%) create mode 100644 tests/integration/inference/asyncio/test_models.py create mode 100644 tests/integration/inference/asyncio/test_rerank.py create mode 100644 tests/integration/inference/sync/__init__.py rename tests/integration/inference/{test_inference.py => sync/test_embeddings.py} (51%) create mode 100644 tests/integration/inference/sync/test_models.py create mode 100644 tests/integration/inference/sync/test_rerank.py diff --git a/pinecone/__init__.py b/pinecone/__init__.py index 6a66bb22..78adbf0e 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -18,6 +18,10 @@ _inference_lazy_imports = { "RerankModel": ("pinecone.inference", "RerankModel"), "EmbedModel": ("pinecone.inference", "EmbedModel"), + "ModelInfo": ("pinecone.inference.models", "ModelInfo"), + "ModelInfoList": ("pinecone.inference.models", "ModelInfoList"), + "EmbeddingsList": ("pinecone.inference.models", "EmbeddingsList"), + "RerankResult": ("pinecone.inference.models", "RerankResult"), } _db_data_lazy_imports = { diff --git a/pinecone/__init__.pyi b/pinecone/__init__.pyi index 249fe9d8..f6873468 100644 --- a/pinecone/__init__.pyi +++ b/pinecone/__init__.pyi @@ -1,8 +1,14 @@ from pinecone.config import Config from pinecone.config import ConfigBuilder from pinecone.config import PineconeConfig -from pinecone.inference import 
RerankModel -from pinecone.inference import EmbedModel +from pinecone.inference import ( + RerankModel, + EmbedModel, + ModelInfo, + ModelInfoList, + EmbeddingsList, + RerankResult, +) from pinecone.db_data.dataclasses import ( Vector, SparseValues, @@ -69,6 +75,10 @@ __all__ = [ # Inference classes "RerankModel", "EmbedModel", + "ModelInfo", + "ModelInfoList", + "EmbeddingsList", + "RerankResult", # Data classes "Vector", "SparseValues", diff --git a/pinecone/core/openapi/db_control/model/backup_list.py b/pinecone/core/openapi/db_control/model/backup_list.py index 89777f75..a7a637ad 100644 --- a/pinecone/core/openapi/db_control/model/backup_list.py +++ b/pinecone/core/openapi/db_control/model/backup_list.py @@ -151,6 +151,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 pagination (PaginationResponse): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +169,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -188,6 +190,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -236,6 +239,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 pagination (PaginationResponse): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -251,6 +255,7 @@ def 
__init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/backup_model.py b/pinecone/core/openapi/db_control/model/backup_model.py index fd7da209..817677e4 100644 --- a/pinecone/core/openapi/db_control/model/backup_model.py +++ b/pinecone/core/openapi/db_control/model/backup_model.py @@ -204,6 +204,7 @@ def _from_openapi_data( created_at (str): Timestamp when the backup was created. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -221,6 +222,7 @@ def _from_openapi_data( ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -247,6 +249,7 @@ def _from_openapi_data( required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -312,6 +315,7 @@ def __init__( created_at (str): Timestamp when the backup was created. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -327,6 +331,7 @@ def __init__( ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/byoc_spec.py b/pinecone/core/openapi/db_control/model/byoc_spec.py index 9b693e78..a78279f2 100644 --- a/pinecone/core/openapi/db_control/model/byoc_spec.py +++ b/pinecone/core/openapi/db_control/model/byoc_spec.py @@ -140,6 +140,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -227,6 +230,7 @@ def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = 
kwargs.pop("_path_to_item", ()) @@ -242,6 +246,7 @@ def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/collection_list.py b/pinecone/core/openapi/db_control/model/collection_list.py index 96e1632a..e0618662 100644 --- a/pinecone/core/openapi/db_control/model/collection_list.py +++ b/pinecone/core/openapi/db_control/model/collection_list.py @@ -146,6 +146,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 collections ([CollectionModel]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -163,6 +164,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -183,6 +185,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -230,6 +233,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 collections ([CollectionModel]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -245,6 +249,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + 
self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/collection_model.py b/pinecone/core/openapi/db_control/model/collection_model.py index bafe08ab..5b3b9435 100644 --- a/pinecone/core/openapi/db_control/model/collection_model.py +++ b/pinecone/core/openapi/db_control/model/collection_model.py @@ -163,6 +163,7 @@ def _from_openapi_data(cls: Type[T], name, status, environment, *args, **kwargs) vector_count (int): The number of records stored in the collection. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -180,6 +181,7 @@ def _from_openapi_data(cls: Type[T], name, status, environment, *args, **kwargs) ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -203,6 +205,7 @@ def _from_openapi_data(cls: Type[T], name, status, environment, *args, **kwargs) required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -257,6 +260,7 @@ def __init__(self, name, status, environment, *args, **kwargs) -> None: # noqa: vector_count (int): The number of records stored in the collection. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -272,6 +276,7 @@ def __init__(self, name, status, environment, *args, **kwargs) -> None: # noqa: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/configure_index_request.py b/pinecone/core/openapi/db_control/model/configure_index_request.py index aa2d7704..bc79fd9d 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request.py @@ -165,6 +165,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 embed (ConfigureIndexRequestEmbed): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -182,6 +183,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -202,6 +204,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -252,6 +255,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 embed (ConfigureIndexRequestEmbed): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = 
kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -267,6 +271,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_embed.py b/pinecone/core/openapi/db_control/model/configure_index_request_embed.py index ce980b88..f63db37d 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_embed.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_embed.py @@ -147,6 +147,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -164,6 +165,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -184,6 +186,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +237,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -249,6 +253,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_spec.py b/pinecone/core/openapi/db_control/model/configure_index_request_spec.py index ae9a76fa..840127a9 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_spec.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_spec.py @@ -150,6 +150,7 @@ def _from_openapi_data(cls: Type[T], pod, *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -167,6 +168,7 @@ def _from_openapi_data(cls: Type[T], pod, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -188,6 +190,7 @@ def _from_openapi_data(cls: Type[T], pod, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -237,6 +240,7 @@ def __init__(self, pod, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", 
True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -252,6 +256,7 @@ def __init__(self, pod, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py b/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py index 67721ce4..adf0efd5 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py @@ -143,6 +143,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 pod_type (str): The type of pod to use. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`. [optional] if omitted the server will use the default value of "p1.x1". # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -160,6 +161,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -180,6 +182,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -228,6 +231,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 pod_type (str): The type of pod to use. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`. 
[optional] if omitted the server will use the default value of "p1.x1". # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -243,6 +247,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_backup_request.py b/pinecone/core/openapi/db_control/model/create_backup_request.py index bb79710d..df3ef581 100644 --- a/pinecone/core/openapi/db_control/model/create_backup_request.py +++ b/pinecone/core/openapi/db_control/model/create_backup_request.py @@ -141,6 +141,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 description (str): A description of the backup. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -158,6 +159,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -226,6 +229,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 description (str): A description of the backup. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -241,6 +245,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_collection_request.py b/pinecone/core/openapi/db_control/model/create_collection_request.py index fe2807e2..f1b0e06f 100644 --- a/pinecone/core/openapi/db_control/model/create_collection_request.py +++ b/pinecone/core/openapi/db_control/model/create_collection_request.py @@ -145,6 +145,7 @@ def _from_openapi_data(cls: Type[T], name, source, *args, **kwargs) -> T: # noq _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -162,6 +163,7 @@ def _from_openapi_data(cls: Type[T], name, source, *args, **kwargs) -> T: # noq ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -184,6 +186,7 @@ def _from_openapi_data(cls: Type[T], name, source, *args, **kwargs) -> T: # noq required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +237,7 @@ def __init__(self, name, source, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = 
kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -249,6 +253,7 @@ def __init__(self, name, source, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_for_model_request.py b/pinecone/core/openapi/db_control/model/create_index_for_model_request.py index d4331ba4..05a4b10f 100644 --- a/pinecone/core/openapi/db_control/model/create_index_for_model_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_for_model_request.py @@ -173,6 +173,7 @@ def _from_openapi_data(cls: Type[T], name, cloud, region, embed, *args, **kwargs tags (IndexTags): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -190,6 +191,7 @@ def _from_openapi_data(cls: Type[T], name, cloud, region, embed, *args, **kwargs ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -214,6 +216,7 @@ def _from_openapi_data(cls: Type[T], name, cloud, region, embed, *args, **kwargs required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -268,6 +271,7 @@ def __init__(self, name, cloud, region, embed, *args, **kwargs) -> None: # noqa tags (IndexTags): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = 
kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -283,6 +287,7 @@ def __init__(self, name, cloud, region, embed, *args, **kwargs) -> None: # noqa ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py b/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py index 7d3f4f08..a70cce3e 100644 --- a/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py +++ b/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py @@ -157,6 +157,7 @@ def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -174,6 +175,7 @@ def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -196,6 +198,7 @@ def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -250,6 +253,7 @@ def __init__(self, model, field_map, *args, **kwargs) -> None: # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -265,6 +269,7 @@ def __init__(self, model, field_map, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py b/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py index 99f713b2..ef3127fa 100644 --- a/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py @@ -158,6 +158,7 @@ def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 deletion_protection (DeletionProtection): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -175,6 +176,7 @@ def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -196,6 +198,7 @@ def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -247,6 +250,7 @@ def __init__(self, name, *args, **kwargs) -> None: # noqa: E501 deletion_protection (DeletionProtection): [optional] # noqa: E501 """ + 
_enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -262,6 +266,7 @@ def __init__(self, name, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py b/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py index ee6e7f36..dc6bbbbd 100644 --- a/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py +++ b/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py @@ -143,6 +143,7 @@ def _from_openapi_data(cls: Type[T], restore_job_id, index_id, *args, **kwargs) _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -160,6 +161,7 @@ def _from_openapi_data(cls: Type[T], restore_job_id, index_id, *args, **kwargs) ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -182,6 +184,7 @@ def _from_openapi_data(cls: Type[T], restore_job_id, index_id, *args, **kwargs) required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -232,6 +235,7 @@ def __init__(self, restore_job_id, index_id, *args, **kwargs) -> None: # noqa: _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = 
kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -247,6 +251,7 @@ def __init__(self, restore_job_id, index_id, *args, **kwargs) -> None: # noqa: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_request.py b/pinecone/core/openapi/db_control/model/create_index_request.py index ff9548b7..55a6096e 100644 --- a/pinecone/core/openapi/db_control/model/create_index_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_request.py @@ -175,6 +175,7 @@ def _from_openapi_data(cls: Type[T], name, spec, *args, **kwargs) -> T: # noqa: vector_type (str): The index vector type. You can use 'dense' or 'sparse'. If 'dense', the vector dimension must be specified. If 'sparse', the vector dimension should not be specified. [optional] if omitted the server will use the default value of "dense". 
# noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -192,6 +193,7 @@ def _from_openapi_data(cls: Type[T], name, spec, *args, **kwargs) -> T: # noqa: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -214,6 +216,7 @@ def _from_openapi_data(cls: Type[T], name, spec, *args, **kwargs) -> T: # noqa: required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -269,6 +272,7 @@ def __init__(self, name, spec, *args, **kwargs) -> None: # noqa: E501 vector_type (str): The index vector type. You can use 'dense' or 'sparse'. If 'dense', the vector dimension must be specified. If 'sparse', the vector dimension should not be specified. [optional] if omitted the server will use the default value of "dense". 
# noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -284,6 +288,7 @@ def __init__(self, name, spec, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/deletion_protection.py b/pinecone/core/openapi/db_control/model/deletion_protection.py index 3194d70e..4b67f67e 100644 --- a/pinecone/core/openapi/db_control/model/deletion_protection.py +++ b/pinecone/core/openapi/db_control/model/deletion_protection.py @@ -95,6 +95,7 @@ def discriminator(cls): required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -167,12 +168,14 @@ def __init__(self, *args, **kwargs) -> None: if value is None: value = "disabled" + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -253,12 +256,14 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: if value is None: value = "disabled" + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = 
kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/error_response.py b/pinecone/core/openapi/db_control/model/error_response.py index a64ae9e2..886a9c39 100644 --- a/pinecone/core/openapi/db_control/model/error_response.py +++ b/pinecone/core/openapi/db_control/model/error_response.py @@ -151,6 +151,7 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +169,7 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -190,6 +192,7 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -240,6 +243,7 @@ def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -255,6 +259,7 @@ def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type 
self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/error_response_error.py b/pinecone/core/openapi/db_control/model/error_response_error.py index 488c99b5..2cf453cc 100644 --- a/pinecone/core/openapi/db_control/model/error_response_error.py +++ b/pinecone/core/openapi/db_control/model/error_response_error.py @@ -169,6 +169,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -186,6 +187,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -208,6 +210,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -259,6 +262,7 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -274,6 +278,7 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_list.py b/pinecone/core/openapi/db_control/model/index_list.py index 99a4d6df..6b22bea3 100644 --- a/pinecone/core/openapi/db_control/model/index_list.py +++ b/pinecone/core/openapi/db_control/model/index_list.py @@ -146,6 +146,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 indexes ([IndexModel]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -163,6 +164,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -183,6 +185,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -230,6 +233,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 indexes ([IndexModel]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = 
kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -245,6 +249,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_model.py b/pinecone/core/openapi/db_control/model/index_model.py index 2e5c6224..5ded19c3 100644 --- a/pinecone/core/openapi/db_control/model/index_model.py +++ b/pinecone/core/openapi/db_control/model/index_model.py @@ -189,6 +189,7 @@ def _from_openapi_data(cls: Type[T], name, metric, host, spec, status, *args, ** """ vector_type = kwargs.get("vector_type", "dense") + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -206,6 +207,7 @@ def _from_openapi_data(cls: Type[T], name, metric, host, spec, status, *args, ** ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -232,6 +234,7 @@ def _from_openapi_data(cls: Type[T], name, metric, host, spec, status, *args, ** required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -291,6 +294,7 @@ def __init__(self, name, metric, host, spec, status, *args, **kwargs) -> None: """ vector_type = kwargs.get("vector_type", "dense") + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -306,6 +310,7 @@ def __init__(self, name, metric, host, spec, 
status, *args, **kwargs) -> None: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_model_spec.py b/pinecone/core/openapi/db_control/model/index_model_spec.py index 574deea1..1b5cd871 100644 --- a/pinecone/core/openapi/db_control/model/index_model_spec.py +++ b/pinecone/core/openapi/db_control/model/index_model_spec.py @@ -156,6 +156,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 serverless (ServerlessSpec): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -173,6 +174,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -193,6 +195,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -242,6 +245,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 serverless (ServerlessSpec): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -257,6 +261,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = 
_spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_model_status.py b/pinecone/core/openapi/db_control/model/index_model_status.py index 2379c764..d549526d 100644 --- a/pinecone/core/openapi/db_control/model/index_model_status.py +++ b/pinecone/core/openapi/db_control/model/index_model_status.py @@ -155,6 +155,7 @@ def _from_openapi_data(cls: Type[T], ready, state, *args, **kwargs) -> T: # noq _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -172,6 +173,7 @@ def _from_openapi_data(cls: Type[T], ready, state, *args, **kwargs) -> T: # noq ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -194,6 +196,7 @@ def _from_openapi_data(cls: Type[T], ready, state, *args, **kwargs) -> T: # noq required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -244,6 +247,7 @@ def __init__(self, ready, state, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -259,6 +263,7 @@ def __init__(self, ready, state, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_spec.py 
b/pinecone/core/openapi/db_control/model/index_spec.py index 9e0cc24b..d0acd19c 100644 --- a/pinecone/core/openapi/db_control/model/index_spec.py +++ b/pinecone/core/openapi/db_control/model/index_spec.py @@ -149,6 +149,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 byoc (ByocSpec): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -166,6 +167,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -186,6 +188,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -235,6 +238,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 byoc (ByocSpec): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -250,6 +254,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_tags.py b/pinecone/core/openapi/db_control/model/index_tags.py index bee527a5..c71dc4f9 100644 --- a/pinecone/core/openapi/db_control/model/index_tags.py +++ b/pinecone/core/openapi/db_control/model/index_tags.py @@ -133,6 +133,7 @@ 
def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -150,6 +151,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -170,6 +172,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -216,6 +219,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -231,6 +235,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/model_index_embed.py b/pinecone/core/openapi/db_control/model/model_index_embed.py index 1e358059..c47df155 100644 --- a/pinecone/core/openapi/db_control/model/model_index_embed.py +++ b/pinecone/core/openapi/db_control/model/model_index_embed.py @@ -162,6 +162,7 @@ def _from_openapi_data(cls: Type[T], model, *args, **kwargs) -> T: # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -179,6 +180,7 @@ def _from_openapi_data(cls: Type[T], model, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -200,6 +202,7 @@ def _from_openapi_data(cls: Type[T], model, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -255,6 +258,7 @@ def __init__(self, model, *args, **kwargs) -> None: # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -270,6 +274,7 @@ def __init__(self, model, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/pagination_response.py b/pinecone/core/openapi/db_control/model/pagination_response.py index 3a0182a5..1942c2c5 100644 --- a/pinecone/core/openapi/db_control/model/pagination_response.py +++ b/pinecone/core/openapi/db_control/model/pagination_response.py @@ -140,6 +140,7 @@ def _from_openapi_data(cls: Type[T], next, *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = 
kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], next, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], next, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -227,6 +230,7 @@ def __init__(self, next, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -242,6 +246,7 @@ def __init__(self, next, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/pod_spec.py b/pinecone/core/openapi/db_control/model/pod_spec.py index a0171c8c..d19d2645 100644 --- a/pinecone/core/openapi/db_control/model/pod_spec.py +++ b/pinecone/core/openapi/db_control/model/pod_spec.py @@ -173,6 +173,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa """ pod_type = kwargs.get("pod_type", "p1.x1") + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) 
@@ -190,6 +191,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -212,6 +214,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -268,6 +271,7 @@ def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 """ pod_type = kwargs.get("pod_type", "p1.x1") + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -283,6 +287,7 @@ def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py b/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py index d8df2bc9..02b855a1 100644 --- a/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py +++ b/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py @@ -138,6 +138,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 indexed ([str]): By default, all metadata is indexed; to change this behavior, use this property to specify an array of metadata fields that should be indexed. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +156,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +177,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +225,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 indexed ([str]): By default, all metadata is indexed; to change this behavior, use this property to specify an array of metadata fields that should be indexed. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/restore_job_list.py b/pinecone/core/openapi/db_control/model/restore_job_list.py index fb66e233..bcf4bd62 100644 --- a/pinecone/core/openapi/db_control/model/restore_job_list.py +++ b/pinecone/core/openapi/db_control/model/restore_job_list.py @@ -153,6 +153,7 @@ def _from_openapi_data(cls: Type[T], data, *args, **kwargs) -> T: # noqa: E501 pagination (PaginationResponse): [optional] # noqa: E501 """ + 
_enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -170,6 +171,7 @@ def _from_openapi_data(cls: Type[T], data, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -191,6 +193,7 @@ def _from_openapi_data(cls: Type[T], data, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -241,6 +244,7 @@ def __init__(self, data, *args, **kwargs) -> None: # noqa: E501 pagination (PaginationResponse): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -256,6 +260,7 @@ def __init__(self, data, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/restore_job_model.py b/pinecone/core/openapi/db_control/model/restore_job_model.py index 8f541188..bc75aba4 100644 --- a/pinecone/core/openapi/db_control/model/restore_job_model.py +++ b/pinecone/core/openapi/db_control/model/restore_job_model.py @@ -173,6 +173,7 @@ def _from_openapi_data( percent_complete (float): The progress made by the restore job out of 100 [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = 
kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -190,6 +191,7 @@ def _from_openapi_data( ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -216,6 +218,7 @@ def _from_openapi_data( required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -282,6 +285,7 @@ def __init__( percent_complete (float): The progress made by the restore job out of 100 [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -297,6 +301,7 @@ def __init__( ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/serverless_spec.py b/pinecone/core/openapi/db_control/model/serverless_spec.py index 20ac901f..595ad811 100644 --- a/pinecone/core/openapi/db_control/model/serverless_spec.py +++ b/pinecone/core/openapi/db_control/model/serverless_spec.py @@ -145,6 +145,7 @@ def _from_openapi_data(cls: Type[T], cloud, region, *args, **kwargs) -> T: # no _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -162,6 +163,7 @@ def _from_openapi_data(cls: Type[T], cloud, region, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = 
_spec_property_naming self._path_to_item = _path_to_item @@ -184,6 +186,7 @@ def _from_openapi_data(cls: Type[T], cloud, region, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +237,7 @@ def __init__(self, cloud, region, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -249,6 +253,7 @@ def __init__(self, cloud, region, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/delete_request.py b/pinecone/core/openapi/db_data/model/delete_request.py index 48855458..1f2abd25 100644 --- a/pinecone/core/openapi/db_data/model/delete_request.py +++ b/pinecone/core/openapi/db_data/model/delete_request.py @@ -147,6 +147,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 filter ({str: (bool, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support delete by metadata. Instead, you can use the `list` operation to fetch the vector IDs based on their common ID prefix and then delete the records by ID. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -164,6 +165,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -184,6 +186,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +237,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 filter ({str: (bool, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support delete by metadata. Instead, you can use the `list` operation to fetch the vector IDs based on their common ID prefix and then delete the records by ID. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -249,6 +253,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/describe_index_stats_request.py b/pinecone/core/openapi/db_data/model/describe_index_stats_request.py index 3ea3bb6d..059c79a8 100644 --- a/pinecone/core/openapi/db_data/model/describe_index_stats_request.py +++ b/pinecone/core/openapi/db_data/model/describe_index_stats_request.py @@ -138,6 +138,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 filter ({str: (bool, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +156,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +177,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +225,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 filter ({str: (bool, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/fetch_response.py b/pinecone/core/openapi/db_data/model/fetch_response.py index f662b596..b3d23ef9 100644 --- a/pinecone/core/openapi/db_data/model/fetch_response.py +++ b/pinecone/core/openapi/db_data/model/fetch_response.py @@ -154,6 +154,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 usage (Usage): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -171,6 +172,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -191,6 +193,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -240,6 +243,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 usage (Usage): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item 
= kwargs.pop("_path_to_item", ()) @@ -255,6 +259,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/hit.py b/pinecone/core/openapi/db_data/model/hit.py index b8e16de9..c6407d2f 100644 --- a/pinecone/core/openapi/db_data/model/hit.py +++ b/pinecone/core/openapi/db_data/model/hit.py @@ -146,6 +146,7 @@ def _from_openapi_data(cls: Type[T], _id, _score, fields, *args, **kwargs) -> T: _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -163,6 +164,7 @@ def _from_openapi_data(cls: Type[T], _id, _score, fields, *args, **kwargs) -> T: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -186,6 +188,7 @@ def _from_openapi_data(cls: Type[T], _id, _score, fields, *args, **kwargs) -> T: required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -237,6 +240,7 @@ def __init__(self, _id, _score, fields, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -252,6 +256,7 @@ def __init__(self, _id, _score, fields, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values 
self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/import_error_mode.py b/pinecone/core/openapi/db_data/model/import_error_mode.py index 7bda43b0..66bd3fd0 100644 --- a/pinecone/core/openapi/db_data/model/import_error_mode.py +++ b/pinecone/core/openapi/db_data/model/import_error_mode.py @@ -140,6 +140,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 on_error (str): Indicates how to respond to errors during the import process. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -177,6 +179,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -224,6 +227,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 on_error (str): Indicates how to respond to errors during the import process. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -239,6 +243,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/import_model.py b/pinecone/core/openapi/db_data/model/import_model.py index 4d990886..4c87e189 100644 --- a/pinecone/core/openapi/db_data/model/import_model.py +++ b/pinecone/core/openapi/db_data/model/import_model.py @@ -170,6 +170,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 error (str): The error message if the import process failed. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -187,6 +188,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -207,6 +209,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -261,6 +264,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 error (str): The error message if the import process failed. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -276,6 +280,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/index_description.py b/pinecone/core/openapi/db_data/model/index_description.py index 8fa8aa1f..af438c59 100644 --- a/pinecone/core/openapi/db_data/model/index_description.py +++ b/pinecone/core/openapi/db_data/model/index_description.py @@ -161,6 +161,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 vector_type (str): The type of vectors stored in the index. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -178,6 +179,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -198,6 +200,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -250,6 +253,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 vector_type (str): The type of vectors stored in the index. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -265,6 +269,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/list_imports_response.py b/pinecone/core/openapi/db_data/model/list_imports_response.py index 254de580..ede53ad1 100644 --- a/pinecone/core/openapi/db_data/model/list_imports_response.py +++ b/pinecone/core/openapi/db_data/model/list_imports_response.py @@ -151,6 +151,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 pagination (Pagination): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +169,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -188,6 +190,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -236,6 +239,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 pagination (Pagination): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = 
kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -251,6 +255,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/list_item.py b/pinecone/core/openapi/db_data/model/list_item.py index 14d77869..eed68e6d 100644 --- a/pinecone/core/openapi/db_data/model/list_item.py +++ b/pinecone/core/openapi/db_data/model/list_item.py @@ -138,6 +138,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 id (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +156,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +177,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +225,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 id (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values 
self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/list_namespaces_response.py b/pinecone/core/openapi/db_data/model/list_namespaces_response.py index 3d75a727..1699d04a 100644 --- a/pinecone/core/openapi/db_data/model/list_namespaces_response.py +++ b/pinecone/core/openapi/db_data/model/list_namespaces_response.py @@ -151,6 +151,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 pagination (Pagination): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +169,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -188,6 +190,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -236,6 +239,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 pagination (Pagination): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -251,6 +255,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git 
a/pinecone/core/openapi/db_data/model/list_response.py b/pinecone/core/openapi/db_data/model/list_response.py index c4e44746..4e28ce18 100644 --- a/pinecone/core/openapi/db_data/model/list_response.py +++ b/pinecone/core/openapi/db_data/model/list_response.py @@ -159,6 +159,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 usage (Usage): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -176,6 +177,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -196,6 +198,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -246,6 +249,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 usage (Usage): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -261,6 +265,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/namespace_description.py b/pinecone/core/openapi/db_data/model/namespace_description.py index 099b14b2..4dcde11c 100644 --- a/pinecone/core/openapi/db_data/model/namespace_description.py +++ 
b/pinecone/core/openapi/db_data/model/namespace_description.py @@ -141,6 +141,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 record_count (int): The total amount of records within the namespace. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -158,6 +159,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -226,6 +229,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 record_count (int): The total amount of records within the namespace. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -241,6 +245,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/namespace_summary.py b/pinecone/core/openapi/db_data/model/namespace_summary.py index e9f8080c..15800b09 100644 --- a/pinecone/core/openapi/db_data/model/namespace_summary.py +++ b/pinecone/core/openapi/db_data/model/namespace_summary.py @@ -138,6 +138,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 vector_count (int): The number of vectors stored in this namespace. Note that updates to this field may lag behind updates to the underlying index and corresponding query results, etc. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +156,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +177,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +225,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 vector_count (int): The number of vectors stored in this namespace. Note that updates to this field may lag behind updates to the underlying index and corresponding query results, etc. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/pagination.py b/pinecone/core/openapi/db_data/model/pagination.py index 14cb2301..89d07865 100644 --- a/pinecone/core/openapi/db_data/model/pagination.py +++ b/pinecone/core/openapi/db_data/model/pagination.py @@ -138,6 +138,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 next (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +156,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +177,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +225,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 next (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = 
kwargs.pop("_path_to_item", ()) @@ -237,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/protobuf_any.py b/pinecone/core/openapi/db_data/model/protobuf_any.py index aebbe361..218f294f 100644 --- a/pinecone/core/openapi/db_data/model/protobuf_any.py +++ b/pinecone/core/openapi/db_data/model/protobuf_any.py @@ -141,6 +141,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 value (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -158,6 +159,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -226,6 +229,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 value (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -241,6 +245,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type 
self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/protobuf_null_value.py b/pinecone/core/openapi/db_data/model/protobuf_null_value.py index b46f14d2..ed47d38d 100644 --- a/pinecone/core/openapi/db_data/model/protobuf_null_value.py +++ b/pinecone/core/openapi/db_data/model/protobuf_null_value.py @@ -95,6 +95,7 @@ def discriminator(cls): required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -167,12 +168,14 @@ def __init__(self, *args, **kwargs) -> None: if value is None: value = "NULL_VALUE" + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -253,12 +256,14 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: if value is None: value = "NULL_VALUE" + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/query_request.py b/pinecone/core/openapi/db_data/model/query_request.py index f1aaf07e..41ab6a52 100644 --- a/pinecone/core/openapi/db_data/model/query_request.py +++ 
b/pinecone/core/openapi/db_data/model/query_request.py @@ -179,6 +179,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. Each request can contain either the `vector` or `id` parameter. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -196,6 +197,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -217,6 +219,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -274,6 +277,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. Each request can contain either the `vector` or `id` parameter. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -289,6 +293,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/query_response.py b/pinecone/core/openapi/db_data/model/query_response.py index 937de236..aaabb0bd 100644 --- a/pinecone/core/openapi/db_data/model/query_response.py +++ b/pinecone/core/openapi/db_data/model/query_response.py @@ -159,6 +159,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 usage (Usage): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -176,6 +177,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -196,6 +198,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -246,6 +249,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 usage (Usage): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) 
_path_to_item = kwargs.pop("_path_to_item", ()) @@ -261,6 +265,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/query_vector.py b/pinecone/core/openapi/db_data/model/query_vector.py index 12caef5d..903ce62a 100644 --- a/pinecone/core/openapi/db_data/model/query_vector.py +++ b/pinecone/core/openapi/db_data/model/query_vector.py @@ -163,6 +163,7 @@ def _from_openapi_data(cls: Type[T], values, *args, **kwargs) -> T: # noqa: E50 filter ({str: (bool, dict, float, int, list, str, none_type)}): An override for the metadata filter to apply. This replaces the request-level filter. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -180,6 +181,7 @@ def _from_openapi_data(cls: Type[T], values, *args, **kwargs) -> T: # noqa: E50 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -201,6 +203,7 @@ def _from_openapi_data(cls: Type[T], values, *args, **kwargs) -> T: # noqa: E50 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -254,6 +257,7 @@ def __init__(self, values, *args, **kwargs) -> None: # noqa: E501 filter ({str: (bool, dict, float, int, list, str, none_type)}): An override for the metadata filter to apply. This replaces the request-level filter. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -269,6 +273,7 @@ def __init__(self, values, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/rpc_status.py b/pinecone/core/openapi/db_data/model/rpc_status.py index 30196b58..cb6c7d24 100644 --- a/pinecone/core/openapi/db_data/model/rpc_status.py +++ b/pinecone/core/openapi/db_data/model/rpc_status.py @@ -152,6 +152,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 details ([ProtobufAny]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -169,6 +170,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -189,6 +191,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -238,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 details ([ProtobufAny]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) 
_path_to_item = kwargs.pop("_path_to_item", ()) @@ -253,6 +257,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/scored_vector.py b/pinecone/core/openapi/db_data/model/scored_vector.py index 949f4e2a..3248def5 100644 --- a/pinecone/core/openapi/db_data/model/scored_vector.py +++ b/pinecone/core/openapi/db_data/model/scored_vector.py @@ -162,6 +162,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 metadata ({str: (bool, dict, float, int, list, str, none_type)}): This is the metadata, if it is requested. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -179,6 +180,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -200,6 +202,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -253,6 +256,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 metadata ({str: (bool, dict, float, int, list, str, none_type)}): This is the metadata, if it is requested. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -268,6 +272,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_request.py b/pinecone/core/openapi/db_data/model/search_records_request.py index df5fcd23..a85244af 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request.py +++ b/pinecone/core/openapi/db_data/model/search_records_request.py @@ -160,6 +160,7 @@ def _from_openapi_data(cls: Type[T], query, *args, **kwargs) -> T: # noqa: E501 rerank (SearchRecordsRequestRerank): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -177,6 +178,7 @@ def _from_openapi_data(cls: Type[T], query, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -198,6 +200,7 @@ def _from_openapi_data(cls: Type[T], query, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -249,6 +252,7 @@ def __init__(self, query, *args, **kwargs) -> None: # noqa: E501 rerank (SearchRecordsRequestRerank): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = 
kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -264,6 +268,7 @@ def __init__(self, query, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_request_query.py b/pinecone/core/openapi/db_data/model/search_records_request_query.py index caa7ee40..659cc057 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request_query.py +++ b/pinecone/core/openapi/db_data/model/search_records_request_query.py @@ -160,6 +160,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -177,6 +178,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -198,6 +200,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -251,6 +254,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -266,6 +270,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_request_rerank.py b/pinecone/core/openapi/db_data/model/search_records_request_rerank.py index 2684894a..bc80d08b 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request_rerank.py +++ b/pinecone/core/openapi/db_data/model/search_records_request_rerank.py @@ -152,6 +152,7 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: query (str): The query to rerank documents against. If a specific rerank query is specified, it overwrites the query input that was provided at the top level. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -169,6 +170,7 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -191,6 +193,7 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -244,6 +247,7 @@ def __init__(self, model, rank_fields, *args, **kwargs) -> None: # noqa: E501 query (str): The query to rerank documents against. If a specific rerank query is specified, it overwrites the query input that was provided at the top level. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -259,6 +263,7 @@ def __init__(self, model, rank_fields, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_response.py b/pinecone/core/openapi/db_data/model/search_records_response.py index 95958624..a46aa0c4 100644 --- a/pinecone/core/openapi/db_data/model/search_records_response.py +++ b/pinecone/core/openapi/db_data/model/search_records_response.py @@ -155,6 +155,7 @@ def _from_openapi_data(cls: Type[T], result, usage, *args, **kwargs) -> T: # no _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -172,6 +173,7 @@ def _from_openapi_data(cls: Type[T], result, usage, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -194,6 +196,7 @@ def _from_openapi_data(cls: Type[T], result, usage, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -244,6 +247,7 @@ def __init__(self, result, usage, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = 
kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -259,6 +263,7 @@ def __init__(self, result, usage, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_response_result.py b/pinecone/core/openapi/db_data/model/search_records_response_result.py index 87b0a5c6..f479c0a4 100644 --- a/pinecone/core/openapi/db_data/model/search_records_response_result.py +++ b/pinecone/core/openapi/db_data/model/search_records_response_result.py @@ -148,6 +148,7 @@ def _from_openapi_data(cls: Type[T], hits, *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -165,6 +166,7 @@ def _from_openapi_data(cls: Type[T], hits, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -186,6 +188,7 @@ def _from_openapi_data(cls: Type[T], hits, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -235,6 +238,7 @@ def __init__(self, hits, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = 
kwargs.pop("_path_to_item", ()) @@ -250,6 +254,7 @@ def __init__(self, hits, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_vector.py b/pinecone/core/openapi/db_data/model/search_records_vector.py index e5d551a5..81076824 100644 --- a/pinecone/core/openapi/db_data/model/search_records_vector.py +++ b/pinecone/core/openapi/db_data/model/search_records_vector.py @@ -152,6 +152,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 sparse_indices ([int]): The sparse embedding indices. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -169,6 +170,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -189,6 +191,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -238,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 sparse_indices ([int]): The sparse embedding indices. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -253,6 +257,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_usage.py b/pinecone/core/openapi/db_data/model/search_usage.py index dc9217a8..ca7cda7c 100644 --- a/pinecone/core/openapi/db_data/model/search_usage.py +++ b/pinecone/core/openapi/db_data/model/search_usage.py @@ -150,6 +150,7 @@ def _from_openapi_data(cls: Type[T], read_units, *args, **kwargs) -> T: # noqa: rerank_units (int): The number of rerank units consumed by this operation. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -167,6 +168,7 @@ def _from_openapi_data(cls: Type[T], read_units, *args, **kwargs) -> T: # noqa: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -188,6 +190,7 @@ def _from_openapi_data(cls: Type[T], read_units, *args, **kwargs) -> T: # noqa: required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -239,6 +242,7 @@ def __init__(self, read_units, *args, **kwargs) -> None: # noqa: E501 rerank_units (int): The number of rerank units consumed by this operation. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -254,6 +258,7 @@ def __init__(self, read_units, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_vector.py b/pinecone/core/openapi/db_data/model/search_vector.py index 88f18151..74ca49d4 100644 --- a/pinecone/core/openapi/db_data/model/search_vector.py +++ b/pinecone/core/openapi/db_data/model/search_vector.py @@ -146,6 +146,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 values (VectorValues): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -163,6 +164,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -183,6 +185,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -230,6 +233,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 values (VectorValues): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = 
kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -245,6 +249,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/single_query_results.py b/pinecone/core/openapi/db_data/model/single_query_results.py index bfc1bb2f..9c790616 100644 --- a/pinecone/core/openapi/db_data/model/single_query_results.py +++ b/pinecone/core/openapi/db_data/model/single_query_results.py @@ -149,6 +149,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 namespace (str): The namespace for the vectors. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -166,6 +167,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -186,6 +188,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +237,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 namespace (str): The namespace for the vectors. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -249,6 +253,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/sparse_values.py b/pinecone/core/openapi/db_data/model/sparse_values.py index c7f48e74..6b5acd85 100644 --- a/pinecone/core/openapi/db_data/model/sparse_values.py +++ b/pinecone/core/openapi/db_data/model/sparse_values.py @@ -146,6 +146,7 @@ def _from_openapi_data(cls: Type[T], indices, values, *args, **kwargs) -> T: # _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -163,6 +164,7 @@ def _from_openapi_data(cls: Type[T], indices, values, *args, **kwargs) -> T: # ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -185,6 +187,7 @@ def _from_openapi_data(cls: Type[T], indices, values, *args, **kwargs) -> T: # required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -235,6 +238,7 @@ def __init__(self, indices, values, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = 
kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -250,6 +254,7 @@ def __init__(self, indices, values, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/start_import_request.py b/pinecone/core/openapi/db_data/model/start_import_request.py index e0829047..f8b4602a 100644 --- a/pinecone/core/openapi/db_data/model/start_import_request.py +++ b/pinecone/core/openapi/db_data/model/start_import_request.py @@ -157,6 +157,7 @@ def _from_openapi_data(cls: Type[T], uri, *args, **kwargs) -> T: # noqa: E501 error_mode (ImportErrorMode): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -174,6 +175,7 @@ def _from_openapi_data(cls: Type[T], uri, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -195,6 +197,7 @@ def _from_openapi_data(cls: Type[T], uri, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -246,6 +249,7 @@ def __init__(self, uri, *args, **kwargs) -> None: # noqa: E501 error_mode (ImportErrorMode): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -261,6 +265,7 @@ def __init__(self, uri, 
*args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/start_import_response.py b/pinecone/core/openapi/db_data/model/start_import_response.py index 37da3ba6..0eba3dd9 100644 --- a/pinecone/core/openapi/db_data/model/start_import_response.py +++ b/pinecone/core/openapi/db_data/model/start_import_response.py @@ -140,6 +140,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 id (str): Unique identifier for the import operation. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -177,6 +179,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -224,6 +227,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 id (str): Unique identifier for the import operation. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -239,6 +243,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/update_request.py b/pinecone/core/openapi/db_data/model/update_request.py index bd937ebd..b2a879d5 100644 --- a/pinecone/core/openapi/db_data/model/update_request.py +++ b/pinecone/core/openapi/db_data/model/update_request.py @@ -163,6 +163,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 namespace (str): The namespace containing the vector to update. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -180,6 +181,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -201,6 +203,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -254,6 +257,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 namespace (str): The namespace containing the vector to update. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -269,6 +273,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/upsert_record.py b/pinecone/core/openapi/db_data/model/upsert_record.py index 8ae7a048..65739cc4 100644 --- a/pinecone/core/openapi/db_data/model/upsert_record.py +++ b/pinecone/core/openapi/db_data/model/upsert_record.py @@ -140,6 +140,7 @@ def _from_openapi_data(cls: Type[T], _id, *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], _id, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], _id, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -227,6 +230,7 @@ def __init__(self, _id, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", 
False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -242,6 +246,7 @@ def __init__(self, _id, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/upsert_request.py b/pinecone/core/openapi/db_data/model/upsert_request.py index 19058a3b..b5bc0b77 100644 --- a/pinecone/core/openapi/db_data/model/upsert_request.py +++ b/pinecone/core/openapi/db_data/model/upsert_request.py @@ -151,6 +151,7 @@ def _from_openapi_data(cls: Type[T], vectors, *args, **kwargs) -> T: # noqa: E5 namespace (str): The namespace where you upsert vectors. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +169,7 @@ def _from_openapi_data(cls: Type[T], vectors, *args, **kwargs) -> T: # noqa: E5 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -189,6 +191,7 @@ def _from_openapi_data(cls: Type[T], vectors, *args, **kwargs) -> T: # noqa: E5 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -239,6 +242,7 @@ def __init__(self, vectors, *args, **kwargs) -> None: # noqa: E501 namespace (str): The namespace where you upsert vectors. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -254,6 +258,7 @@ def __init__(self, vectors, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/upsert_response.py b/pinecone/core/openapi/db_data/model/upsert_response.py index e4da7d30..98fe68aa 100644 --- a/pinecone/core/openapi/db_data/model/upsert_response.py +++ b/pinecone/core/openapi/db_data/model/upsert_response.py @@ -138,6 +138,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 upserted_count (int): The number of vectors upserted. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +156,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +177,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +225,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 upserted_count (int): The number of vectors upserted. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/usage.py b/pinecone/core/openapi/db_data/model/usage.py index a8ab222f..d8b02b78 100644 --- a/pinecone/core/openapi/db_data/model/usage.py +++ b/pinecone/core/openapi/db_data/model/usage.py @@ -138,6 +138,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 read_units (int): The number of read units consumed by this operation. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +156,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +177,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +225,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 read_units (int): The number of read units consumed by this operation. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/vector.py b/pinecone/core/openapi/db_data/model/vector.py index de061b7d..ddda25d3 100644 --- a/pinecone/core/openapi/db_data/model/vector.py +++ b/pinecone/core/openapi/db_data/model/vector.py @@ -160,6 +160,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 metadata ({str: (bool, dict, float, int, list, str, none_type)}): This is the metadata included in the request. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -177,6 +178,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -198,6 +200,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -250,6 +253,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 metadata ({str: (bool, dict, float, int, list, str, none_type)}): This is the metadata included in the request. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -265,6 +269,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/vector_values.py b/pinecone/core/openapi/db_data/model/vector_values.py index acaade88..ad32376f 100644 --- a/pinecone/core/openapi/db_data/model/vector_values.py +++ b/pinecone/core/openapi/db_data/model/vector_values.py @@ -93,6 +93,7 @@ def discriminator(cls): required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -169,12 +170,14 @@ def __init__(self, *args, **kwargs) -> None: valid_classes=(self.__class__,), ) + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -259,12 +262,14 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: valid_classes=(self.__class__,), ) + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", 
()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/api/inference_api.py b/pinecone/core/openapi/inference/api/inference_api.py index a19557f8..5c9d2efe 100644 --- a/pinecone/core/openapi/inference/api/inference_api.py +++ b/pinecone/core/openapi/inference/api/inference_api.py @@ -231,15 +231,12 @@ def __list_models(self, **kwargs: ExtraOpenApiKwargsTypedDict): "all": ["type", "vector_type"], "required": [], "nullable": [], - "enum": ["type", "vector_type"], + "enum": [], "validation": [], }, root_map={ "validations": {}, - "allowed_values": { - ("type",): {"EMBED": "embed", "RERANK": "rerank"}, - ("vector_type",): {"DENSE": "dense", "SPARSE": "sparse"}, - }, + "allowed_values": {}, "openapi_types": {"type": (str,), "vector_type": (str,)}, "attribute_map": {"type": "type", "vector_type": "vector_type"}, "location_map": {"type": "query", "vector_type": "query"}, @@ -496,15 +493,12 @@ async def __list_models(self, **kwargs): "all": ["type", "vector_type"], "required": [], "nullable": [], - "enum": ["type", "vector_type"], + "enum": [], "validation": [], }, root_map={ "validations": {}, - "allowed_values": { - ("type",): {"EMBED": "embed", "RERANK": "rerank"}, - ("vector_type",): {"DENSE": "dense", "SPARSE": "sparse"}, - }, + "allowed_values": {}, "openapi_types": {"type": (str,), "vector_type": (str,)}, "attribute_map": {"type": "type", "vector_type": "vector_type"}, "location_map": {"type": "query", "vector_type": "query"}, diff --git a/pinecone/core/openapi/inference/model/dense_embedding.py b/pinecone/core/openapi/inference/model/dense_embedding.py index ec45c7bd..40ec736a 100644 --- a/pinecone/core/openapi/inference/model/dense_embedding.py +++ b/pinecone/core/openapi/inference/model/dense_embedding.py @@ -27,12 +27,6 @@ from pinecone.openapi_support.exceptions import 
PineconeApiAttributeError -def lazy_import(): - from pinecone.core.openapi.inference.model.vector_type import VectorType - - globals()["VectorType"] = VectorType - - from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property @@ -75,7 +69,6 @@ def additional_properties_type(cls): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - lazy_import() return (bool, dict, float, int, list, str, none_type) # noqa: E501 _nullable = False @@ -90,10 +83,9 @@ def openapi_types(cls): openapi_types (dict): The key is attribute name and the value is attribute type. """ - lazy_import() return { "values": ([float],), # noqa: E501 - "vector_type": (VectorType,), # noqa: E501 + "vector_type": (str,), # noqa: E501 } @cached_class_property @@ -116,7 +108,7 @@ def _from_openapi_data(cls: Type[T], values, vector_type, *args, **kwargs) -> T: Args: values ([float]): The dense embedding values. - vector_type (VectorType): + vector_type (str): Indicates whether this is a 'dense' or 'sparse' embedding. 
Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -151,6 +143,7 @@ def _from_openapi_data(cls: Type[T], values, vector_type, *args, **kwargs) -> T: _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +161,7 @@ def _from_openapi_data(cls: Type[T], values, vector_type, *args, **kwargs) -> T: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -190,6 +184,7 @@ def _from_openapi_data(cls: Type[T], values, vector_type, *args, **kwargs) -> T: required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -205,7 +200,7 @@ def __init__(self, values, vector_type, *args, **kwargs) -> None: # noqa: E501 Args: values ([float]): The dense embedding values. - vector_type (VectorType): + vector_type (str): Indicates whether this is a 'dense' or 'sparse' embedding. 
Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -240,6 +235,7 @@ def __init__(self, values, vector_type, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -255,6 +251,7 @@ def __init__(self, values, vector_type, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/document.py b/pinecone/core/openapi/inference/model/document.py index 99b88161..f6897555 100644 --- a/pinecone/core/openapi/inference/model/document.py +++ b/pinecone/core/openapi/inference/model/document.py @@ -133,6 +133,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -150,6 +151,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -170,6 +172,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -216,6 +219,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + 
_enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -231,6 +235,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embed_request.py b/pinecone/core/openapi/inference/model/embed_request.py index ba7e786b..0b71a114 100644 --- a/pinecone/core/openapi/inference/model/embed_request.py +++ b/pinecone/core/openapi/inference/model/embed_request.py @@ -154,6 +154,7 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) for available model parameters. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -171,6 +172,7 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -193,6 +195,7 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -244,6 +247,7 @@ def __init__(self, model, inputs, *args, **kwargs) -> None: # noqa: E501 parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) for available model parameters. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -259,6 +263,7 @@ def __init__(self, model, inputs, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embed_request_inputs.py b/pinecone/core/openapi/inference/model/embed_request_inputs.py index 1eda05b0..b1d26258 100644 --- a/pinecone/core/openapi/inference/model/embed_request_inputs.py +++ b/pinecone/core/openapi/inference/model/embed_request_inputs.py @@ -138,6 +138,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 text (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +156,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +177,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +225,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 text (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = 
kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embedding.py b/pinecone/core/openapi/inference/model/embedding.py index 72fd9b2d..799dd9ea 100644 --- a/pinecone/core/openapi/inference/model/embedding.py +++ b/pinecone/core/openapi/inference/model/embedding.py @@ -30,11 +30,9 @@ def lazy_import(): from pinecone.core.openapi.inference.model.dense_embedding import DenseEmbedding from pinecone.core.openapi.inference.model.sparse_embedding import SparseEmbedding - from pinecone.core.openapi.inference.model.vector_type import VectorType globals()["DenseEmbedding"] = DenseEmbedding globals()["SparseEmbedding"] = SparseEmbedding - globals()["VectorType"] = VectorType from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar @@ -96,7 +94,7 @@ def openapi_types(cls): """ lazy_import() return { - "vector_type": (VectorType,), # noqa: E501 + "vector_type": (str,), # noqa: E501 "sparse_tokens": ([str],), # noqa: E501 "values": ([float],), # noqa: E501 "sparse_values": ([float],), # noqa: E501 @@ -132,7 +130,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """Embedding - a model defined in OpenAPI Keyword Args: - vector_type (VectorType): + vector_type (str): Indicates whether this is a 'dense' or 'sparse' embedding. _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. 
@@ -220,6 +218,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -237,7 +236,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """Embedding - a model defined in OpenAPI Keyword Args: - vector_type (VectorType): + vector_type (str): Indicates whether this is a 'dense' or 'sparse' embedding. _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. @@ -274,6 +273,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 sparse_indices ([int]): The sparse embedding indices. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -289,6 +289,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embeddings_list.py b/pinecone/core/openapi/inference/model/embeddings_list.py index 2c4a4edb..4c25f9f7 100644 --- a/pinecone/core/openapi/inference/model/embeddings_list.py +++ b/pinecone/core/openapi/inference/model/embeddings_list.py @@ -159,6 +159,7 @@ def _from_openapi_data(cls: Type[T], model, vector_type, data, usage, *args, **k _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -176,6 +177,7 @@ def _from_openapi_data(cls: Type[T], model, 
vector_type, data, usage, *args, **k ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -200,6 +202,7 @@ def _from_openapi_data(cls: Type[T], model, vector_type, data, usage, *args, **k required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -252,6 +255,7 @@ def __init__(self, model, vector_type, data, usage, *args, **kwargs) -> None: # _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -267,6 +271,7 @@ def __init__(self, model, vector_type, data, usage, *args, **kwargs) -> None: # ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embeddings_list_usage.py b/pinecone/core/openapi/inference/model/embeddings_list_usage.py index ebfb2ce6..901df955 100644 --- a/pinecone/core/openapi/inference/model/embeddings_list_usage.py +++ b/pinecone/core/openapi/inference/model/embeddings_list_usage.py @@ -140,6 +140,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 total_tokens (int): Total number of tokens consumed across all inputs. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -177,6 +179,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -224,6 +227,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 total_tokens (int): Total number of tokens consumed across all inputs. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -239,6 +243,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/error_response.py b/pinecone/core/openapi/inference/model/error_response.py index e08f8449..f7657fae 100644 --- a/pinecone/core/openapi/inference/model/error_response.py +++ b/pinecone/core/openapi/inference/model/error_response.py @@ -151,6 +151,7 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) 
_spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +169,7 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -190,6 +192,7 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -240,6 +243,7 @@ def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -255,6 +259,7 @@ def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/error_response_error.py b/pinecone/core/openapi/inference/model/error_response_error.py index b864fab6..0984cbea 100644 --- a/pinecone/core/openapi/inference/model/error_response_error.py +++ b/pinecone/core/openapi/inference/model/error_response_error.py @@ -167,6 +167,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -184,6 +185,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -206,6 +208,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -257,6 +260,7 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -272,6 +276,7 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/model_info.py b/pinecone/core/openapi/inference/model/model_info.py index 6daa37b3..9e4b076d 100644 --- a/pinecone/core/openapi/inference/model/model_info.py +++ b/pinecone/core/openapi/inference/model/model_info.py @@ -71,10 +71,7 @@ class ModelInfo(ModelNormal): _data_store: Dict[str, Any] _check_type: bool - allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = { - ("type",): {"EMBED": "embed", "RERANK": "rerank"}, - ("vector_type",): {"DENSE": "dense", "SPARSE": "sparse"}, - } + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { ("default_dimension",): {"inclusive_maximum": 20000, "inclusive_minimum": 1}, @@ -196,6 +193,7 @@ def _from_openapi_data( supported_metrics (ModelInfoSupportedMetrics): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -213,6 +211,7 @@ def _from_openapi_data( ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -237,6 +236,7 @@ def _from_openapi_data( required_properties = set( [ + "_enforce_allowed_values", 
"_data_store", "_check_type", "_spec_property_naming", @@ -299,6 +299,7 @@ def __init__( supported_metrics (ModelInfoSupportedMetrics): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -314,6 +315,7 @@ def __init__( ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/model_info_list.py b/pinecone/core/openapi/inference/model/model_info_list.py index 9d176c0f..2c2ca496 100644 --- a/pinecone/core/openapi/inference/model/model_info_list.py +++ b/pinecone/core/openapi/inference/model/model_info_list.py @@ -146,6 +146,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 models ([ModelInfo]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -163,6 +164,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -183,6 +185,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -230,6 +233,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 models ([ModelInfo]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) 
_check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -245,6 +249,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/model_info_metric.py b/pinecone/core/openapi/inference/model/model_info_metric.py index 85dd0457..e84c8e19 100644 --- a/pinecone/core/openapi/inference/model/model_info_metric.py +++ b/pinecone/core/openapi/inference/model/model_info_metric.py @@ -95,6 +95,7 @@ def discriminator(cls): required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -171,12 +172,14 @@ def __init__(self, *args, **kwargs) -> None: valid_classes=(self.__class__,), ) + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -261,12 +264,14 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: valid_classes=(self.__class__,), ) + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values 
self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/model_info_supported_metrics.py b/pinecone/core/openapi/inference/model/model_info_supported_metrics.py index 96b2bb41..63c3e2fd 100644 --- a/pinecone/core/openapi/inference/model/model_info_supported_metrics.py +++ b/pinecone/core/openapi/inference/model/model_info_supported_metrics.py @@ -101,6 +101,7 @@ def discriminator(cls): required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -177,12 +178,14 @@ def __init__(self, *args, **kwargs) -> None: valid_classes=(self.__class__,), ) + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -267,12 +270,14 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: valid_classes=(self.__class__,), ) + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/model_info_supported_parameter.py 
b/pinecone/core/openapi/inference/model/model_info_supported_parameter.py index 56b17ac3..3d889cad 100644 --- a/pinecone/core/openapi/inference/model/model_info_supported_parameter.py +++ b/pinecone/core/openapi/inference/model/model_info_supported_parameter.py @@ -163,6 +163,7 @@ def _from_openapi_data( default (dict): The default value for the parameter when a parameter is optional. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -180,6 +181,7 @@ def _from_openapi_data( ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -204,6 +206,7 @@ def _from_openapi_data( required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -260,6 +263,7 @@ def __init__(self, parameter, type, value_type, required, *args, **kwargs) -> No default (dict): The default value for the parameter when a parameter is optional. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -275,6 +279,7 @@ def __init__(self, parameter, type, value_type, required, *args, **kwargs) -> No ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/ranked_document.py b/pinecone/core/openapi/inference/model/ranked_document.py index be8699dc..6ad8bbbb 100644 --- a/pinecone/core/openapi/inference/model/ranked_document.py +++ b/pinecone/core/openapi/inference/model/ranked_document.py @@ -154,6 +154,7 @@ def _from_openapi_data(cls: Type[T], index, score, *args, **kwargs) -> T: # noq document (Document): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -171,6 +172,7 @@ def _from_openapi_data(cls: Type[T], index, score, *args, **kwargs) -> T: # noq ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -193,6 +195,7 @@ def _from_openapi_data(cls: Type[T], index, score, *args, **kwargs) -> T: # noq required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -244,6 +247,7 @@ def __init__(self, index, score, *args, **kwargs) -> None: # noqa: E501 document (Document): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) 
_spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -259,6 +263,7 @@ def __init__(self, index, score, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/rerank_request.py b/pinecone/core/openapi/inference/model/rerank_request.py index d8ce884b..cb4fd6d4 100644 --- a/pinecone/core/openapi/inference/model/rerank_request.py +++ b/pinecone/core/openapi/inference/model/rerank_request.py @@ -166,6 +166,7 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -183,6 +184,7 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -206,6 +208,7 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -261,6 +264,7 @@ def __init__(self, model, query, documents, *args, **kwargs) -> None: # noqa: E parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -276,6 +280,7 @@ def __init__(self, model, query, documents, *args, **kwargs) -> None: # noqa: E ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/rerank_result.py b/pinecone/core/openapi/inference/model/rerank_result.py index ee9e6fa6..53f56923 100644 --- a/pinecone/core/openapi/inference/model/rerank_result.py +++ b/pinecone/core/openapi/inference/model/rerank_result.py @@ -156,6 +156,7 @@ def _from_openapi_data(cls: Type[T], model, data, usage, *args, **kwargs) -> T: _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -173,6 +174,7 @@ def _from_openapi_data(cls: Type[T], model, data, usage, *args, **kwargs) -> T: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -196,6 +198,7 @@ def _from_openapi_data(cls: Type[T], model, data, usage, *args, **kwargs) -> T: required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -247,6 +250,7 @@ def __init__(self, model, data, usage, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) 
_spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -262,6 +266,7 @@ def __init__(self, model, data, usage, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/rerank_result_usage.py b/pinecone/core/openapi/inference/model/rerank_result_usage.py index ad24c7a7..4df237ab 100644 --- a/pinecone/core/openapi/inference/model/rerank_result_usage.py +++ b/pinecone/core/openapi/inference/model/rerank_result_usage.py @@ -140,6 +140,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 rerank_units (int): The number of rerank units consumed by this operation. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -177,6 +179,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -224,6 +227,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 rerank_units (int): The number of rerank units consumed by this operation. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -239,6 +243,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/sparse_embedding.py b/pinecone/core/openapi/inference/model/sparse_embedding.py index f3b34b53..6a570c44 100644 --- a/pinecone/core/openapi/inference/model/sparse_embedding.py +++ b/pinecone/core/openapi/inference/model/sparse_embedding.py @@ -27,12 +27,6 @@ from pinecone.openapi_support.exceptions import PineconeApiAttributeError -def lazy_import(): - from pinecone.core.openapi.inference.model.vector_type import VectorType - - globals()["VectorType"] = VectorType - - from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property @@ -75,7 +69,6 @@ def additional_properties_type(cls): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - lazy_import() return (bool, dict, float, int, list, str, none_type) # noqa: E501 _nullable = False @@ -90,11 +83,10 @@ def openapi_types(cls): openapi_types (dict): The key is attribute name and the value is attribute type. """ - lazy_import() return { "sparse_values": ([float],), # noqa: E501 "sparse_indices": ([int],), # noqa: E501 - "vector_type": (VectorType,), # noqa: E501 + "vector_type": (str,), # noqa: E501 "sparse_tokens": ([str],), # noqa: E501 } @@ -123,7 +115,7 @@ def _from_openapi_data( Args: sparse_values ([float]): The sparse embedding values. 
sparse_indices ([int]): The sparse embedding indices. - vector_type (VectorType): + vector_type (str): Indicates whether this is a 'dense' or 'sparse' embedding. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -159,6 +151,7 @@ def _from_openapi_data( sparse_tokens ([str]): The normalized tokens used to create the sparse embedding. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -176,6 +169,7 @@ def _from_openapi_data( ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -199,6 +193,7 @@ def _from_openapi_data( required_properties = set( [ + "_enforce_allowed_values", "_data_store", "_check_type", "_spec_property_naming", @@ -215,7 +210,7 @@ def __init__(self, sparse_values, sparse_indices, vector_type, *args, **kwargs) Args: sparse_values ([float]): The sparse embedding values. sparse_indices ([int]): The sparse embedding indices. - vector_type (VectorType): + vector_type (str): Indicates whether this is a 'dense' or 'sparse' embedding. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -251,6 +246,7 @@ def __init__(self, sparse_values, sparse_indices, vector_type, *args, **kwargs) sparse_tokens ([str]): The normalized tokens used to create the sparse embedding. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -266,6 +262,7 @@ def __init__(self, sparse_values, sparse_indices, vector_type, *args, **kwargs) ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/vector_type.py b/pinecone/core/openapi/inference/model/vector_type.py deleted file mode 100644 index 3985d043..00000000 --- a/pinecone/core/openapi/inference/model/vector_type.py +++ /dev/null @@ -1,284 +0,0 @@ -""" -Pinecone Inference API - -Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 - -This file is @generated using OpenAPI. - -The version of the OpenAPI document: 2025-04 -Contact: support@pinecone.io -""" - -from pinecone.openapi_support.model_utils import ( # noqa: F401 - PineconeApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - OpenApiModel, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from pinecone.openapi_support.exceptions import PineconeApiAttributeError - - -from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar -from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property - -T = TypeVar("T", bound="VectorType") - - -class VectorType(ModelSimple): - """NOTE: This class is @generated using OpenAPI. - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - _data_store: Dict[str, Any] - _check_type: bool - - allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = { - ("value",): {"DENSE": "dense", "SPARSE": "sparse"} - } - - validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} - - @cached_class_property - def additional_properties_type(cls): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return (bool, dict, float, int, list, str, none_type) # noqa: E501 - - _nullable = False - - @cached_class_property - def openapi_types(cls): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return {"value": (str,)} - - @cached_class_property - def discriminator(cls): - return None - - attribute_map: Dict[str, str] = {} - - read_only_vars: Set[str] = set() - - _composed_schemas = None - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs) -> None: - """VectorType - a model defined in OpenAPI - - Note that value can be passed either in args or in kwargs, but not in both. 
- - Args: - args[0] (str): Indicates whether this is a 'dense' or 'sparse' embedding.., must be one of ["dense", "sparse", ] # noqa: E501 - - Keyword Args: - value (str): Indicates whether this is a 'dense' or 'sparse' embedding.., must be one of ["dense", "sparse", ] # noqa: E501 - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - # required up here when default value is not given - _path_to_item = kwargs.pop("_path_to_item", ()) - - value = None - if "value" in kwargs: - value = kwargs.pop("value") - - if value is None and args: - if len(args) == 1: - value = args[0] - elif len(args) > 1: - raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % (args, self.__class__.__name__), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - if value is None: - raise PineconeApiTypeError( - "value is required, but not passed in args or kwargs and doesn't have default", - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - self.value = value - if kwargs: - raise PineconeApiTypeError( - "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." - % (kwargs, self.__class__.__name__), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: - """VectorType - a model defined in OpenAPI - - Note that value can be passed either in args or in kwargs, but not in both. 
- - Args: - args[0] (str): Indicates whether this is a 'dense' or 'sparse' embedding., must be one of ["dense", "sparse", ] # noqa: E501 - - Keyword Args: - value (str): Indicates whether this is a 'dense' or 'sparse' embedding., must be one of ["dense", "sparse", ] # noqa: E501 - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - # required up here when default value is not given - _path_to_item = kwargs.pop("_path_to_item", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - value = None - if "value" in kwargs: - value = kwargs.pop("value") - - if value is None and args: - if len(args) == 1: - value = args[0] - elif len(args) > 1: - raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % (args, self.__class__.__name__), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - if value is None: - raise PineconeApiTypeError( - "value is required, but not passed in args or kwargs and doesn't have default", - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - self.value = value - if kwargs: - raise PineconeApiTypeError( - "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." 
- % (kwargs, self.__class__.__name__), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - return self diff --git a/pinecone/core/openapi/inference/models/__init__.py b/pinecone/core/openapi/inference/models/__init__.py index f338afe2..d68ec1ff 100644 --- a/pinecone/core/openapi/inference/models/__init__.py +++ b/pinecone/core/openapi/inference/models/__init__.py @@ -32,4 +32,3 @@ from pinecone.core.openapi.inference.model.rerank_result import RerankResult from pinecone.core.openapi.inference.model.rerank_result_usage import RerankResultUsage from pinecone.core.openapi.inference.model.sparse_embedding import SparseEmbedding -from pinecone.core.openapi.inference.model.vector_type import VectorType diff --git a/pinecone/inference/__init__.py b/pinecone/inference/__init__.py index 30e93330..235cbc69 100644 --- a/pinecone/inference/__init__.py +++ b/pinecone/inference/__init__.py @@ -2,5 +2,6 @@ from .inference import Inference from .inference_asyncio import AsyncioInference from .inference_request_builder import RerankModel, EmbedModel +from .models import ModelInfo, ModelInfoList, EmbeddingsList, RerankResult install_repl_overrides() diff --git a/pinecone/inference/inference.py b/pinecone/inference/inference.py index 62e6cbcd..53a52aa3 100644 --- a/pinecone/inference/inference.py +++ b/pinecone/inference/inference.py @@ -7,6 +7,7 @@ from .models import EmbeddingsList, RerankResult from pinecone.core.openapi.inference import API_VERSION from pinecone.utils import setup_openapi_client, PluginAware +from pinecone.utils import require_kwargs from .inference_request_builder import ( InferenceRequestBuilder, @@ -16,6 +17,8 @@ if TYPE_CHECKING: from pinecone.config import Config, OpenApiConfiguration + from .resources.sync.model import Model as ModelResource + from .models import ModelInfo, ModelInfoList logger = logging.getLogger(__name__) """ @private """ @@ -47,14 +50,20 @@ class Inference(PluginAware): EmbedModel = EmbedModelEnum RerankModel = 
RerankModelEnum - def __init__(self, config: "Config", openapi_config: "OpenApiConfiguration", **kwargs) -> None: + def __init__( + self, + config: "Config", + openapi_config: "OpenApiConfiguration", + pool_threads: int = 1, + **kwargs, + ) -> None: self._config = config """ @private """ self._openapi_config = openapi_config """ @private """ - self._pool_threads = kwargs.get("pool_threads", 1) + self._pool_threads = pool_threads """ @private """ self.__inference_api = setup_openapi_client( @@ -66,6 +75,9 @@ def __init__(self, config: "Config", openapi_config: "OpenApiConfiguration", **k api_version=API_VERSION, ) + self._model: Optional["ModelResource"] = None # Lazy initialization + """ @private """ + super().__init__() # Initialize PluginAware @property @@ -95,6 +107,45 @@ def pool_threads(self) -> int: ) return self._pool_threads + @property + def model(self) -> "ModelResource": + """ + Model is a resource that describes models available in the Pinecone Inference API. + + Currently you can get or list models.
+ + ```python + pc = Pinecone() + + # List all models + models = pc.inference.model.list() + + # List models, with model type filtering + models = pc.inference.model.list(type="embed") + models = pc.inference.model.list(type="rerank") + + # List models, with vector type filtering + models = pc.inference.model.list(vector_type="dense") + models = pc.inference.model.list(vector_type="sparse") + + # List models, with both type and vector type filtering + models = pc.inference.model.list(type="rerank", vector_type="dense") + + # Get details on a specific model + model = pc.inference.model.get("text-embedding-3-small") + ``` + """ + if self._model is None: + from .resources.sync.model import Model as ModelResource + + self._model = ModelResource( + inference_api=self.__inference_api, + config=self._config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) + return self._model + def embed( self, model: Union[EmbedModelEnum, str], @@ -214,3 +265,57 @@ def rerank( ) resp = self.__inference_api.rerank(rerank_request=rerank_request) return RerankResult(resp) + + @require_kwargs + def list_models( + self, *, type: Optional[str] = None, vector_type: Optional[str] = None + ) -> "ModelInfoList": + """ + List all available models. + + + ```python + pc = Pinecone() + + # List all models + models = pc.inference.list_models() + + # List models, with model type filtering + models = pc.inference.list_models(type="embed") + models = pc.inference.list_models(type="rerank") + + # List models, with vector type filtering + models = pc.inference.list_models(vector_type="dense") + models = pc.inference.list_models(vector_type="sparse") + + # List models, with both type and vector type filtering + models = pc.inference.list_models(type="rerank", vector_type="dense") + ``` + + :param type: The type of model to list. Either "embed" or "rerank". + :type type: str, optional + + :param vector_type: The type of vector to list. Either "dense" or "sparse". 
:type vector_type: str, optional + + :return: A list of models. + """ + return self.model.list(type=type, vector_type=vector_type) + + @require_kwargs + def get_model(self, model_name: str) -> "ModelInfo": + """ + Get details on a specific model. + + ```python + pc = Pinecone() + + model = pc.inference.get_model(model_name="text-embedding-3-small") + ``` + + :param model_name: The name of the model to get details on. + :type model_name: str, required + + :return: A ModelInfo object. + """ + return self.model.get(model_name=model_name) diff --git a/pinecone/inference/inference_asyncio.py b/pinecone/inference/inference_asyncio.py index 06ec7388..65ec8e79 100644 --- a/pinecone/inference/inference_asyncio.py +++ b/pinecone/inference/inference_asyncio.py @@ -1,7 +1,8 @@ -from typing import Optional, Dict, List, Union, Any +from typing import Optional, Dict, List, Union, Any, TYPE_CHECKING from pinecone.core.openapi.inference.api.inference_api import AsyncioInferenceApi -from .models import EmbeddingsList, RerankResult +from .models import EmbeddingsList, RerankResult, ModelInfoList, ModelInfo +from pinecone.utils import require_kwargs, parse_non_empty_args from .inference_request_builder import ( InferenceRequestBuilder, @@ -9,6 +10,9 @@ RerankModel as RerankModelEnum, ) +if TYPE_CHECKING: + from .resources.asyncio.model import ModelAsyncio as ModelAsyncioResource + class AsyncioInference: """ @@ -40,6 +44,9 @@ def __init__(self, api_client, **kwargs) -> None: self.api_client = api_client """ @private """ + self._model: Optional["ModelAsyncioResource"] = None + """ @private """ + self.__inference_api = AsyncioInferenceApi(api_client) """ @private """ @@ -84,6 +91,39 @@ async def embed( resp = await self.__inference_api.embed(embed_request=request_body) return EmbeddingsList(resp) + @property + def model(self) -> "ModelAsyncioResource": + """ + Model is a resource that describes models available in the Pinecone Inference API. + + Currently you can get or list models.
+ + ```python + async with PineconeAsyncio() as pc: + # List all models + models = await pc.inference.model.list() + + # List models, with model type filtering + models = await pc.inference.model.list(type="embed") + models = await pc.inference.model.list(type="rerank") + + # List models, with vector type filtering + models = await pc.inference.model.list(vector_type="dense") + models = await pc.inference.model.list(vector_type="sparse") + + # List models, with both type and vector type filtering + models = await pc.inference.model.list(type="rerank", vector_type="dense") + + # Get details on a specific model + model = await pc.inference.model.get("text-embedding-3-small") + ``` + """ + if self._model is None: + from .resources.asyncio.model import ModelAsyncio as ModelAsyncioResource + + self._model = ModelAsyncioResource(inference_api=self.__inference_api) + return self._model + async def rerank( self, model: str, @@ -162,3 +202,40 @@ async def rerank( ) resp = await self.__inference_api.rerank(rerank_request=rerank_request) return RerankResult(resp) + + @require_kwargs + async def list_models( + self, *, type: Optional[str] = None, vector_type: Optional[str] = None + ) -> ModelInfoList: + """ + List all available models. + + :param type: The type of model to list. Either "embed" or "rerank". + :type type: str, optional + + :param vector_type: The type of vector to list. Either "dense" or "sparse". + :type vector_type: str, optional + + :return: A list of models. + """ + args = parse_non_empty_args([("type", type), ("vector_type", vector_type)]) + resp = await self.__inference_api.list_models(**args) + return ModelInfoList(resp) + + @require_kwargs + async def get_model(self, model_name: str) -> ModelInfo: + """ + Get details on a specific model. + + ```python + async with PineconeAsyncio() as pc: + model = await pc.inference.get_model(model_name="text-embedding-3-small") + ``` + + :param model_name: The name of the model to get details on. 
+ :type model_name: str, required + + :return: A ModelInfo object. + """ + resp = await self.__inference_api.get_model(model_name=model_name) + return ModelInfo(resp) diff --git a/pinecone/inference/models/__init__.py b/pinecone/inference/models/__init__.py index b9a18aeb..11056408 100644 --- a/pinecone/inference/models/__init__.py +++ b/pinecone/inference/models/__init__.py @@ -1,2 +1,6 @@ from .embedding_list import EmbeddingsList from .rerank_result import RerankResult +from .model_info import ModelInfo +from .model_info_list import ModelInfoList + +__all__ = ["EmbeddingsList", "RerankResult", "ModelInfo", "ModelInfoList"] diff --git a/pinecone/inference/models/model_info.py b/pinecone/inference/models/model_info.py new file mode 100644 index 00000000..c8e37f21 --- /dev/null +++ b/pinecone/inference/models/model_info.py @@ -0,0 +1,43 @@ +import json +from pinecone.utils.repr_overrides import custom_serializer, install_json_repr_override +from pinecone.core.openapi.inference.model.model_info import ModelInfo as OpenAPIModelInfo +from pinecone.core.openapi.inference.model.model_info_supported_parameter import ( + ModelInfoSupportedParameter as OpenAPIModelInfoSupportedParameter, +) + +for klass in [ + # OpenAPIModelInfo, + # OpenAPIModelInfoMetric, + OpenAPIModelInfoSupportedParameter + # OpenAPIModelInfoSupportedMetrics, +]: + install_json_repr_override(klass) + + +class ModelInfo: + def __init__(self, model_info: OpenAPIModelInfo): + self._model_info = model_info + if self._model_info.supported_metrics is not None: + self.supported_metrics = [sm.value for sm in self._model_info.supported_metrics.value] + else: + self.supported_metrics = [] + + def __str__(self): + return str(self._model_info) + + def __getattr__(self, attr): + if attr == "supported_metrics": + return self.supported_metrics + else: + return getattr(self._model_info, attr) + + def __getitem__(self, key): + return self.__getattr__(key) + + def __repr__(self): + return json.dumps(self.to_dict(), 
indent=4, default=custom_serializer) + + def to_dict(self): + raw = self._model_info.to_dict() + raw["supported_metrics"] = self.supported_metrics + return raw diff --git a/pinecone/inference/models/model_info_list.py b/pinecone/inference/models/model_info_list.py new file mode 100644 index 00000000..01d2f2c5 --- /dev/null +++ b/pinecone/inference/models/model_info_list.py @@ -0,0 +1,57 @@ +import json +from typing import List +from pinecone.core.openapi.inference.model.model_info_list import ( + ModelInfoList as OpenAPIModelInfoList, +) +from .model_info import ModelInfo +from pinecone.utils.repr_overrides import custom_serializer + + +class ModelInfoList: + """ + A list of model information. + """ + + def __init__(self, model_info_list: OpenAPIModelInfoList): + self._model_info_list = model_info_list + self._models = [ModelInfo(model_info) for model_info in model_info_list.models] + + def names(self) -> List[str]: + return [i.name for i in self._models] + + def __getitem__(self, key): + if isinstance(key, int): + return self._models[key] + elif key == "models": + # Return mapped models + return self._models + else: + # any other keys added in the future + return self._model_info_list[key] + + def __getattr__(self, attr): + if attr == "models": + return self._models + else: + # any other keys added in the future + return getattr(self._model_info_list, attr) + + def __len__(self): + return len(self._models) + + def __iter__(self): + return iter(self._models) + + def __str__(self): + return str(self._models) + + def __repr__(self): + raw_dict = self._model_info_list.to_dict() + raw_dict["models"] = [i.to_dict() for i in self._models] + + # Remove keys with value None + for key, value in list(raw_dict.items()): + if value is None: + del raw_dict[key] + + return json.dumps(raw_dict, indent=4, default=custom_serializer) diff --git a/pinecone/inference/resources/asyncio/model.py b/pinecone/inference/resources/asyncio/model.py new file mode 100644 index 
00000000..2d54ebd2 --- /dev/null +++ b/pinecone/inference/resources/asyncio/model.py @@ -0,0 +1,47 @@ +from typing import TYPE_CHECKING, Optional +from pinecone.utils import require_kwargs, parse_non_empty_args +from ...models import ModelInfoList, ModelInfo + + +if TYPE_CHECKING: + from pinecone.core.openapi.inference.api.inference_api import AsyncioInferenceApi + + +class ModelAsyncio: + def __init__(self, inference_api: "AsyncioInferenceApi") -> None: + self.__inference_api = inference_api + """ @private """ + + super().__init__() + + @require_kwargs + async def list( + self, *, type: Optional[str] = None, vector_type: Optional[str] = None + ) -> ModelInfoList: + """ + List all available models. + + :param type: The type of model to list. Either "embed" or "rerank". + :type type: str, optional + + :param vector_type: The type of vector to list. Either "dense" or "sparse". + :type vector_type: str, optional + + :return: A list of models. + """ + args = parse_non_empty_args([("type", type), ("vector_type", vector_type)]) + model_list = await self.__inference_api.list_models(**args) + return ModelInfoList(model_list) + + @require_kwargs + async def get(self, model_name: str) -> ModelInfo: + """ + Get a specific model by name. + + :param model_name: The name of the model to get. + :type model_name: str, required + + :return: A model.
+ """ + model_info = await self.__inference_api.get_model(model_name=model_name) + return ModelInfo(model_info) diff --git a/pinecone/inference/resources/sync/model.py b/pinecone/inference/resources/sync/model.py new file mode 100644 index 00000000..19b97f90 --- /dev/null +++ b/pinecone/inference/resources/sync/model.py @@ -0,0 +1,69 @@ +from typing import TYPE_CHECKING, Optional +from pinecone.utils import PluginAware, require_kwargs, parse_non_empty_args +from ...models import ModelInfoList, ModelInfo + + +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration + from pinecone.core.openapi.inference.api.inference_api import InferenceApi + + +class Model(PluginAware): + def __init__( + self, + inference_api: "InferenceApi", + config: "Config", + openapi_config: "OpenApiConfiguration", + pool_threads: int = 1, + **kwargs, + ) -> None: + self._config = config + """ @private """ + + self._openapi_config = openapi_config + """ @private """ + + self._pool_threads = kwargs.get("pool_threads", 1) + """ @private """ + + self.__inference_api = inference_api + """ @private """ + + super().__init__() # Initialize PluginAware + + @property + def config(self) -> "Config": + """@private""" + # The config property is considered private, but the name cannot be changed to include underscore + # without breaking compatibility with plugins in the wild. + return self._config + + @require_kwargs + def list( + self, *, type: Optional[str] = None, vector_type: Optional[str] = None + ) -> ModelInfoList: + """ + List all available models. + + :param type: The type of model to list. Either "embed" or "rerank". + :type type: str, optional + + :param vector_type: The type of vector to list. Either "dense" or "sparse". + :type vector_type: str, optional + + :return: A list of models. 
+ """ + args = parse_non_empty_args([("type", type), ("vector_type", vector_type)]) + return ModelInfoList(self.__inference_api.list_models(**args)) + + @require_kwargs + def get(self, model_name: str) -> ModelInfo: + """ + Get a specific model by name. + + :param model_name: The name of the model to get. + :type model_name: str, required + + :return: A model. + """ + return ModelInfo(self.__inference_api.get_model(model_name=model_name)) diff --git a/pinecone/openapi_support/api_version.py b/pinecone/openapi_support/api_version.py index de57ca38..403ff26f 100644 --- a/pinecone/openapi_support/api_version.py +++ b/pinecone/openapi_support/api_version.py @@ -2,4 +2,4 @@ # Do not edit this file manually. API_VERSION = "2025-04" -APIS_REPO_SHA = "4b1c83b3b6669e6596151a575c284ee2cf4977a7" +APIS_REPO_SHA = "7e21ca9adb6a530ce11909d6209d69551f86e9bd" diff --git a/pinecone/openapi_support/model_utils.py b/pinecone/openapi_support/model_utils.py index 163f94b4..37cf1823 100644 --- a/pinecone/openapi_support/model_utils.py +++ b/pinecone/openapi_support/model_utils.py @@ -144,7 +144,12 @@ def set_attribute(self, name, value): self._check_type, configuration=self._configuration, ) - if (name,) in self.allowed_values: + if (name,) in self.allowed_values and self._enforce_allowed_values: + # Disabling allowed_value validation on response makes the SDK + # less fragile if unexpected values are returned. For example, if + # an unexpected index status is returned, we don't want to break + # when listing indexes due to validation on the status field against + # the allowed values in the enum. 
check_allowed_values(self.allowed_values, (name,), value) if (name,) in self.validations: check_validations(self.validations, (name,), value, self._configuration) diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py index 25d48cd7..60f02e9b 100644 --- a/pinecone/pinecone.py +++ b/pinecone/pinecone.py @@ -108,9 +108,13 @@ def inference(self) -> "Inference": Inference is a namespace where an instance of the `pinecone.data.features.inference.inference.Inference` class is lazily created and cached. """ if self._inference is None: - from pinecone.db_data import _Inference + from pinecone.inference import Inference - self._inference = _Inference(config=self._config, openapi_config=self._openapi_config) + self._inference = Inference( + config=self._config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) return self._inference @property diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index 606210a4..b8bd7ad5 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -157,9 +157,9 @@ async def main(): def inference(self): """Dynamically create and cache the AsyncioInference instance.""" if self._inference is None: - from pinecone.db_data import _AsyncioInference + from pinecone.inference import AsyncioInference - self._inference = _AsyncioInference(api_client=self.db._index_api.api_client) + self._inference = AsyncioInference(api_client=self.db._index_api.api_client) return self._inference @property diff --git a/pinecone/scripts/repl.py b/pinecone/scripts/repl.py index 55f80c5c..7fb4058a 100644 --- a/pinecone/scripts/repl.py +++ b/pinecone/scripts/repl.py @@ -1,5 +1,6 @@ import code import logging +from pinecone.utils.repr_overrides import setup_readline_history def setup_logging(): @@ -25,6 +26,9 @@ def main(): logger = setup_logging() logger.info("Initializing environment...") + # Set up readline history + setup_readline_history() + # You can add any setup code here, such as: # - Setting 
environment variables # - Importing commonly used modules diff --git a/pinecone/utils/repr_overrides.py b/pinecone/utils/repr_overrides.py index e3dfdb66..ce13e487 100644 --- a/pinecone/utils/repr_overrides.py +++ b/pinecone/utils/repr_overrides.py @@ -1,15 +1,45 @@ import json from datetime import datetime +import readline +import os +import atexit def custom_serializer(obj): if isinstance(obj, datetime): return obj.isoformat() - else: + try: + # First try to get a dictionary representation if available + if hasattr(obj, "to_dict"): + return obj.to_dict() + # Fall back to string representation return str(obj) + except (TypeError, RecursionError): + # If we hit any serialization issues, return a safe string representation + return f"<{obj.__class__.__name__} object>" def install_json_repr_override(klass): klass.__repr__ = lambda self: json.dumps( self.to_dict(), indent=4, sort_keys=False, default=custom_serializer ) + + +def setup_readline_history(): + """Setup readline history for the custom REPL.""" + # Create .pinecone directory in user's home if it doesn't exist + history_dir = os.path.expanduser("~/.pinecone") + os.makedirs(history_dir, exist_ok=True) + + # Set up history file + history_file = os.path.join(history_dir, "repl_history") + + # Load history if it exists + if os.path.exists(history_file): + readline.read_history_file(history_file) + + # Set history size + readline.set_history_length(1000) + + # Save history on exit + atexit.register(readline.write_history_file, history_file) diff --git a/scripts/repl.py b/scripts/repl.py index 88c218b2..c12dae7f 100644 --- a/scripts/repl.py +++ b/scripts/repl.py @@ -1,5 +1,6 @@ import dotenv import code +import readline from pinecone import Pinecone import logging import os @@ -19,6 +20,15 @@ def main(): ) logger = logging.getLogger(__name__) + # Set up readline history + histfile = os.path.join(os.path.expanduser("~"), ".python_repl_history") + try: + readline.read_history_file(histfile) + # Set history file size 
+ readline.set_history_length(1000) + except FileNotFoundError: + pass + # Start the interactive REPL banner = """ Welcome to the custom Python REPL! @@ -91,8 +101,12 @@ def cleanup_all(pc): # Add any other variables you want to have available in the REPL } - # Start the interactive console - code.interact(banner=banner, local=namespace) + try: + # Start the interactive console + code.interact(banner=banner, local=namespace) + finally: + # Save history when exiting + readline.write_history_file(histfile) if __name__ == "__main__": diff --git a/tests/integration/inference/asyncio/__init__.py b/tests/integration/inference/asyncio/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/inference/test_asyncio_inference.py b/tests/integration/inference/asyncio/test_embeddings.py similarity index 51% rename from tests/integration/inference/test_asyncio_inference.py rename to tests/integration/inference/asyncio/test_embeddings.py index 2870d1b3..423795bd 100644 --- a/tests/integration/inference/test_asyncio_inference.py +++ b/tests/integration/inference/asyncio/test_embeddings.py @@ -1,5 +1,5 @@ import pytest -from pinecone import PineconeAsyncio, PineconeApiException, RerankModel, EmbedModel +from pinecone import PineconeAsyncio, PineconeApiException, EmbedModel @pytest.mark.asyncio @@ -28,7 +28,7 @@ async def test_create_embeddings(self, model_input, model_output): individual_embedding = embeddings[0] assert len(individual_embedding.values) == 1024 - assert individual_embedding.vector_type.value == "dense" + assert individual_embedding.vector_type == "dense" assert len(individual_embedding["values"]) == 1024 await pc.close() @@ -132,126 +132,3 @@ async def test_can_attempt_to_use_unknown_models(self): ) assert "Model 'unknown-model' not found" in str(excinfo.value) await pc.close() - - -@pytest.mark.asyncio -class TestRerankAsyncio: - @pytest.mark.parametrize( - "model_input,model_output", - [ - (RerankModel.Bge_Reranker_V2_M3, 
"bge-reranker-v2-m3"), - ("bge-reranker-v2-m3", "bge-reranker-v2-m3"), - ], - ) - async def test_rerank_basic(self, model_input, model_output): - # Rerank model can be passed as string or enum - pc = PineconeAsyncio() - result = await pc.inference.rerank( - model=model_input, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - top_n=1, - return_documents=True, - ) - assert len(result.data) == 1 - assert result.data[0].index == 1 - assert result.data[0].document.text == "everyone loves dogs" - assert result.model == model_output - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - await pc.close() - - async def test_rerank_basic_document_dicts(self): - model = "bge-reranker-v2-m3" - pc = PineconeAsyncio() - result = await pc.inference.rerank( - model="bge-reranker-v2-m3", - query="i love dogs", - documents=[ - {"id": "123", "text": "dogs are pretty cool"}, - {"id": "789", "text": "I'm a cat person"}, - {"id": "456", "text": "everyone loves dogs"}, - ], - top_n=1, - return_documents=True, - ) - assert len(result.data) == 1 - assert result.data[0].index == 2 - assert result.data[0].document.text == "everyone loves dogs" - assert result.model == model - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - await pc.close() - - async def test_rerank_document_dicts_custom_field(self): - model = "bge-reranker-v2-m3" - pc = PineconeAsyncio() - result = await pc.inference.rerank( - model="bge-reranker-v2-m3", - query="i love dogs", - documents=[ - {"id": "123", "my_field": "dogs are pretty cool"}, - {"id": "456", "my_field": "everyone loves dogs"}, - {"id": "789", "my_field": "I'm a cat person"}, - ], - rank_fields=["my_field"], - top_n=1, - return_documents=True, - ) - assert len(result.data) == 1 - assert result.data[0].index == 1 - assert result.data[0].document.my_field == "everyone loves dogs" - assert result.model == model - assert 
isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - await pc.close() - - async def test_rerank_basic_default_top_n(self): - model = "bge-reranker-v2-m3" - pc = PineconeAsyncio() - result = await pc.inference.rerank( - model=model, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - return_documents=True, - ) - assert len(result.data) == 3 - assert result.data[0].index == 1 - assert result.data[0].document.text == "everyone loves dogs" - assert result.model == model - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - await pc.close() - - async def test_rerank_no_return_documents(self): - pc = PineconeAsyncio() - model = pc.inference.RerankModel.Bge_Reranker_V2_M3 - result = await pc.inference.rerank( - model=model, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - return_documents=False, - ) - assert len(result.data) == 3 - assert result.data[0].index == 1 - assert not result.data[0].document - assert result.model == model.value - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - await pc.close() - - async def test_rerank_allows_unknown_models_to_be_passed(self): - pc = PineconeAsyncio() - - # We don't want to reject these requests client side because we want - # to remain forwards compatible with any new models that become available - model = "unknown-model" - with pytest.raises(PineconeApiException) as excinfo: - await pc.inference.rerank( - model=model, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - return_documents=False, - ) - assert "Model 'unknown-model' not found" in str(excinfo.value) - await pc.close() diff --git a/tests/integration/inference/asyncio/test_models.py b/tests/integration/inference/asyncio/test_models.py new file mode 100644 index 00000000..984c6835 --- /dev/null 
+++ b/tests/integration/inference/asyncio/test_models.py @@ -0,0 +1,88 @@ +import pytest +from pinecone import PineconeAsyncio +import logging + +logger = logging.getLogger(__name__) + + +@pytest.mark.asyncio +class TestListModels: + async def test_list_models(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.list_models() + assert len(models) > 0 + logger.info(f"Models[0]: {models[0]}") + assert models[0].model is not None + assert models[0].short_description is not None + assert models[0].type is not None + assert models[0].supported_parameters is not None + assert models[0].modality is not None + assert models[0].max_sequence_length is not None + assert models[0].max_batch_size is not None + assert models[0].provider_name is not None + + async def test_list_models_with_type(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.list_models(type="embed") + assert len(models) > 0 + assert models[0].type == "embed" + + models2 = await pc.inference.list_models(type="rerank") + assert len(models2) > 0 + assert models2[0].type == "rerank" + + async def test_list_models_with_vector_type(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.list_models(vector_type="dense") + assert len(models) > 0 + assert models[0].vector_type == "dense" + + models2 = await pc.inference.list_models(vector_type="sparse") + assert len(models2) > 0 + assert models2[0].vector_type == "sparse" + + async def test_list_models_with_type_and_vector_type(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.list_models(type="embed", vector_type="dense") + assert len(models) > 0 + assert models[0].type == "embed" + assert models[0].vector_type == "dense" + + async def test_list_models_new_syntax(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.model.list(type="embed", vector_type="dense") + assert len(models) > 0 + logger.info(f"Models[0]: {models[0]}") + assert models[0].model is not 
None + assert models[0].short_description is not None + + +@pytest.mark.asyncio +class TestGetModel: + async def test_get_model(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.list_models() + first_model = models[0] + + model = await pc.inference.get_model(model_name=first_model.model) + assert model.model == first_model.model + assert model.short_description == first_model.short_description + assert model.type == first_model.type + assert model.supported_parameters == first_model.supported_parameters + assert model.modality == first_model.modality + assert model.max_sequence_length == first_model.max_sequence_length + assert model.max_batch_size == first_model.max_batch_size + assert model.provider_name == first_model.provider_name + + async def test_get_model_new_syntax(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.model.list() + first_model = models[0] + + model = await pc.inference.model.get(model_name=first_model.model) + assert model.model == first_model.model + assert model.short_description == first_model.short_description + assert model.type == first_model.type + assert model.supported_parameters == first_model.supported_parameters + assert model.modality == first_model.modality + assert model.max_sequence_length == first_model.max_sequence_length diff --git a/tests/integration/inference/asyncio/test_rerank.py b/tests/integration/inference/asyncio/test_rerank.py new file mode 100644 index 00000000..9009f262 --- /dev/null +++ b/tests/integration/inference/asyncio/test_rerank.py @@ -0,0 +1,125 @@ +import pytest +from pinecone import PineconeAsyncio, PineconeApiException, RerankModel + + +@pytest.mark.asyncio +class TestRerankAsyncio: + @pytest.mark.parametrize( + "model_input,model_output", + [ + (RerankModel.Bge_Reranker_V2_M3, "bge-reranker-v2-m3"), + ("bge-reranker-v2-m3", "bge-reranker-v2-m3"), + ], + ) + async def test_rerank_basic(self, model_input, model_output): + # Rerank model can be passed as 
string or enum + pc = PineconeAsyncio() + result = await pc.inference.rerank( + model=model_input, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + top_n=1, + return_documents=True, + ) + assert len(result.data) == 1 + assert result.data[0].index == 1 + assert result.data[0].document.text == "everyone loves dogs" + assert result.model == model_output + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + await pc.close() + + async def test_rerank_basic_document_dicts(self): + model = "bge-reranker-v2-m3" + pc = PineconeAsyncio() + result = await pc.inference.rerank( + model="bge-reranker-v2-m3", + query="i love dogs", + documents=[ + {"id": "123", "text": "dogs are pretty cool"}, + {"id": "789", "text": "I'm a cat person"}, + {"id": "456", "text": "everyone loves dogs"}, + ], + top_n=1, + return_documents=True, + ) + assert len(result.data) == 1 + assert result.data[0].index == 2 + assert result.data[0].document.text == "everyone loves dogs" + assert result.model == model + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + await pc.close() + + async def test_rerank_document_dicts_custom_field(self): + model = "bge-reranker-v2-m3" + pc = PineconeAsyncio() + result = await pc.inference.rerank( + model="bge-reranker-v2-m3", + query="i love dogs", + documents=[ + {"id": "123", "my_field": "dogs are pretty cool"}, + {"id": "456", "my_field": "everyone loves dogs"}, + {"id": "789", "my_field": "I'm a cat person"}, + ], + rank_fields=["my_field"], + top_n=1, + return_documents=True, + ) + assert len(result.data) == 1 + assert result.data[0].index == 1 + assert result.data[0].document.my_field == "everyone loves dogs" + assert result.model == model + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + await pc.close() + + async def test_rerank_basic_default_top_n(self): + model = 
"bge-reranker-v2-m3" + pc = PineconeAsyncio() + result = await pc.inference.rerank( + model=model, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + return_documents=True, + ) + assert len(result.data) == 3 + assert result.data[0].index == 1 + assert result.data[0].document.text == "everyone loves dogs" + assert result.model == model + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + await pc.close() + + async def test_rerank_no_return_documents(self): + pc = PineconeAsyncio() + model = pc.inference.RerankModel.Bge_Reranker_V2_M3 + result = await pc.inference.rerank( + model=model, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + return_documents=False, + ) + assert len(result.data) == 3 + assert result.data[0].index == 1 + assert not result.data[0].document + assert result.model == model.value + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + await pc.close() + + async def test_rerank_allows_unknown_models_to_be_passed(self): + pc = PineconeAsyncio() + + # We don't want to reject these requests client side because we want + # to remain forwards compatible with any new models that become available + model = "unknown-model" + with pytest.raises(PineconeApiException) as excinfo: + await pc.inference.rerank( + model=model, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + return_documents=False, + ) + assert "Model 'unknown-model' not found" in str(excinfo.value) + await pc.close() diff --git a/tests/integration/inference/sync/__init__.py b/tests/integration/inference/sync/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/inference/test_inference.py b/tests/integration/inference/sync/test_embeddings.py similarity index 51% rename from 
tests/integration/inference/test_inference.py rename to tests/integration/inference/sync/test_embeddings.py index abf73d85..f779ad8c 100644 --- a/tests/integration/inference/test_inference.py +++ b/tests/integration/inference/sync/test_embeddings.py @@ -1,5 +1,5 @@ import pytest -from pinecone import Pinecone, PineconeApiException, RerankModel, EmbedModel +from pinecone import Pinecone, PineconeApiException, EmbedModel class TestEmbed: @@ -27,7 +27,7 @@ def test_create_embeddings(self, model_input, model_output): individual_embedding = embeddings[0] assert len(individual_embedding.values) == 1024 - assert individual_embedding.vector_type.value == "dense" + assert individual_embedding.vector_type == "dense" assert len(individual_embedding["values"]) == 1024 def test_embedding_result_is_iterable(self): @@ -122,119 +122,3 @@ def test_can_attempt_to_use_unknown_models(self): parameters={"input_type": "query", "truncate": "END"}, ) assert "Model 'unknown-model' not found" in str(excinfo.value) - - -class TestRerank: - @pytest.mark.parametrize( - "model_input,model_output", - [ - (RerankModel.Bge_Reranker_V2_M3, "bge-reranker-v2-m3"), - ("bge-reranker-v2-m3", "bge-reranker-v2-m3"), - ], - ) - def test_rerank_basic(self, model_input, model_output): - # Rerank model can be passed as string or enum - pc = Pinecone() - result = pc.inference.rerank( - model=model_input, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - top_n=1, - return_documents=True, - ) - assert len(result.data) == 1 - assert result.data[0].index == 1 - assert result.data[0].document.text == "everyone loves dogs" - assert result.model == model_output - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - - def test_rerank_basic_document_dicts(self): - model = "bge-reranker-v2-m3" - pc = Pinecone() - result = pc.inference.rerank( - model="bge-reranker-v2-m3", - query="i love dogs", - documents=[ - {"id": "123", 
"text": "dogs are pretty cool"}, - {"id": "789", "text": "I'm a cat person"}, - {"id": "456", "text": "everyone loves dogs"}, - ], - top_n=1, - return_documents=True, - ) - assert len(result.data) == 1 - assert result.data[0].index == 2 - assert result.data[0].document.text == "everyone loves dogs" - assert result.model == model - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - - def test_rerank_document_dicts_custom_field(self): - model = "bge-reranker-v2-m3" - pc = Pinecone() - result = pc.inference.rerank( - model="bge-reranker-v2-m3", - query="i love dogs", - documents=[ - {"id": "123", "my_field": "dogs are pretty cool"}, - {"id": "456", "my_field": "everyone loves dogs"}, - {"id": "789", "my_field": "I'm a cat person"}, - ], - rank_fields=["my_field"], - top_n=1, - return_documents=True, - ) - assert len(result.data) == 1 - assert result.data[0].index == 1 - assert result.data[0].document.my_field == "everyone loves dogs" - assert result.model == model - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - - def test_rerank_basic_default_top_n(self): - model = "bge-reranker-v2-m3" - pc = Pinecone() - result = pc.inference.rerank( - model=model, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - return_documents=True, - ) - assert len(result.data) == 3 - assert result.data[0].index == 1 - assert result.data[0].document.text == "everyone loves dogs" - assert result.model == model - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - - def test_rerank_no_return_documents(self): - pc = Pinecone() - model = pc.inference.RerankModel.Bge_Reranker_V2_M3 - result = pc.inference.rerank( - model=model, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - return_documents=False, - ) - assert len(result.data) == 3 - assert result.data[0].index 
== 1 - assert not result.data[0].document - assert result.model == model.value - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - - def test_rerank_allows_unknown_models_to_be_passed(self): - pc = Pinecone() - - # We don't want to reject these requests client side because we want - # to remain forwards compatible with any new models that become available - model = "unknown-model" - with pytest.raises(PineconeApiException) as excinfo: - pc.inference.rerank( - model=model, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - return_documents=False, - ) - assert "Model 'unknown-model' not found" in str(excinfo.value) diff --git a/tests/integration/inference/sync/test_models.py b/tests/integration/inference/sync/test_models.py new file mode 100644 index 00000000..29640a08 --- /dev/null +++ b/tests/integration/inference/sync/test_models.py @@ -0,0 +1,98 @@ +import logging +from pinecone import Pinecone + +logger = logging.getLogger(__name__) + + +class TestListModels: + def test_list_models(self): + pc = Pinecone() + models = pc.inference.list_models() + assert len(models) > 0 + logger.info(f"Models[0]: {models[0]}") + assert models[0].model is not None + assert models[0].short_description is not None + assert models[0].type is not None + assert models[0].supported_parameters is not None + assert models[0].modality is not None + assert models[0].max_sequence_length is not None + assert models[0].max_batch_size is not None + assert models[0].provider_name is not None + + def test_list_models_new_syntax(self): + pc = Pinecone() + models = pc.inference.model.list(type="embed", vector_type="dense") + assert len(models) > 0 + logger.info(f"Models[0]: {models[0]}") + assert models[0].model is not None + assert models[0].short_description is not None + + def test_list_models_with_type(self): + pc = Pinecone() + models = pc.inference.list_models(type="embed") + assert len(models) > 0 
+ assert models[0].type == "embed" + + models2 = pc.inference.list_models(type="rerank") + assert len(models2) > 0 + assert models2[0].type == "rerank" + + def test_list_models_with_vector_type(self): + pc = Pinecone() + models = pc.inference.list_models(vector_type="dense") + assert len(models) > 0 + assert models[0].vector_type == "dense" + + models2 = pc.inference.list_models(vector_type="sparse") + assert len(models2) > 0 + assert models2[0].vector_type == "sparse" + + def test_list_models_with_type_and_vector_type(self): + pc = Pinecone() + models = pc.inference.list_models(type="embed", vector_type="dense") + assert len(models) > 0 + assert models[0].type == "embed" + assert models[0].vector_type == "dense" + + def test_model_can_be_displayed(self): + # We want to check this, since we're doing some custom + # shenanigans to the model classes to make them more user + # friendly. Want to make sure we don't break the basic + # use case of displaying the model. + pc = Pinecone() + models = pc.inference.list_models() + models.__repr__() # This should not throw + models[0].__repr__() # This should not throw + models.to_dict() # This should not throw + models[0].to_dict() # This should not throw + assert True + + +class TestGetModel: + def test_get_model(self): + pc = Pinecone() + models = pc.inference.list_models() + first_model = models[0] + + model = pc.inference.get_model(model_name=first_model.model) + assert model.model == first_model.model + assert model.short_description == first_model.short_description + assert model.type == first_model.type + assert model.supported_parameters == first_model.supported_parameters + assert model.modality == first_model.modality + assert model.max_sequence_length == first_model.max_sequence_length + assert model.max_batch_size == first_model.max_batch_size + assert model.provider_name == first_model.provider_name + + def test_get_model_new_syntax(self): + pc = Pinecone() + models = pc.inference.model.list() + first_model = 
models[0] + + model = pc.inference.model.get(model_name=first_model.model) + assert model.model == first_model.model + assert model.short_description == first_model.short_description + assert model.type == first_model.type + assert model.supported_parameters == first_model.supported_parameters + assert model.modality == first_model.modality + assert model.max_sequence_length == first_model.max_sequence_length diff --git a/tests/integration/inference/sync/test_rerank.py b/tests/integration/inference/sync/test_rerank.py new file mode 100644 index 00000000..7797e857 --- /dev/null +++ b/tests/integration/inference/sync/test_rerank.py @@ -0,0 +1,118 @@ +import pytest +from pinecone import Pinecone, PineconeApiException, RerankModel + + +class TestRerank: + @pytest.mark.parametrize( + "model_input,model_output", + [ + (RerankModel.Bge_Reranker_V2_M3, "bge-reranker-v2-m3"), + ("bge-reranker-v2-m3", "bge-reranker-v2-m3"), + ], + ) + def test_rerank_basic(self, model_input, model_output): + # Rerank model can be passed as string or enum + pc = Pinecone() + result = pc.inference.rerank( + model=model_input, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + top_n=1, + return_documents=True, + ) + assert len(result.data) == 1 + assert result.data[0].index == 1 + assert result.data[0].document.text == "everyone loves dogs" + assert result.model == model_output + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + + def test_rerank_basic_document_dicts(self): + model = "bge-reranker-v2-m3" + pc = Pinecone() + result = pc.inference.rerank( + model="bge-reranker-v2-m3", + query="i love dogs", + documents=[ + {"id": "123", "text": "dogs are pretty cool"}, + {"id": "789", "text": "I'm a cat person"}, + {"id": "456", "text": "everyone loves dogs"}, + ], + top_n=1, + return_documents=True, + ) + assert len(result.data) == 1 + assert result.data[0].index == 2 + assert 
result.data[0].document.text == "everyone loves dogs" + assert result.model == model + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + + def test_rerank_document_dicts_custom_field(self): + model = "bge-reranker-v2-m3" + pc = Pinecone() + result = pc.inference.rerank( + model="bge-reranker-v2-m3", + query="i love dogs", + documents=[ + {"id": "123", "my_field": "dogs are pretty cool"}, + {"id": "456", "my_field": "everyone loves dogs"}, + {"id": "789", "my_field": "I'm a cat person"}, + ], + rank_fields=["my_field"], + top_n=1, + return_documents=True, + ) + assert len(result.data) == 1 + assert result.data[0].index == 1 + assert result.data[0].document.my_field == "everyone loves dogs" + assert result.model == model + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + + def test_rerank_basic_default_top_n(self): + model = "bge-reranker-v2-m3" + pc = Pinecone() + result = pc.inference.rerank( + model=model, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + return_documents=True, + ) + assert len(result.data) == 3 + assert result.data[0].index == 1 + assert result.data[0].document.text == "everyone loves dogs" + assert result.model == model + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + + def test_rerank_no_return_documents(self): + pc = Pinecone() + model = pc.inference.RerankModel.Bge_Reranker_V2_M3 + result = pc.inference.rerank( + model=model, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + return_documents=False, + ) + assert len(result.data) == 3 + assert result.data[0].index == 1 + assert not result.data[0].document + assert result.model == model.value + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + + def test_rerank_allows_unknown_models_to_be_passed(self): + pc = 
Pinecone() + + # We don't want to reject these requests client side because we want + to remain forwards compatible with any new models that become available + model = "unknown-model" + with pytest.raises(PineconeApiException) as excinfo: + pc.inference.rerank( + model=model, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + return_documents=False, + ) + assert "Model 'unknown-model' not found" in str(excinfo.value) From 80d9c95c5d9e617da4069dac1871cf82936ff344 Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Fri, 16 May 2025 12:13:04 -0400 Subject: [PATCH 42/48] Add retry config for urllib3 requests (#491) ## Problem We want to automatically retry when errors occur ## Solution Implement `urllib3` retry configuration. We implemented the backoff calculation with jitter ourselves because this is not available for all versions of `urllib3` that the SDK uses. ## Type of Change - [x] New feature (non-breaking change which adds functionality) ## Test Plan I created a mock server in `scripts/test-server.py` that simulates a high rate of failures (80% failure, only 1 in 5 requests succeed). `poetry run python3 scripts/test-server.py` Then I made some requests and observed logging to see what was going on. 
```python >>> from pinecone import Pinecone >>> # Testing control plane with retries >>> pc = Pinecone(host='http://localhost:8000') >>> pc.list_indexes() >>> >>> # Data plane >>> idx = pc.Index(host='http://localhost:8000') >>> # enable debug logging >>> idx._vector_api.api_client.configuration.debug = True >>> >>> idx.upsert(vectors=[('1', [0.1, 0.2])]) DEBUG | pinecone.openapi_support.rest_urllib3:126 | Calling urllib3 request() DEBUG | urllib3.connectionpool:546 | http://localhost:8000 "POST /vectors/upsert HTTP/10" 500 None DEBUG | urllib3.util.retry:521 | Incremented Retry for (url='/vectors/upsert'): JitterRetry(total=4, connect=None, read=None, redirect=None, status=None) DEBUG | pinecone.openapi_support.retries:20 | Calculating retry backoff: 0.15197003184454544 (jitter: 0.15197003184454544) DEBUG | urllib3.connectionpool:943 | Retry: /vectors/upsert DEBUG | urllib3.connectionpool:546 | http://localhost:8000 "POST /vectors/upsert HTTP/10" 500 None DEBUG | urllib3.util.retry:521 | Incremented Retry for (url='/vectors/upsert'): JitterRetry(total=3, connect=None, read=None, redirect=None, status=None) DEBUG | pinecone.openapi_support.retries:20 | Calculating retry backoff: 0.7352149950424516 (jitter: 0.2352149950424516) DEBUG | urllib3.connectionpool:943 | Retry: /vectors/upsert DEBUG | urllib3.connectionpool:546 | http://localhost:8000 "POST /vectors/upsert HTTP/10" 500 None DEBUG | urllib3.util.retry:521 | Incremented Retry for (url='/vectors/upsert'): JitterRetry(total=2, connect=None, read=None, redirect=None, status=None) DEBUG | pinecone.openapi_support.retries:20 | Calculating retry backoff: 1.1307109027442626 (jitter: 0.13071090274426245) DEBUG | urllib3.connectionpool:943 | Retry: /vectors/upsert DEBUG | urllib3.connectionpool:546 | http://localhost:8000 "POST /vectors/upsert HTTP/10" 500 None DEBUG | urllib3.util.retry:521 | Incremented Retry for (url='/vectors/upsert'): JitterRetry(total=1, connect=None, read=None, redirect=None, status=None) DEBUG 
| pinecone.openapi_support.retries:20 | Calculating retry backoff: 2.142226695165083 (jitter: 0.14222669516508277) DEBUG | urllib3.connectionpool:943 | Retry: /vectors/upsert DEBUG | urllib3.connectionpool:546 | http://localhost:8000 "POST /vectors/upsert HTTP/10" 200 None DEBUG | pinecone.openapi_support.rest_urllib3:266 | response body: b'{"upsertedCount": 10}' DEBUG | pinecone.openapi_support.rest_utils:34 | response status: 200 {'upserted_count': 10} ``` --- pinecone/openapi_support/rest_urllib3.py | 9 +- pinecone/openapi_support/retries.py | 21 ++++ scripts/repl.py | 1 + scripts/test-server.py | 136 +++++++++++++++++++++ tests/unit/openapi_support/test_retries.py | 49 ++++++++ 5 files changed, 215 insertions(+), 1 deletion(-) create mode 100644 pinecone/openapi_support/retries.py create mode 100644 scripts/test-server.py create mode 100644 tests/unit/openapi_support/test_retries.py diff --git a/pinecone/openapi_support/rest_urllib3.py b/pinecone/openapi_support/rest_urllib3.py index 0c1a1c5a..f310ca99 100644 --- a/pinecone/openapi_support/rest_urllib3.py +++ b/pinecone/openapi_support/rest_urllib3.py @@ -8,7 +8,7 @@ from .rest_utils import raise_exceptions_or_return, RESTResponse, RestClientInterface import urllib3 - +from .retries import JitterRetry from .exceptions import PineconeApiException, PineconeApiValueError @@ -52,6 +52,13 @@ def __init__( if configuration.retries is not None: addition_pool_args["retries"] = configuration.retries + else: + addition_pool_args["retries"] = JitterRetry( + total=5, + backoff_factor=0.25, + status_forcelist=(500, 502, 503, 504), + allowed_methods=None, + ) if configuration.socket_options is not None: addition_pool_args["socket_options"] = configuration.socket_options diff --git a/pinecone/openapi_support/retries.py b/pinecone/openapi_support/retries.py new file mode 100644 index 00000000..2b91a31d --- /dev/null +++ b/pinecone/openapi_support/retries.py @@ -0,0 +1,21 @@ +import random +from urllib3.util.retry import Retry 
+import logging + +logger = logging.getLogger(__name__) + + +class JitterRetry(Retry): + """ + Retry with exponential back‑off with jitter. + + The Retry class is being extended as built-in support for jitter was added only in urllib3 2.0.0. + Jitter logic is following the official implementation with a constant jitter factor: https://github.com/urllib3/urllib3/blob/main/src/urllib3/util/retry.py + """ + + def get_backoff_time(self) -> float: + backoff_value = super().get_backoff_time() + jitter = random.random() * 0.25 + backoff_value += jitter + logger.debug(f"Calculating retry backoff: {backoff_value} (jitter: {jitter})") + return backoff_value diff --git a/scripts/repl.py b/scripts/repl.py index c12dae7f..82d5ce26 100644 --- a/scripts/repl.py +++ b/scripts/repl.py @@ -98,6 +98,7 @@ def cleanup_all(pc): "delete_all_collections": delete_all_collections, "delete_all_backups": delete_all_backups, "cleanup_all": cleanup_all, + "pcl": Pinecone(host="http://localhost:8000"), # Add any other variables you want to have available in the REPL } diff --git a/scripts/test-server.py b/scripts/test-server.py new file mode 100644 index 00000000..784d510d --- /dev/null +++ b/scripts/test-server.py @@ -0,0 +1,136 @@ +from http.server import BaseHTTPRequestHandler, HTTPServer +import json + +backups_response = { + "data": [ + { + "backup_id": "6f52240b-6397-481b-9767-748a2d4d3b65", + "source_index_name": "jensparse", + "source_index_id": "71ded150-2b8e-422d-9849-097f2c89d18b", + "status": "Ready", + "cloud": "aws", + "region": "us-east-1", + "tags": {}, + "name": "sparsebackup", + "description": "", + "dimension": 0, + "record_count": 10000, + "namespace_count": 1000, + "size_bytes": 123456, + "created_at": "2025-05-15T20:55:29.477794Z", + } + ] +} + +indexes_response = { + "indexes": [ + { + "name": "jhamon-20250515-165135548-reorg-create-with-e", + "metric": "dotproduct", + "host": "jhamon-20250515-165135548-reorg-create-with-e-bt8x3su.svc.aped-4627-b74a.pinecone.io", + "spec": 
{"serverless": {"cloud": "aws", "region": "us-east-1"}}, + "status": {"ready": True, "state": "Ready"}, + "vector_type": "sparse", + "dimension": None, + "deletion_protection": "disabled", + "tags": {"env": "dev"}, + }, + { + "name": "unexpected", + "metric": "newmetric", + "host": "jhamon-20250515-165135548-reorg-create-with-e-bt8x3su.svc.aped-4627-b74a.pinecone.io", + "spec": {"serverless": {"cloud": "aws", "region": "us-east-1"}}, + "status": {"ready": False, "state": "UnknownStatus"}, + "vector_type": "sparse", + "dimension": -1, + "deletion_protection": "disabled", + "tags": {"env": "dev"}, + }, + { + "name": "wrong-types", + "metric": 123, + "host": "jhamon-20250515-165135548-reorg-create-with-e-bt8x3su.svc.aped-4627-b74a.pinecone.io", + "spec": {"serverless": {"cloud": "aws", "region": "us-east-1"}}, + "status": {"ready": False, "state": "UnknownStatus"}, + "vector_type": None, + "dimension": None, + "deletion_protection": "asdf", + "tags": None, + }, + ] +} + +index_description_response = { + "name": "docs-example-dense", + "vector_type": "dense", + "metric": "cosine", + "dimension": 1536, + "status": {"ready": True, "state": "Ready"}, + "host": "docs-example-dense-govk0nt.svc.aped-4627-b74a.pinecone.io", + "spec": {"serverless": {"region": "us-east-1", "cloud": "aws"}}, + "deletion_protection": "disabled", + "tags": {"environment": "development"}, +} + +upsert_response = {"upsertedCount": 10} + +call_count = 0 + + +class MyHandler(BaseHTTPRequestHandler): + def do_POST(self): + global call_count + call_count += 1 + + # Simulate a high rate of 500 errors + if call_count % 5 != 0: + self.send_response(500) + self.end_headers() + return + + if self.path.startswith("/vectors/upsert"): + self.send_response(200) + self.send_header("Content-type", "application/json") + self.end_headers() + response = upsert_response + self.wfile.write(json.dumps(response).encode()) + else: + self.send_response(404) + self.end_headers() + + def do_GET(self): + global call_count + 
call_count += 1 + + # Simulate a high rate of 500 errors + if call_count % 5 != 0: + self.send_response(500) + self.end_headers() + return + + if self.path.startswith("/backups"): + self.send_response(200) + self.send_header("Content-type", "application/json") + self.end_headers() + response = backups_response + self.wfile.write(json.dumps(response).encode()) + elif self.path.startswith("/indexes/"): + self.send_response(200) + self.send_header("Content-type", "application/json") + self.end_headers() + response = index_description_response + self.wfile.write(json.dumps(response).encode()) + elif self.path.startswith("/indexes"): + self.send_response(200) + self.send_header("Content-type", "application/json") + self.end_headers() + response = indexes_response + self.wfile.write(json.dumps(response).encode()) + else: + self.send_response(404) + self.end_headers() + + +server = HTTPServer(("localhost", 8000), MyHandler) +print("Serving on http://localhost:8000") +server.serve_forever() diff --git a/tests/unit/openapi_support/test_retries.py b/tests/unit/openapi_support/test_retries.py new file mode 100644 index 00000000..5f31221d --- /dev/null +++ b/tests/unit/openapi_support/test_retries.py @@ -0,0 +1,49 @@ +import pytest +from unittest.mock import patch, MagicMock +from urllib3.exceptions import MaxRetryError +from urllib3.util.retry import Retry +from pinecone.openapi_support.retries import JitterRetry + + +def test_jitter_retry_backoff(): + """Test that the backoff time includes jitter.""" + retry = JitterRetry( + total=5, + backoff_factor=0.25, + backoff_max=3, + status_forcelist=(500, 502, 503, 504), + allowed_methods=None, + ) + + # Mock the parent's get_backoff_time to return a fixed value + with patch.object(Retry, "get_backoff_time", return_value=1.0): + # Test multiple times to ensure jitter is added + backoff_times = [retry.get_backoff_time() for _ in range(100)] + + # All backoff times should be between 1.0 and 1.25 + assert all(1.0 <= t <= 1.25 for t in 
backoff_times) + # Values should be different (jitter is working) + assert len(set(backoff_times)) > 1 + + +def test_jitter_retry_behavior(): + """Test that retries actually occur and respect the total count.""" + retry = JitterRetry(total=3) + mock_response = MagicMock() + mock_response.status = 500 # Simulate server error + + # Simulate a failing request + with pytest.raises(MaxRetryError) as exc_info: + retry2 = retry.increment( + method="GET", url="http://test.com", response=mock_response, error=None + ) + retry3 = retry2.increment( + method="GET", url="http://test.com", response=mock_response, error=None + ) + retry4 = retry3.increment( + method="GET", url="http://test.com", response=mock_response, error=None + ) + retry4.increment(method="GET", url="http://test.com", response=mock_response, error=None) + + # Verify the error contains the expected information + assert "Max retries exceeded" in str(exc_info.value) From 24296eb8b10dc7d3e48f6f909ba19aa93fe97b2e Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Fri, 16 May 2025 12:14:12 -0400 Subject: [PATCH 43/48] Disable client-side validation of responses (#490) ## Problem Sometimes unexpected values in API responses can cause unnecessary errors due to validation logic being applied. Fields labeled in the openapi as `enum` fields will error when unexpected values appear in the response. In general, we want the client to just display what the API returns without applying validation. ## Solution Adjust the code generation to disable validation logic when instantiating model objects from API response. ## Type of Change - [x] Bug fix (non-breaking change which fixes an issue) ## Test Plan I created a mock server script to do some manual testing of different responses with odd values in them and saw this works without erroring. 
For example, these responses no longer raise: - New index status - Index dimension > 20k - Index name too long --- codegen/python-oas-templates | 2 +- pinecone/core/openapi/db_control/model/backup_list.py | 5 +++++ pinecone/core/openapi/db_control/model/backup_model.py | 5 +++++ pinecone/core/openapi/db_control/model/byoc_spec.py | 5 +++++ pinecone/core/openapi/db_control/model/collection_list.py | 5 +++++ pinecone/core/openapi/db_control/model/collection_model.py | 5 +++++ .../core/openapi/db_control/model/configure_index_request.py | 5 +++++ .../db_control/model/configure_index_request_embed.py | 5 +++++ .../openapi/db_control/model/configure_index_request_spec.py | 5 +++++ .../db_control/model/configure_index_request_spec_pod.py | 5 +++++ .../core/openapi/db_control/model/create_backup_request.py | 5 +++++ .../openapi/db_control/model/create_collection_request.py | 5 +++++ .../db_control/model/create_index_for_model_request.py | 5 +++++ .../db_control/model/create_index_for_model_request_embed.py | 5 +++++ .../db_control/model/create_index_from_backup_request.py | 5 +++++ .../db_control/model/create_index_from_backup_response.py | 5 +++++ .../core/openapi/db_control/model/create_index_request.py | 5 +++++ .../core/openapi/db_control/model/deletion_protection.py | 5 +++++ pinecone/core/openapi/db_control/model/error_response.py | 5 +++++ .../core/openapi/db_control/model/error_response_error.py | 5 +++++ pinecone/core/openapi/db_control/model/index_list.py | 5 +++++ pinecone/core/openapi/db_control/model/index_model.py | 5 +++++ pinecone/core/openapi/db_control/model/index_model_spec.py | 5 +++++ pinecone/core/openapi/db_control/model/index_model_status.py | 5 +++++ pinecone/core/openapi/db_control/model/index_spec.py | 5 +++++ pinecone/core/openapi/db_control/model/index_tags.py | 5 +++++ pinecone/core/openapi/db_control/model/model_index_embed.py | 5 +++++ .../core/openapi/db_control/model/pagination_response.py | 5 +++++ 
pinecone/core/openapi/db_control/model/pod_spec.py | 5 +++++ .../openapi/db_control/model/pod_spec_metadata_config.py | 5 +++++ pinecone/core/openapi/db_control/model/restore_job_list.py | 5 +++++ pinecone/core/openapi/db_control/model/restore_job_model.py | 5 +++++ pinecone/core/openapi/db_control/model/serverless_spec.py | 5 +++++ pinecone/core/openapi/db_data/model/delete_request.py | 5 +++++ .../openapi/db_data/model/describe_index_stats_request.py | 5 +++++ pinecone/core/openapi/db_data/model/fetch_response.py | 5 +++++ pinecone/core/openapi/db_data/model/hit.py | 5 +++++ pinecone/core/openapi/db_data/model/import_error_mode.py | 5 +++++ pinecone/core/openapi/db_data/model/import_model.py | 5 +++++ pinecone/core/openapi/db_data/model/index_description.py | 5 +++++ pinecone/core/openapi/db_data/model/list_imports_response.py | 5 +++++ pinecone/core/openapi/db_data/model/list_item.py | 5 +++++ .../core/openapi/db_data/model/list_namespaces_response.py | 5 +++++ pinecone/core/openapi/db_data/model/list_response.py | 5 +++++ pinecone/core/openapi/db_data/model/namespace_description.py | 5 +++++ pinecone/core/openapi/db_data/model/namespace_summary.py | 5 +++++ pinecone/core/openapi/db_data/model/pagination.py | 5 +++++ pinecone/core/openapi/db_data/model/protobuf_any.py | 5 +++++ pinecone/core/openapi/db_data/model/protobuf_null_value.py | 5 +++++ pinecone/core/openapi/db_data/model/query_request.py | 5 +++++ pinecone/core/openapi/db_data/model/query_response.py | 5 +++++ pinecone/core/openapi/db_data/model/query_vector.py | 5 +++++ pinecone/core/openapi/db_data/model/rpc_status.py | 5 +++++ pinecone/core/openapi/db_data/model/scored_vector.py | 5 +++++ .../core/openapi/db_data/model/search_records_request.py | 5 +++++ .../openapi/db_data/model/search_records_request_query.py | 5 +++++ .../openapi/db_data/model/search_records_request_rerank.py | 5 +++++ .../core/openapi/db_data/model/search_records_response.py | 5 +++++ 
.../openapi/db_data/model/search_records_response_result.py | 5 +++++ pinecone/core/openapi/db_data/model/search_records_vector.py | 5 +++++ pinecone/core/openapi/db_data/model/search_usage.py | 5 +++++ pinecone/core/openapi/db_data/model/search_vector.py | 5 +++++ pinecone/core/openapi/db_data/model/single_query_results.py | 5 +++++ pinecone/core/openapi/db_data/model/sparse_values.py | 5 +++++ pinecone/core/openapi/db_data/model/start_import_request.py | 5 +++++ pinecone/core/openapi/db_data/model/start_import_response.py | 5 +++++ pinecone/core/openapi/db_data/model/update_request.py | 5 +++++ pinecone/core/openapi/db_data/model/upsert_record.py | 5 +++++ pinecone/core/openapi/db_data/model/upsert_request.py | 5 +++++ pinecone/core/openapi/db_data/model/upsert_response.py | 5 +++++ pinecone/core/openapi/db_data/model/usage.py | 5 +++++ pinecone/core/openapi/db_data/model/vector.py | 5 +++++ pinecone/core/openapi/db_data/model/vector_values.py | 5 +++++ pinecone/core/openapi/inference/model/dense_embedding.py | 5 +++++ pinecone/core/openapi/inference/model/document.py | 5 +++++ pinecone/core/openapi/inference/model/embed_request.py | 5 +++++ .../core/openapi/inference/model/embed_request_inputs.py | 5 +++++ pinecone/core/openapi/inference/model/embedding.py | 3 +++ pinecone/core/openapi/inference/model/embeddings_list.py | 5 +++++ .../core/openapi/inference/model/embeddings_list_usage.py | 5 +++++ pinecone/core/openapi/inference/model/error_response.py | 5 +++++ .../core/openapi/inference/model/error_response_error.py | 5 +++++ pinecone/core/openapi/inference/model/model_info.py | 5 +++++ pinecone/core/openapi/inference/model/model_info_list.py | 5 +++++ pinecone/core/openapi/inference/model/model_info_metric.py | 5 +++++ .../openapi/inference/model/model_info_supported_metrics.py | 5 +++++ .../inference/model/model_info_supported_parameter.py | 5 +++++ pinecone/core/openapi/inference/model/ranked_document.py | 5 +++++ 
pinecone/core/openapi/inference/model/rerank_request.py | 5 +++++ pinecone/core/openapi/inference/model/rerank_result.py | 5 +++++ pinecone/core/openapi/inference/model/rerank_result_usage.py | 5 +++++ pinecone/core/openapi/inference/model/sparse_embedding.py | 5 +++++ pinecone/openapi_support/model_utils.py | 5 ++++- 93 files changed, 458 insertions(+), 2 deletions(-) diff --git a/codegen/python-oas-templates b/codegen/python-oas-templates index 2ba53806..57a4c44d 160000 --- a/codegen/python-oas-templates +++ b/codegen/python-oas-templates @@ -1 +1 @@ -Subproject commit 2ba53806258cc8ab42ced7e52ba84dce1e977c6d +Subproject commit 57a4c44d6f18bbabedfe25aa173359c37fb9f705 diff --git a/pinecone/core/openapi/db_control/model/backup_list.py b/pinecone/core/openapi/db_control/model/backup_list.py index a7a637ad..c485a03d 100644 --- a/pinecone/core/openapi/db_control/model/backup_list.py +++ b/pinecone/core/openapi/db_control/model/backup_list.py @@ -152,6 +152,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -170,6 +171,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -191,6 +193,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -240,6 +243,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ 
_enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -256,6 +260,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/backup_model.py b/pinecone/core/openapi/db_control/model/backup_model.py index 817677e4..5b50ba9d 100644 --- a/pinecone/core/openapi/db_control/model/backup_model.py +++ b/pinecone/core/openapi/db_control/model/backup_model.py @@ -205,6 +205,7 @@ def _from_openapi_data( """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -223,6 +224,7 @@ def _from_openapi_data( self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -250,6 +252,7 @@ def _from_openapi_data( required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -316,6 +319,7 @@ def __init__( """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", 
False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -332,6 +336,7 @@ def __init__( self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/byoc_spec.py b/pinecone/core/openapi/db_control/model/byoc_spec.py index a78279f2..4d7a843d 100644 --- a/pinecone/core/openapi/db_control/model/byoc_spec.py +++ b/pinecone/core/openapi/db_control/model/byoc_spec.py @@ -141,6 +141,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -159,6 +160,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -181,6 +183,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -231,6 +234,7 @@ def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -247,6 +251,7 @@ 
def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/collection_list.py b/pinecone/core/openapi/db_control/model/collection_list.py index e0618662..8afb0b7e 100644 --- a/pinecone/core/openapi/db_control/model/collection_list.py +++ b/pinecone/core/openapi/db_control/model/collection_list.py @@ -147,6 +147,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -165,6 +166,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -186,6 +188,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +237,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -250,6 +254,7 @@ def __init__(self, *args, 
**kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/collection_model.py b/pinecone/core/openapi/db_control/model/collection_model.py index 5b3b9435..bb8e6577 100644 --- a/pinecone/core/openapi/db_control/model/collection_model.py +++ b/pinecone/core/openapi/db_control/model/collection_model.py @@ -164,6 +164,7 @@ def _from_openapi_data(cls: Type[T], name, status, environment, *args, **kwargs) """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -182,6 +183,7 @@ def _from_openapi_data(cls: Type[T], name, status, environment, *args, **kwargs) self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -206,6 +208,7 @@ def _from_openapi_data(cls: Type[T], name, status, environment, *args, **kwargs) required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -261,6 +264,7 @@ def __init__(self, name, status, environment, *args, **kwargs) -> None: # noqa: """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -277,6 +281,7 @@ def __init__(self, 
name, status, environment, *args, **kwargs) -> None: # noqa: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/configure_index_request.py b/pinecone/core/openapi/db_control/model/configure_index_request.py index bc79fd9d..352166e0 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request.py @@ -166,6 +166,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -184,6 +185,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -205,6 +207,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -256,6 +259,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -272,6 +276,7 @@ def 
__init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_embed.py b/pinecone/core/openapi/db_control/model/configure_index_request_embed.py index f63db37d..c3b1fc2b 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_embed.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_embed.py @@ -148,6 +148,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -166,6 +167,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -187,6 +189,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -238,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -254,6 
+258,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_spec.py b/pinecone/core/openapi/db_control/model/configure_index_request_spec.py index 840127a9..5f2b0668 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_spec.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_spec.py @@ -151,6 +151,7 @@ def _from_openapi_data(cls: Type[T], pod, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -169,6 +170,7 @@ def _from_openapi_data(cls: Type[T], pod, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -191,6 +193,7 @@ def _from_openapi_data(cls: Type[T], pod, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -241,6 +244,7 @@ def __init__(self, pod, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = 
kwargs.pop("_path_to_item", ()) @@ -257,6 +261,7 @@ def __init__(self, pod, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py b/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py index adf0efd5..91909c75 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py @@ -144,6 +144,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -162,6 +163,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -183,6 +185,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -232,6 +235,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = 
kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -248,6 +252,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_backup_request.py b/pinecone/core/openapi/db_control/model/create_backup_request.py index df3ef581..6375f18f 100644 --- a/pinecone/core/openapi/db_control/model/create_backup_request.py +++ b/pinecone/core/openapi/db_control/model/create_backup_request.py @@ -142,6 +142,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -160,6 +161,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -181,6 +183,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -230,6 +233,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = 
kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -246,6 +250,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_collection_request.py b/pinecone/core/openapi/db_control/model/create_collection_request.py index f1b0e06f..544d5f96 100644 --- a/pinecone/core/openapi/db_control/model/create_collection_request.py +++ b/pinecone/core/openapi/db_control/model/create_collection_request.py @@ -146,6 +146,7 @@ def _from_openapi_data(cls: Type[T], name, source, *args, **kwargs) -> T: # noq """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -164,6 +165,7 @@ def _from_openapi_data(cls: Type[T], name, source, *args, **kwargs) -> T: # noq self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -187,6 +189,7 @@ def _from_openapi_data(cls: Type[T], name, source, *args, **kwargs) -> T: # noq required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -238,6 +241,7 @@ def __init__(self, name, source, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = 
kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -254,6 +258,7 @@ def __init__(self, name, source, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_for_model_request.py b/pinecone/core/openapi/db_control/model/create_index_for_model_request.py index 05a4b10f..6fe5fe79 100644 --- a/pinecone/core/openapi/db_control/model/create_index_for_model_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_for_model_request.py @@ -174,6 +174,7 @@ def _from_openapi_data(cls: Type[T], name, cloud, region, embed, *args, **kwargs """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -192,6 +193,7 @@ def _from_openapi_data(cls: Type[T], name, cloud, region, embed, *args, **kwargs self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -217,6 +219,7 @@ def _from_openapi_data(cls: Type[T], name, cloud, region, embed, *args, **kwargs required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -272,6 +275,7 @@ def __init__(self, name, cloud, region, embed, *args, **kwargs) -> None: # noqa """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + 
_enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -288,6 +292,7 @@ def __init__(self, name, cloud, region, embed, *args, **kwargs) -> None: # noqa self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py b/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py index a70cce3e..63693c90 100644 --- a/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py +++ b/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py @@ -158,6 +158,7 @@ def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -176,6 +177,7 @@ def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -199,6 +201,7 @@ def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -254,6 +257,7 @@ def __init__(self, model, field_map, *args, 
**kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -270,6 +274,7 @@ def __init__(self, model, field_map, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py b/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py index ef3127fa..1070f4eb 100644 --- a/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py @@ -159,6 +159,7 @@ def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -177,6 +178,7 @@ def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -199,6 +201,7 @@ def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", 
"_spec_property_naming", @@ -251,6 +254,7 @@ def __init__(self, name, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -267,6 +271,7 @@ def __init__(self, name, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py b/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py index dc6bbbbd..360df0c2 100644 --- a/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py +++ b/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py @@ -144,6 +144,7 @@ def _from_openapi_data(cls: Type[T], restore_job_id, index_id, *args, **kwargs) """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -162,6 +163,7 @@ def _from_openapi_data(cls: Type[T], restore_job_id, index_id, *args, **kwargs) self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -185,6 +187,7 @@ def _from_openapi_data(cls: Type[T], restore_job_id, index_id, *args, **kwargs) required_properties = set( [ 
"_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -236,6 +239,7 @@ def __init__(self, restore_job_id, index_id, *args, **kwargs) -> None: # noqa: """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -252,6 +256,7 @@ def __init__(self, restore_job_id, index_id, *args, **kwargs) -> None: # noqa: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_request.py b/pinecone/core/openapi/db_control/model/create_index_request.py index 55a6096e..06c11c97 100644 --- a/pinecone/core/openapi/db_control/model/create_index_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_request.py @@ -176,6 +176,7 @@ def _from_openapi_data(cls: Type[T], name, spec, *args, **kwargs) -> T: # noqa: """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -194,6 +195,7 @@ def _from_openapi_data(cls: Type[T], name, spec, *args, **kwargs) -> T: # noqa: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -217,6 +219,7 @@ def _from_openapi_data(cls: Type[T], name, spec, *args, **kwargs) -> 
T: # noqa: required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -273,6 +276,7 @@ def __init__(self, name, spec, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -289,6 +293,7 @@ def __init__(self, name, spec, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/deletion_protection.py b/pinecone/core/openapi/db_control/model/deletion_protection.py index 4b67f67e..c70945a2 100644 --- a/pinecone/core/openapi/db_control/model/deletion_protection.py +++ b/pinecone/core/openapi/db_control/model/deletion_protection.py @@ -96,6 +96,7 @@ def discriminator(cls): required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -169,6 +170,7 @@ def __init__(self, *args, **kwargs) -> None: value = "disabled" _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) @@ -176,6 +178,7 @@ def __init__(self, *args, **kwargs) -> None: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = 
_spec_property_naming self._path_to_item = _path_to_item @@ -257,6 +260,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: value = "disabled" _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) @@ -264,6 +268,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/error_response.py b/pinecone/core/openapi/db_control/model/error_response.py index 886a9c39..23445308 100644 --- a/pinecone/core/openapi/db_control/model/error_response.py +++ b/pinecone/core/openapi/db_control/model/error_response.py @@ -152,6 +152,7 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -170,6 +171,7 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -193,6 +195,7 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no required_properties = set( [ "_enforce_allowed_values", + 
"_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -244,6 +247,7 @@ def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -260,6 +264,7 @@ def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/error_response_error.py b/pinecone/core/openapi/db_control/model/error_response_error.py index 2cf453cc..30cc62ac 100644 --- a/pinecone/core/openapi/db_control/model/error_response_error.py +++ b/pinecone/core/openapi/db_control/model/error_response_error.py @@ -170,6 +170,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -188,6 +189,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -211,6 +213,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no required_properties = set( [ 
"_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -263,6 +266,7 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -279,6 +283,7 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_list.py b/pinecone/core/openapi/db_control/model/index_list.py index 6b22bea3..b2f7468e 100644 --- a/pinecone/core/openapi/db_control/model/index_list.py +++ b/pinecone/core/openapi/db_control/model/index_list.py @@ -147,6 +147,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -165,6 +166,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -186,6 +188,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + 
"_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +237,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -250,6 +254,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_model.py b/pinecone/core/openapi/db_control/model/index_model.py index 5ded19c3..97ada3aa 100644 --- a/pinecone/core/openapi/db_control/model/index_model.py +++ b/pinecone/core/openapi/db_control/model/index_model.py @@ -190,6 +190,7 @@ def _from_openapi_data(cls: Type[T], name, metric, host, spec, status, *args, ** vector_type = kwargs.get("vector_type", "dense") _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -208,6 +209,7 @@ def _from_openapi_data(cls: Type[T], name, metric, host, spec, status, *args, ** self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -235,6 +237,7 @@ def _from_openapi_data(cls: Type[T], name, metric, host, spec, status, *args, ** required_properties = set( [ 
"_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -295,6 +298,7 @@ def __init__(self, name, metric, host, spec, status, *args, **kwargs) -> None: vector_type = kwargs.get("vector_type", "dense") _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -311,6 +315,7 @@ def __init__(self, name, metric, host, spec, status, *args, **kwargs) -> None: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_model_spec.py b/pinecone/core/openapi/db_control/model/index_model_spec.py index 1b5cd871..7fc5452b 100644 --- a/pinecone/core/openapi/db_control/model/index_model_spec.py +++ b/pinecone/core/openapi/db_control/model/index_model_spec.py @@ -157,6 +157,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -175,6 +176,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -196,6 +198,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) 
-> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -246,6 +249,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -262,6 +266,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_model_status.py b/pinecone/core/openapi/db_control/model/index_model_status.py index d549526d..52821c68 100644 --- a/pinecone/core/openapi/db_control/model/index_model_status.py +++ b/pinecone/core/openapi/db_control/model/index_model_status.py @@ -156,6 +156,7 @@ def _from_openapi_data(cls: Type[T], ready, state, *args, **kwargs) -> T: # noq """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -174,6 +175,7 @@ def _from_openapi_data(cls: Type[T], ready, state, *args, **kwargs) -> T: # noq self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -197,6 +199,7 @@ def _from_openapi_data(cls: Type[T], ready, state, *args, **kwargs) -> 
T: # noq required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -248,6 +251,7 @@ def __init__(self, ready, state, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -264,6 +268,7 @@ def __init__(self, ready, state, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_spec.py b/pinecone/core/openapi/db_control/model/index_spec.py index d0acd19c..fe1ac44e 100644 --- a/pinecone/core/openapi/db_control/model/index_spec.py +++ b/pinecone/core/openapi/db_control/model/index_spec.py @@ -150,6 +150,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +169,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -189,6 +191,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = 
set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -239,6 +242,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -255,6 +259,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_tags.py b/pinecone/core/openapi/db_control/model/index_tags.py index c71dc4f9..62f17fb0 100644 --- a/pinecone/core/openapi/db_control/model/index_tags.py +++ b/pinecone/core/openapi/db_control/model/index_tags.py @@ -134,6 +134,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -152,6 +153,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -173,6 +175,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", 
"_data_store", "_check_type", "_spec_property_naming", @@ -220,6 +223,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -236,6 +240,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/model_index_embed.py b/pinecone/core/openapi/db_control/model/model_index_embed.py index c47df155..1a7f2010 100644 --- a/pinecone/core/openapi/db_control/model/model_index_embed.py +++ b/pinecone/core/openapi/db_control/model/model_index_embed.py @@ -163,6 +163,7 @@ def _from_openapi_data(cls: Type[T], model, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -181,6 +182,7 @@ def _from_openapi_data(cls: Type[T], model, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -203,6 +205,7 @@ def _from_openapi_data(cls: Type[T], model, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", 
"_data_store", "_check_type", "_spec_property_naming", @@ -259,6 +262,7 @@ def __init__(self, model, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -275,6 +279,7 @@ def __init__(self, model, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/pagination_response.py b/pinecone/core/openapi/db_control/model/pagination_response.py index 1942c2c5..8a954cc4 100644 --- a/pinecone/core/openapi/db_control/model/pagination_response.py +++ b/pinecone/core/openapi/db_control/model/pagination_response.py @@ -141,6 +141,7 @@ def _from_openapi_data(cls: Type[T], next, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -159,6 +160,7 @@ def _from_openapi_data(cls: Type[T], next, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -181,6 +183,7 @@ def _from_openapi_data(cls: Type[T], next, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + 
"_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -231,6 +234,7 @@ def __init__(self, next, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -247,6 +251,7 @@ def __init__(self, next, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/pod_spec.py b/pinecone/core/openapi/db_control/model/pod_spec.py index d19d2645..64c0b2a7 100644 --- a/pinecone/core/openapi/db_control/model/pod_spec.py +++ b/pinecone/core/openapi/db_control/model/pod_spec.py @@ -174,6 +174,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa pod_type = kwargs.get("pod_type", "p1.x1") _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -192,6 +193,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -215,6 +217,7 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa required_properties = set( [ "_enforce_allowed_values", 
+ "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -272,6 +275,7 @@ def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 pod_type = kwargs.get("pod_type", "p1.x1") _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -288,6 +292,7 @@ def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py b/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py index 02b855a1..e605a141 100644 --- a/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py +++ b/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py @@ -139,6 +139,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: 
E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -226,6 +229,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -242,6 +246,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/restore_job_list.py b/pinecone/core/openapi/db_control/model/restore_job_list.py index bcf4bd62..2f39d91c 100644 --- a/pinecone/core/openapi/db_control/model/restore_job_list.py +++ b/pinecone/core/openapi/db_control/model/restore_job_list.py @@ -154,6 +154,7 @@ def _from_openapi_data(cls: Type[T], data, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -172,6 +173,7 @@ def _from_openapi_data(cls: Type[T], data, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -194,6 +196,7 @@ def _from_openapi_data(cls: Type[T], data, *args, **kwargs) -> T: # noqa: E501 
required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -245,6 +248,7 @@ def __init__(self, data, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -261,6 +265,7 @@ def __init__(self, data, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/restore_job_model.py b/pinecone/core/openapi/db_control/model/restore_job_model.py index bc75aba4..951200d1 100644 --- a/pinecone/core/openapi/db_control/model/restore_job_model.py +++ b/pinecone/core/openapi/db_control/model/restore_job_model.py @@ -174,6 +174,7 @@ def _from_openapi_data( """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -192,6 +193,7 @@ def _from_openapi_data( self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -219,6 +221,7 @@ def _from_openapi_data( required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -286,6 +289,7 @@ def 
__init__( """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -302,6 +306,7 @@ def __init__( self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/serverless_spec.py b/pinecone/core/openapi/db_control/model/serverless_spec.py index 595ad811..efa9157e 100644 --- a/pinecone/core/openapi/db_control/model/serverless_spec.py +++ b/pinecone/core/openapi/db_control/model/serverless_spec.py @@ -146,6 +146,7 @@ def _from_openapi_data(cls: Type[T], cloud, region, *args, **kwargs) -> T: # no """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -164,6 +165,7 @@ def _from_openapi_data(cls: Type[T], cloud, region, *args, **kwargs) -> T: # no self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -187,6 +189,7 @@ def _from_openapi_data(cls: Type[T], cloud, region, *args, **kwargs) -> T: # no required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -238,6 +241,7 @@ def __init__(self, cloud, region, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = 
kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -254,6 +258,7 @@ def __init__(self, cloud, region, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/delete_request.py b/pinecone/core/openapi/db_data/model/delete_request.py index 1f2abd25..0d3409a7 100644 --- a/pinecone/core/openapi/db_data/model/delete_request.py +++ b/pinecone/core/openapi/db_data/model/delete_request.py @@ -148,6 +148,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -166,6 +167,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -187,6 +189,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -238,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) 
+ _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -254,6 +258,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/describe_index_stats_request.py b/pinecone/core/openapi/db_data/model/describe_index_stats_request.py index 059c79a8..6c54d92f 100644 --- a/pinecone/core/openapi/db_data/model/describe_index_stats_request.py +++ b/pinecone/core/openapi/db_data/model/describe_index_stats_request.py @@ -139,6 +139,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -226,6 +229,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + 
_enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -242,6 +246,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/fetch_response.py b/pinecone/core/openapi/db_data/model/fetch_response.py index b3d23ef9..092fad1c 100644 --- a/pinecone/core/openapi/db_data/model/fetch_response.py +++ b/pinecone/core/openapi/db_data/model/fetch_response.py @@ -155,6 +155,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -173,6 +174,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -194,6 +196,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -244,6 +247,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", 
True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -260,6 +264,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/hit.py b/pinecone/core/openapi/db_data/model/hit.py index c6407d2f..1a7431d1 100644 --- a/pinecone/core/openapi/db_data/model/hit.py +++ b/pinecone/core/openapi/db_data/model/hit.py @@ -147,6 +147,7 @@ def _from_openapi_data(cls: Type[T], _id, _score, fields, *args, **kwargs) -> T: """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -165,6 +166,7 @@ def _from_openapi_data(cls: Type[T], _id, _score, fields, *args, **kwargs) -> T: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -189,6 +191,7 @@ def _from_openapi_data(cls: Type[T], _id, _score, fields, *args, **kwargs) -> T: required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -241,6 +244,7 @@ def __init__(self, _id, _score, fields, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) 
_spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -257,6 +261,7 @@ def __init__(self, _id, _score, fields, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/import_error_mode.py b/pinecone/core/openapi/db_data/model/import_error_mode.py index 66bd3fd0..2f320d88 100644 --- a/pinecone/core/openapi/db_data/model/import_error_mode.py +++ b/pinecone/core/openapi/db_data/model/import_error_mode.py @@ -141,6 +141,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -159,6 +160,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -180,6 +182,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -228,6 +231,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = 
kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -244,6 +248,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/import_model.py b/pinecone/core/openapi/db_data/model/import_model.py index 4c87e189..6bb3c296 100644 --- a/pinecone/core/openapi/db_data/model/import_model.py +++ b/pinecone/core/openapi/db_data/model/import_model.py @@ -171,6 +171,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -189,6 +190,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -210,6 +212,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -265,6 +268,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = 
kwargs.pop("_path_to_item", ()) @@ -281,6 +285,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/index_description.py b/pinecone/core/openapi/db_data/model/index_description.py index af438c59..0af2c259 100644 --- a/pinecone/core/openapi/db_data/model/index_description.py +++ b/pinecone/core/openapi/db_data/model/index_description.py @@ -162,6 +162,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -180,6 +181,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -201,6 +203,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -254,6 +257,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -270,6 
+274,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/list_imports_response.py b/pinecone/core/openapi/db_data/model/list_imports_response.py index ede53ad1..d2321fb8 100644 --- a/pinecone/core/openapi/db_data/model/list_imports_response.py +++ b/pinecone/core/openapi/db_data/model/list_imports_response.py @@ -152,6 +152,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -170,6 +171,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -191,6 +193,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -240,6 +243,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -256,6 +260,7 @@ def __init__(self, 
*args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/list_item.py b/pinecone/core/openapi/db_data/model/list_item.py index eed68e6d..22d2e0fd 100644 --- a/pinecone/core/openapi/db_data/model/list_item.py +++ b/pinecone/core/openapi/db_data/model/list_item.py @@ -139,6 +139,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -226,6 +229,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -242,6 +246,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} 
self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/list_namespaces_response.py b/pinecone/core/openapi/db_data/model/list_namespaces_response.py index 1699d04a..5bbc61be 100644 --- a/pinecone/core/openapi/db_data/model/list_namespaces_response.py +++ b/pinecone/core/openapi/db_data/model/list_namespaces_response.py @@ -152,6 +152,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -170,6 +171,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -191,6 +193,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -240,6 +243,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -256,6 +260,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} 
self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/list_response.py b/pinecone/core/openapi/db_data/model/list_response.py index 4e28ce18..f5ea54af 100644 --- a/pinecone/core/openapi/db_data/model/list_response.py +++ b/pinecone/core/openapi/db_data/model/list_response.py @@ -160,6 +160,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -178,6 +179,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -199,6 +201,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -250,6 +253,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -266,6 +270,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + 
self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/namespace_description.py b/pinecone/core/openapi/db_data/model/namespace_description.py index 4dcde11c..abd3fc50 100644 --- a/pinecone/core/openapi/db_data/model/namespace_description.py +++ b/pinecone/core/openapi/db_data/model/namespace_description.py @@ -142,6 +142,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -160,6 +161,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -181,6 +183,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -230,6 +233,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -246,6 +250,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = 
_enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/namespace_summary.py b/pinecone/core/openapi/db_data/model/namespace_summary.py index 15800b09..752f95ee 100644 --- a/pinecone/core/openapi/db_data/model/namespace_summary.py +++ b/pinecone/core/openapi/db_data/model/namespace_summary.py @@ -139,6 +139,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -226,6 +229,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -242,6 +246,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = 
_check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/pagination.py b/pinecone/core/openapi/db_data/model/pagination.py index 89d07865..6ddb4973 100644 --- a/pinecone/core/openapi/db_data/model/pagination.py +++ b/pinecone/core/openapi/db_data/model/pagination.py @@ -139,6 +139,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -226,6 +229,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -242,6 +246,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming 
self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/protobuf_any.py b/pinecone/core/openapi/db_data/model/protobuf_any.py index 218f294f..fe7f54c2 100644 --- a/pinecone/core/openapi/db_data/model/protobuf_any.py +++ b/pinecone/core/openapi/db_data/model/protobuf_any.py @@ -142,6 +142,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -160,6 +161,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -181,6 +183,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -230,6 +233,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -246,6 +250,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git 
a/pinecone/core/openapi/db_data/model/protobuf_null_value.py b/pinecone/core/openapi/db_data/model/protobuf_null_value.py index ed47d38d..ecf6b359 100644 --- a/pinecone/core/openapi/db_data/model/protobuf_null_value.py +++ b/pinecone/core/openapi/db_data/model/protobuf_null_value.py @@ -96,6 +96,7 @@ def discriminator(cls): required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -169,6 +170,7 @@ def __init__(self, *args, **kwargs) -> None: value = "NULL_VALUE" _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) @@ -176,6 +178,7 @@ def __init__(self, *args, **kwargs) -> None: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -257,6 +260,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: value = "NULL_VALUE" _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) @@ -264,6 +268,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/query_request.py 
b/pinecone/core/openapi/db_data/model/query_request.py index 41ab6a52..989ad83e 100644 --- a/pinecone/core/openapi/db_data/model/query_request.py +++ b/pinecone/core/openapi/db_data/model/query_request.py @@ -180,6 +180,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -198,6 +199,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -220,6 +222,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -278,6 +281,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -294,6 +298,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/query_response.py 
b/pinecone/core/openapi/db_data/model/query_response.py index aaabb0bd..9d693f34 100644 --- a/pinecone/core/openapi/db_data/model/query_response.py +++ b/pinecone/core/openapi/db_data/model/query_response.py @@ -160,6 +160,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -178,6 +179,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -199,6 +201,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -250,6 +253,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -266,6 +270,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/query_vector.py b/pinecone/core/openapi/db_data/model/query_vector.py index 
903ce62a..3ea0196f 100644 --- a/pinecone/core/openapi/db_data/model/query_vector.py +++ b/pinecone/core/openapi/db_data/model/query_vector.py @@ -164,6 +164,7 @@ def _from_openapi_data(cls: Type[T], values, *args, **kwargs) -> T: # noqa: E50 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -182,6 +183,7 @@ def _from_openapi_data(cls: Type[T], values, *args, **kwargs) -> T: # noqa: E50 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -204,6 +206,7 @@ def _from_openapi_data(cls: Type[T], values, *args, **kwargs) -> T: # noqa: E50 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -258,6 +261,7 @@ def __init__(self, values, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -274,6 +278,7 @@ def __init__(self, values, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/rpc_status.py b/pinecone/core/openapi/db_data/model/rpc_status.py index cb6c7d24..ac8da180 100644 --- 
a/pinecone/core/openapi/db_data/model/rpc_status.py +++ b/pinecone/core/openapi/db_data/model/rpc_status.py @@ -153,6 +153,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -171,6 +172,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -192,6 +194,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -242,6 +245,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -258,6 +262,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/scored_vector.py b/pinecone/core/openapi/db_data/model/scored_vector.py index 3248def5..61f28530 100644 --- a/pinecone/core/openapi/db_data/model/scored_vector.py +++ 
b/pinecone/core/openapi/db_data/model/scored_vector.py @@ -163,6 +163,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -181,6 +182,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -203,6 +205,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -257,6 +260,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -273,6 +277,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_request.py b/pinecone/core/openapi/db_data/model/search_records_request.py index a85244af..19b0ba55 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request.py +++ 
b/pinecone/core/openapi/db_data/model/search_records_request.py @@ -161,6 +161,7 @@ def _from_openapi_data(cls: Type[T], query, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -179,6 +180,7 @@ def _from_openapi_data(cls: Type[T], query, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -201,6 +203,7 @@ def _from_openapi_data(cls: Type[T], query, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -253,6 +256,7 @@ def __init__(self, query, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -269,6 +273,7 @@ def __init__(self, query, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_request_query.py b/pinecone/core/openapi/db_data/model/search_records_request_query.py index 659cc057..790dbf82 100644 --- 
a/pinecone/core/openapi/db_data/model/search_records_request_query.py +++ b/pinecone/core/openapi/db_data/model/search_records_request_query.py @@ -161,6 +161,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -179,6 +180,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -201,6 +203,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -255,6 +258,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -271,6 +275,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_request_rerank.py b/pinecone/core/openapi/db_data/model/search_records_request_rerank.py 
index bc80d08b..b365a7d3 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request_rerank.py +++ b/pinecone/core/openapi/db_data/model/search_records_request_rerank.py @@ -153,6 +153,7 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -171,6 +172,7 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -194,6 +196,7 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -248,6 +251,7 @@ def __init__(self, model, rank_fields, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -264,6 +268,7 @@ def __init__(self, model, rank_fields, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_response.py 
b/pinecone/core/openapi/db_data/model/search_records_response.py index a46aa0c4..229b60dd 100644 --- a/pinecone/core/openapi/db_data/model/search_records_response.py +++ b/pinecone/core/openapi/db_data/model/search_records_response.py @@ -156,6 +156,7 @@ def _from_openapi_data(cls: Type[T], result, usage, *args, **kwargs) -> T: # no """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -174,6 +175,7 @@ def _from_openapi_data(cls: Type[T], result, usage, *args, **kwargs) -> T: # no self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -197,6 +199,7 @@ def _from_openapi_data(cls: Type[T], result, usage, *args, **kwargs) -> T: # no required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -248,6 +251,7 @@ def __init__(self, result, usage, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -264,6 +268,7 @@ def __init__(self, result, usage, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git 
a/pinecone/core/openapi/db_data/model/search_records_response_result.py b/pinecone/core/openapi/db_data/model/search_records_response_result.py index f479c0a4..ab04277f 100644 --- a/pinecone/core/openapi/db_data/model/search_records_response_result.py +++ b/pinecone/core/openapi/db_data/model/search_records_response_result.py @@ -149,6 +149,7 @@ def _from_openapi_data(cls: Type[T], hits, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -167,6 +168,7 @@ def _from_openapi_data(cls: Type[T], hits, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -189,6 +191,7 @@ def _from_openapi_data(cls: Type[T], hits, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -239,6 +242,7 @@ def __init__(self, hits, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -255,6 +259,7 @@ def __init__(self, hits, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming 
self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_vector.py b/pinecone/core/openapi/db_data/model/search_records_vector.py index 81076824..34afe2cf 100644 --- a/pinecone/core/openapi/db_data/model/search_records_vector.py +++ b/pinecone/core/openapi/db_data/model/search_records_vector.py @@ -153,6 +153,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -171,6 +172,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -192,6 +194,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -242,6 +245,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -258,6 +262,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = 
_path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_usage.py b/pinecone/core/openapi/db_data/model/search_usage.py index ca7cda7c..c4444c8b 100644 --- a/pinecone/core/openapi/db_data/model/search_usage.py +++ b/pinecone/core/openapi/db_data/model/search_usage.py @@ -151,6 +151,7 @@ def _from_openapi_data(cls: Type[T], read_units, *args, **kwargs) -> T: # noqa: """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -169,6 +170,7 @@ def _from_openapi_data(cls: Type[T], read_units, *args, **kwargs) -> T: # noqa: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -191,6 +193,7 @@ def _from_openapi_data(cls: Type[T], read_units, *args, **kwargs) -> T: # noqa: required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -243,6 +246,7 @@ def __init__(self, read_units, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -259,6 +263,7 @@ def __init__(self, read_units, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff 
--git a/pinecone/core/openapi/db_data/model/search_vector.py b/pinecone/core/openapi/db_data/model/search_vector.py index 74ca49d4..00be22b5 100644 --- a/pinecone/core/openapi/db_data/model/search_vector.py +++ b/pinecone/core/openapi/db_data/model/search_vector.py @@ -147,6 +147,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -165,6 +166,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -186,6 +188,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +237,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -250,6 +254,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git 
a/pinecone/core/openapi/db_data/model/single_query_results.py b/pinecone/core/openapi/db_data/model/single_query_results.py index 9c790616..d5636688 100644 --- a/pinecone/core/openapi/db_data/model/single_query_results.py +++ b/pinecone/core/openapi/db_data/model/single_query_results.py @@ -150,6 +150,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +169,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -189,6 +191,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -238,6 +241,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -254,6 +258,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git 
a/pinecone/core/openapi/db_data/model/sparse_values.py b/pinecone/core/openapi/db_data/model/sparse_values.py index 6b5acd85..8100f664 100644 --- a/pinecone/core/openapi/db_data/model/sparse_values.py +++ b/pinecone/core/openapi/db_data/model/sparse_values.py @@ -147,6 +147,7 @@ def _from_openapi_data(cls: Type[T], indices, values, *args, **kwargs) -> T: # """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -165,6 +166,7 @@ def _from_openapi_data(cls: Type[T], indices, values, *args, **kwargs) -> T: # self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -188,6 +190,7 @@ def _from_openapi_data(cls: Type[T], indices, values, *args, **kwargs) -> T: # required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -239,6 +242,7 @@ def __init__(self, indices, values, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -255,6 +259,7 @@ def __init__(self, indices, values, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git 
a/pinecone/core/openapi/db_data/model/start_import_request.py b/pinecone/core/openapi/db_data/model/start_import_request.py index f8b4602a..20e23275 100644 --- a/pinecone/core/openapi/db_data/model/start_import_request.py +++ b/pinecone/core/openapi/db_data/model/start_import_request.py @@ -158,6 +158,7 @@ def _from_openapi_data(cls: Type[T], uri, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -176,6 +177,7 @@ def _from_openapi_data(cls: Type[T], uri, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -198,6 +200,7 @@ def _from_openapi_data(cls: Type[T], uri, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -250,6 +253,7 @@ def __init__(self, uri, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -266,6 +270,7 @@ def __init__(self, uri, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git 
a/pinecone/core/openapi/db_data/model/start_import_response.py b/pinecone/core/openapi/db_data/model/start_import_response.py index 0eba3dd9..d8511fe8 100644 --- a/pinecone/core/openapi/db_data/model/start_import_response.py +++ b/pinecone/core/openapi/db_data/model/start_import_response.py @@ -141,6 +141,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -159,6 +160,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -180,6 +182,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -228,6 +231,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -244,6 +248,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git 
a/pinecone/core/openapi/db_data/model/update_request.py b/pinecone/core/openapi/db_data/model/update_request.py index b2a879d5..c45849b1 100644 --- a/pinecone/core/openapi/db_data/model/update_request.py +++ b/pinecone/core/openapi/db_data/model/update_request.py @@ -164,6 +164,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -182,6 +183,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -204,6 +206,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -258,6 +261,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -274,6 +278,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git 
a/pinecone/core/openapi/db_data/model/upsert_record.py b/pinecone/core/openapi/db_data/model/upsert_record.py index 65739cc4..31445ab4 100644 --- a/pinecone/core/openapi/db_data/model/upsert_record.py +++ b/pinecone/core/openapi/db_data/model/upsert_record.py @@ -141,6 +141,7 @@ def _from_openapi_data(cls: Type[T], _id, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -159,6 +160,7 @@ def _from_openapi_data(cls: Type[T], _id, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -181,6 +183,7 @@ def _from_openapi_data(cls: Type[T], _id, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -231,6 +234,7 @@ def __init__(self, _id, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -247,6 +251,7 @@ def __init__(self, _id, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git 
a/pinecone/core/openapi/db_data/model/upsert_request.py b/pinecone/core/openapi/db_data/model/upsert_request.py index b5bc0b77..fccfb3c8 100644 --- a/pinecone/core/openapi/db_data/model/upsert_request.py +++ b/pinecone/core/openapi/db_data/model/upsert_request.py @@ -152,6 +152,7 @@ def _from_openapi_data(cls: Type[T], vectors, *args, **kwargs) -> T: # noqa: E5 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -170,6 +171,7 @@ def _from_openapi_data(cls: Type[T], vectors, *args, **kwargs) -> T: # noqa: E5 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -192,6 +194,7 @@ def _from_openapi_data(cls: Type[T], vectors, *args, **kwargs) -> T: # noqa: E5 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -243,6 +246,7 @@ def __init__(self, vectors, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -259,6 +263,7 @@ def __init__(self, vectors, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git 
a/pinecone/core/openapi/db_data/model/upsert_response.py b/pinecone/core/openapi/db_data/model/upsert_response.py index 98fe68aa..57098ed2 100644 --- a/pinecone/core/openapi/db_data/model/upsert_response.py +++ b/pinecone/core/openapi/db_data/model/upsert_response.py @@ -139,6 +139,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -226,6 +229,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -242,6 +246,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/usage.py 
b/pinecone/core/openapi/db_data/model/usage.py index d8b02b78..61f3faa5 100644 --- a/pinecone/core/openapi/db_data/model/usage.py +++ b/pinecone/core/openapi/db_data/model/usage.py @@ -139,6 +139,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -226,6 +229,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -242,6 +246,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/vector.py b/pinecone/core/openapi/db_data/model/vector.py index ddda25d3..a83536a5 100644 --- 
a/pinecone/core/openapi/db_data/model/vector.py +++ b/pinecone/core/openapi/db_data/model/vector.py @@ -161,6 +161,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -179,6 +180,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -201,6 +203,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -254,6 +257,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -270,6 +274,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/vector_values.py b/pinecone/core/openapi/db_data/model/vector_values.py index ad32376f..b18494cd 100644 --- a/pinecone/core/openapi/db_data/model/vector_values.py 
+++ b/pinecone/core/openapi/db_data/model/vector_values.py @@ -94,6 +94,7 @@ def discriminator(cls): required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -171,6 +172,7 @@ def __init__(self, *args, **kwargs) -> None: ) _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) @@ -178,6 +180,7 @@ def __init__(self, *args, **kwargs) -> None: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -263,6 +266,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: ) _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) @@ -270,6 +274,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/dense_embedding.py b/pinecone/core/openapi/inference/model/dense_embedding.py index 40ec736a..50b6a725 100644 --- a/pinecone/core/openapi/inference/model/dense_embedding.py +++ b/pinecone/core/openapi/inference/model/dense_embedding.py @@ -144,6 +144,7 @@ def _from_openapi_data(cls: Type[T], values, vector_type, *args, 
**kwargs) -> T: """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -162,6 +163,7 @@ def _from_openapi_data(cls: Type[T], values, vector_type, *args, **kwargs) -> T: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -185,6 +187,7 @@ def _from_openapi_data(cls: Type[T], values, vector_type, *args, **kwargs) -> T: required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -236,6 +239,7 @@ def __init__(self, values, vector_type, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -252,6 +256,7 @@ def __init__(self, values, vector_type, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/document.py b/pinecone/core/openapi/inference/model/document.py index f6897555..79ebb5d0 100644 --- a/pinecone/core/openapi/inference/model/document.py +++ b/pinecone/core/openapi/inference/model/document.py @@ -134,6 +134,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 
""" _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -152,6 +153,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -173,6 +175,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -220,6 +223,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -236,6 +240,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embed_request.py b/pinecone/core/openapi/inference/model/embed_request.py index 0b71a114..0141f9db 100644 --- a/pinecone/core/openapi/inference/model/embed_request.py +++ b/pinecone/core/openapi/inference/model/embed_request.py @@ -155,6 +155,7 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no """ _enforce_allowed_values = 
kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -173,6 +174,7 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -196,6 +198,7 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -248,6 +251,7 @@ def __init__(self, model, inputs, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -264,6 +268,7 @@ def __init__(self, model, inputs, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embed_request_inputs.py b/pinecone/core/openapi/inference/model/embed_request_inputs.py index b1d26258..55fa9f69 100644 --- a/pinecone/core/openapi/inference/model/embed_request_inputs.py +++ b/pinecone/core/openapi/inference/model/embed_request_inputs.py @@ -139,6 +139,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ 
_enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +158,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +180,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -226,6 +229,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -242,6 +246,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embedding.py b/pinecone/core/openapi/inference/model/embedding.py index 799dd9ea..8b0bf05b 100644 --- a/pinecone/core/openapi/inference/model/embedding.py +++ b/pinecone/core/openapi/inference/model/embedding.py @@ -219,6 +219,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + 
"_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -274,6 +275,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -290,6 +292,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embeddings_list.py b/pinecone/core/openapi/inference/model/embeddings_list.py index 4c25f9f7..87df31f8 100644 --- a/pinecone/core/openapi/inference/model/embeddings_list.py +++ b/pinecone/core/openapi/inference/model/embeddings_list.py @@ -160,6 +160,7 @@ def _from_openapi_data(cls: Type[T], model, vector_type, data, usage, *args, **k """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -178,6 +179,7 @@ def _from_openapi_data(cls: Type[T], model, vector_type, data, usage, *args, **k self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -203,6 +205,7 @@ def _from_openapi_data(cls: Type[T], model, vector_type, data, usage, *args, **k required_properties = set( [ "_enforce_allowed_values", + 
"_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -256,6 +259,7 @@ def __init__(self, model, vector_type, data, usage, *args, **kwargs) -> None: # """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -272,6 +276,7 @@ def __init__(self, model, vector_type, data, usage, *args, **kwargs) -> None: # self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embeddings_list_usage.py b/pinecone/core/openapi/inference/model/embeddings_list_usage.py index 901df955..6cdea666 100644 --- a/pinecone/core/openapi/inference/model/embeddings_list_usage.py +++ b/pinecone/core/openapi/inference/model/embeddings_list_usage.py @@ -141,6 +141,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -159,6 +160,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -180,6 +182,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ 
"_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -228,6 +231,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -244,6 +248,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/error_response.py b/pinecone/core/openapi/inference/model/error_response.py index f7657fae..b526e6a2 100644 --- a/pinecone/core/openapi/inference/model/error_response.py +++ b/pinecone/core/openapi/inference/model/error_response.py @@ -152,6 +152,7 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -170,6 +171,7 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -193,6 +195,7 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no required_properties = set( [ "_enforce_allowed_values", + 
"_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -244,6 +247,7 @@ def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -260,6 +264,7 @@ def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/error_response_error.py b/pinecone/core/openapi/inference/model/error_response_error.py index 0984cbea..595a5f1f 100644 --- a/pinecone/core/openapi/inference/model/error_response_error.py +++ b/pinecone/core/openapi/inference/model/error_response_error.py @@ -168,6 +168,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -186,6 +187,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -209,6 +211,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no required_properties = set( [ 
"_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -261,6 +264,7 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -277,6 +281,7 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/model_info.py b/pinecone/core/openapi/inference/model/model_info.py index 9e4b076d..2d983cd9 100644 --- a/pinecone/core/openapi/inference/model/model_info.py +++ b/pinecone/core/openapi/inference/model/model_info.py @@ -194,6 +194,7 @@ def _from_openapi_data( """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -212,6 +213,7 @@ def _from_openapi_data( self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -237,6 +239,7 @@ def _from_openapi_data( required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -300,6 +303,7 @@ def __init__( """ _enforce_allowed_values = 
kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -316,6 +320,7 @@ def __init__( self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/model_info_list.py b/pinecone/core/openapi/inference/model/model_info_list.py index 2c2ca496..a47cb910 100644 --- a/pinecone/core/openapi/inference/model/model_info_list.py +++ b/pinecone/core/openapi/inference/model/model_info_list.py @@ -147,6 +147,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -165,6 +166,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -186,6 +188,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +237,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = 
kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -250,6 +254,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/model_info_metric.py b/pinecone/core/openapi/inference/model/model_info_metric.py index e84c8e19..0dbcbf1f 100644 --- a/pinecone/core/openapi/inference/model/model_info_metric.py +++ b/pinecone/core/openapi/inference/model/model_info_metric.py @@ -96,6 +96,7 @@ def discriminator(cls): required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -173,6 +174,7 @@ def __init__(self, *args, **kwargs) -> None: ) _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) @@ -180,6 +182,7 @@ def __init__(self, *args, **kwargs) -> None: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -265,6 +268,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: ) _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = 
kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) @@ -272,6 +276,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/model_info_supported_metrics.py b/pinecone/core/openapi/inference/model/model_info_supported_metrics.py index 63c3e2fd..a13fec67 100644 --- a/pinecone/core/openapi/inference/model/model_info_supported_metrics.py +++ b/pinecone/core/openapi/inference/model/model_info_supported_metrics.py @@ -102,6 +102,7 @@ def discriminator(cls): required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -179,6 +180,7 @@ def __init__(self, *args, **kwargs) -> None: ) _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) @@ -186,6 +188,7 @@ def __init__(self, *args, **kwargs) -> None: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -271,6 +274,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: ) _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", 
None) @@ -278,6 +282,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/model_info_supported_parameter.py b/pinecone/core/openapi/inference/model/model_info_supported_parameter.py index 3d889cad..ec84f8ea 100644 --- a/pinecone/core/openapi/inference/model/model_info_supported_parameter.py +++ b/pinecone/core/openapi/inference/model/model_info_supported_parameter.py @@ -164,6 +164,7 @@ def _from_openapi_data( """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -182,6 +183,7 @@ def _from_openapi_data( self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -207,6 +209,7 @@ def _from_openapi_data( required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -264,6 +267,7 @@ def __init__(self, parameter, type, value_type, required, *args, **kwargs) -> No """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -280,6 +284,7 @@ def __init__(self, parameter, type, value_type, required, *args, **kwargs) -> No 
self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/ranked_document.py b/pinecone/core/openapi/inference/model/ranked_document.py index 6ad8bbbb..e222d005 100644 --- a/pinecone/core/openapi/inference/model/ranked_document.py +++ b/pinecone/core/openapi/inference/model/ranked_document.py @@ -155,6 +155,7 @@ def _from_openapi_data(cls: Type[T], index, score, *args, **kwargs) -> T: # noq """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -173,6 +174,7 @@ def _from_openapi_data(cls: Type[T], index, score, *args, **kwargs) -> T: # noq self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -196,6 +198,7 @@ def _from_openapi_data(cls: Type[T], index, score, *args, **kwargs) -> T: # noq required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -248,6 +251,7 @@ def __init__(self, index, score, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -264,6 +268,7 @@ def __init__(self, index, score, *args, **kwargs) -> None: # noqa: 
E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/rerank_request.py b/pinecone/core/openapi/inference/model/rerank_request.py index cb4fd6d4..f9539da4 100644 --- a/pinecone/core/openapi/inference/model/rerank_request.py +++ b/pinecone/core/openapi/inference/model/rerank_request.py @@ -167,6 +167,7 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -185,6 +186,7 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -209,6 +211,7 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -265,6 +268,7 @@ def __init__(self, model, query, documents, *args, **kwargs) -> None: # noqa: E """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -281,6 +285,7 @@ def __init__(self, model, query, documents, *args, 
**kwargs) -> None: # noqa: E self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/rerank_result.py b/pinecone/core/openapi/inference/model/rerank_result.py index 53f56923..cc7e2b7c 100644 --- a/pinecone/core/openapi/inference/model/rerank_result.py +++ b/pinecone/core/openapi/inference/model/rerank_result.py @@ -157,6 +157,7 @@ def _from_openapi_data(cls: Type[T], model, data, usage, *args, **kwargs) -> T: """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -175,6 +176,7 @@ def _from_openapi_data(cls: Type[T], model, data, usage, *args, **kwargs) -> T: self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -199,6 +201,7 @@ def _from_openapi_data(cls: Type[T], model, data, usage, *args, **kwargs) -> T: required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -251,6 +254,7 @@ def __init__(self, model, data, usage, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -267,6 +271,7 @@ def __init__(self, model, data, usage, 
*args, **kwargs) -> None: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/rerank_result_usage.py b/pinecone/core/openapi/inference/model/rerank_result_usage.py index 4df237ab..02ae6320 100644 --- a/pinecone/core/openapi/inference/model/rerank_result_usage.py +++ b/pinecone/core/openapi/inference/model/rerank_result_usage.py @@ -141,6 +141,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -159,6 +160,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -180,6 +182,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -228,6 +231,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -244,6 +248,7 @@ def __init__(self, *args, **kwargs) -> None: # 
noqa: E501 self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/sparse_embedding.py b/pinecone/core/openapi/inference/model/sparse_embedding.py index 6a570c44..a86574f9 100644 --- a/pinecone/core/openapi/inference/model/sparse_embedding.py +++ b/pinecone/core/openapi/inference/model/sparse_embedding.py @@ -152,6 +152,7 @@ def _from_openapi_data( """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -170,6 +171,7 @@ def _from_openapi_data( self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -194,6 +196,7 @@ def _from_openapi_data( required_properties = set( [ "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -247,6 +250,7 @@ def __init__(self, sparse_values, sparse_indices, vector_type, *args, **kwargs) """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -263,6 +267,7 @@ def __init__(self, sparse_values, sparse_indices, vector_type, *args, **kwargs) self._data_store = {} self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations 
self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/openapi_support/model_utils.py b/pinecone/openapi_support/model_utils.py index 37cf1823..54cd9068 100644 --- a/pinecone/openapi_support/model_utils.py +++ b/pinecone/openapi_support/model_utils.py @@ -151,7 +151,10 @@ def set_attribute(self, name, value): # when listing indexes due to validation on the status field against # the allowed values in the enum. check_allowed_values(self.allowed_values, (name,), value) - if (name,) in self.validations: + if (name,) in self.validations and self._enforce_validations: + # Disabling validation on response makes the SDK + # less fragile if unexpected values are returned. In general, + # we want the SDK to display whatever is returned by the API. check_validations(self.validations, (name,), value, self._configuration) self.__dict__["_data_store"][name] = value From d342f7adb406f4aad81129d6e888f3ca1716ea56 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 16 May 2025 14:21:54 -0400 Subject: [PATCH 44/48] Format printed representationi of backup --- pinecone/db_control/models/backup_model.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pinecone/db_control/models/backup_model.py b/pinecone/db_control/models/backup_model.py index 0d49d33e..59dec7ba 100644 --- a/pinecone/db_control/models/backup_model.py +++ b/pinecone/db_control/models/backup_model.py @@ -7,15 +7,15 @@ class BackupModel: def __init__(self, backup: OpenAPIBackupModel): self._backup = backup - def __str__(self): - return str(self._backup) - def __getattr__(self, attr): return getattr(self._backup, attr) def __getitem__(self, key): return self.__getattr__(key) + def __str__(self): + return self.__repr__() + def __repr__(self): return json.dumps(self.to_dict(), indent=4, default=custom_serializer) From dd4306e5a4a1c68c3e3d453b15667c32749853a7 Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Fri, 16 
May 2025 14:30:23 -0400 Subject: [PATCH 45/48] Add retry configuration for asyncio (#492) ## Problem We want to use exponential backoff to retry failed requests made via PineconeAsyncio ## Solution - Add `aiohttp-retry` dependency without the `asyncio` extras group - Implement a JitterRetry class to calculate backoff intervals - The off-the-shelf JitteryRetry class has some odd behavior so i wanted to implement my own. This helps keep the behavior close to what we're doing for urllib3. - Intervals are roughly 0.1, 0.2, 0.4, 0.8 seconds (plus small jitter factor) - Manual testing with test server in `scripts/test-server.py` and `scripts/test-async-retry.py` ## Type of Change - [x] New feature (non-breaking change which adds functionality) ## Test Plan Added some scripts for manual testing --- pinecone/openapi_support/rest_aiohttp.py | 21 +++++++-- pinecone/openapi_support/rest_urllib3.py | 2 +- pinecone/openapi_support/retry_aiohttp.py | 44 +++++++++++++++++++ .../{retries.py => retry_urllib3.py} | 0 poetry.lock | 18 +++++++- pyproject.toml | 4 +- scripts/test-async-retry.py | 16 +++++++ tests/unit/openapi_support/test_retries.py | 2 +- 8 files changed, 98 insertions(+), 9 deletions(-) create mode 100644 pinecone/openapi_support/retry_aiohttp.py rename pinecone/openapi_support/{retries.py => retry_urllib3.py} (100%) create mode 100644 scripts/test-async-retry.py diff --git a/pinecone/openapi_support/rest_aiohttp.py b/pinecone/openapi_support/rest_aiohttp.py index 3cab099a..8b84e850 100644 --- a/pinecone/openapi_support/rest_aiohttp.py +++ b/pinecone/openapi_support/rest_aiohttp.py @@ -9,6 +9,8 @@ class AiohttpRestClient(RestClientInterface): def __init__(self, configuration: Configuration) -> None: try: import aiohttp + from aiohttp_retry import RetryClient + from .retry_aiohttp import JitterRetry except ImportError: raise ImportError( "Additional dependencies are required to use Pinecone with asyncio. 
Include these extra dependencies in your project by installing `pinecone[asyncio]`." @@ -28,8 +30,21 @@ def __init__(self, configuration: Configuration) -> None: else: self._session = aiohttp.ClientSession(connector=conn) + if configuration.retries is not None: + retry_options = configuration.retries + else: + retry_options = JitterRetry( + attempts=5, + start_timeout=0.1, + max_timeout=3.0, + statuses={500, 502, 503, 504}, + methods=None, # retry on all methods + exceptions={aiohttp.ClientError, aiohttp.ServerDisconnectedError}, + ) + self._retry_client = RetryClient(client_session=self._session, retry_options=retry_options) + async def close(self): - await self._session.close() + await self._retry_client.close() async def request( self, @@ -48,7 +63,7 @@ async def request( if "application/x-ndjson" in headers.get("Content-Type", "").lower(): ndjson_data = "\n".join(json.dumps(record) for record in body) - async with self._session.request( + async with self._retry_client.request( method, url, params=query_params, headers=headers, data=ndjson_data ) as resp: content = await resp.read() @@ -57,7 +72,7 @@ async def request( ) else: - async with self._session.request( + async with self._retry_client.request( method, url, params=query_params, headers=headers, json=body ) as resp: content = await resp.read() diff --git a/pinecone/openapi_support/rest_urllib3.py b/pinecone/openapi_support/rest_urllib3.py index f310ca99..3f718347 100644 --- a/pinecone/openapi_support/rest_urllib3.py +++ b/pinecone/openapi_support/rest_urllib3.py @@ -8,7 +8,7 @@ from .rest_utils import raise_exceptions_or_return, RESTResponse, RestClientInterface import urllib3 -from .retries import JitterRetry +from .retry_urllib3 import JitterRetry from .exceptions import PineconeApiException, PineconeApiValueError diff --git a/pinecone/openapi_support/retry_aiohttp.py b/pinecone/openapi_support/retry_aiohttp.py new file mode 100644 index 00000000..2b3019e7 --- /dev/null +++ 
b/pinecone/openapi_support/retry_aiohttp.py @@ -0,0 +1,44 @@ +import random +from typing import Optional +from aiohttp_retry import RetryOptionsBase, EvaluateResponseCallbackType, ClientResponse +import logging + +logger = logging.getLogger(__name__) + + +class JitterRetry(RetryOptionsBase): + """https://github.com/inyutin/aiohttp_retry/issues/44.""" + + def __init__( + self, + attempts: int = 3, # How many times we should retry + start_timeout: float = 0.1, # Base timeout time, then it exponentially grow + max_timeout: float = 5.0, # Max possible timeout between tries + statuses: Optional[set[int]] = None, # On which statuses we should retry + exceptions: Optional[set[type[Exception]]] = None, # On which exceptions we should retry + methods: Optional[set[str]] = None, # On which HTTP methods we should retry + retry_all_server_errors: bool = True, + evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None, + ) -> None: + super().__init__( + attempts=attempts, + statuses=statuses, + exceptions=exceptions, + methods=methods, + retry_all_server_errors=retry_all_server_errors, + evaluate_response_callback=evaluate_response_callback, + ) + + self._start_timeout: float = start_timeout + self._max_timeout: float = max_timeout + + def get_timeout( + self, + attempt: int, + response: Optional[ClientResponse] = None, # noqa: ARG002 + ) -> float: + logger.debug(f"JitterRetry get_timeout: attempt={attempt}, response={response}") + """Return timeout with exponential backoff.""" + jitter = random.uniform(0, 0.1) + timeout = self._start_timeout * (2 ** (attempt - 1)) + return min(timeout + jitter, self._max_timeout) diff --git a/pinecone/openapi_support/retries.py b/pinecone/openapi_support/retry_urllib3.py similarity index 100% rename from pinecone/openapi_support/retries.py rename to pinecone/openapi_support/retry_urllib3.py diff --git a/poetry.lock b/poetry.lock index 2e4de34b..823ed4af 100644 --- a/poetry.lock +++ b/poetry.lock @@ -109,6 +109,20 @@ yarl = 
">=1.17.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] +[[package]] +name = "aiohttp-retry" +version = "2.9.1" +description = "Simple retry client for aiohttp" +optional = true +python-versions = ">=3.7" +files = [ + {file = "aiohttp_retry-2.9.1-py3-none-any.whl", hash = "sha256:66d2759d1921838256a05a3f80ad7e724936f083e35be5abb5e16eed6be6dc54"}, + {file = "aiohttp_retry-2.9.1.tar.gz", hash = "sha256:8eb75e904ed4ee5c2ec242fefe85bf04240f685391c4879d8f541d6028ff01f1"}, +] + +[package.dependencies] +aiohttp = "*" + [[package]] name = "aiosignal" version = "1.3.1" @@ -1970,10 +1984,10 @@ multidict = ">=4.0" propcache = ">=0.2.0" [extras] -asyncio = ["aiohttp"] +asyncio = ["aiohttp", "aiohttp-retry"] grpc = ["googleapis-common-protos", "grpcio", "grpcio", "grpcio", "lz4", "protobuf", "protoc-gen-openapiv2"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "0145fb2ae02a1cdd6fe06b191a6761dcee4f4c67fe057b48d6b501d7b0b504da" +content-hash = "cc8b764abfc3d9ba774410ef118817c736c3c74a2bfa7f9f32a462628d804739" diff --git a/pyproject.toml b/pyproject.toml index 0a239e3a..7b987cbe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,7 @@ protoc-gen-openapiv2 = {version = "^0.0.1", optional = true } pinecone-plugin-interface = "^0.0.7" python-dateutil = ">=2.5.3" aiohttp = { version = ">=3.9.0", optional = true } +aiohttp-retry = { version = "^2.9.1", optional = true } [tool.poetry.group.types] optional = true @@ -102,10 +103,9 @@ vprof = "^0.38" tuna = "^0.5.11" python-dotenv = "^1.1.0" - [tool.poetry.extras] grpc = ["grpcio", "googleapis-common-protos", "lz4", "protobuf", "protoc-gen-openapiv2"] -asyncio = ["aiohttp"] +asyncio = ["aiohttp", "aiohttp-retry"] [build-system] requires = ["poetry-core"] diff --git a/scripts/test-async-retry.py b/scripts/test-async-retry.py new file mode 100644 index 00000000..ca5f9bb4 --- /dev/null +++ b/scripts/test-async-retry.py @@ -0,0 +1,16 @@ +import dotenv +import asyncio 
+import logging +from pinecone import PineconeAsyncio + +dotenv.load_dotenv() + +logging.basicConfig(level=logging.DEBUG) + + +async def main(): + async with PineconeAsyncio(host="http://localhost:8000") as pc: + await pc.db.index.list() + + +asyncio.run(main()) diff --git a/tests/unit/openapi_support/test_retries.py b/tests/unit/openapi_support/test_retries.py index 5f31221d..ff624938 100644 --- a/tests/unit/openapi_support/test_retries.py +++ b/tests/unit/openapi_support/test_retries.py @@ -2,7 +2,7 @@ from unittest.mock import patch, MagicMock from urllib3.exceptions import MaxRetryError from urllib3.util.retry import Retry -from pinecone.openapi_support.retries import JitterRetry +from pinecone.openapi_support.retry_urllib3 import JitterRetry def test_jitter_retry_backoff(): From b389c0c8f0adf8776e8fc4c393bbf7c6cd5ca64a Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 16 May 2025 16:22:37 -0400 Subject: [PATCH 46/48] Skip tests when only docs changes present --- .github/workflows/pr.yaml | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 1a24c615..3c434042 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -1,10 +1,36 @@ name: Pull Request on: - pull_request: {} + pull_request: + paths-ignore: + - 'docs/**' + - '*.md' + - '*.rst' + - '*.txt' + - '*.html' + - '*.css' + - '*.js' + - '*.png' + - '*.jpg' + - '*.jpeg' + - '*.gif' + - '*.svg' push: branches: - main + paths-ignore: + - 'docs/**' + - '*.md' + - '*.rst' + - '*.txt' + - '*.html' + - '*.css' + - '*.js' + - '*.png' + - '*.jpg' + - '*.jpeg' + - '*.gif' + - '*.svg' workflow_dispatch: {} jobs: From a28e13d64cb2c9682ef3c5246905d187783cda41 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 16 May 2025 16:30:08 -0400 Subject: [PATCH 47/48] Cancel CI jobs in progress on subsequent pushes --- .github/workflows/pr.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git 
a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 3c434042..e691e5fd 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -33,6 +33,10 @@ on: - '*.svg' workflow_dispatch: {} +concurrency: + group: 'ci-${{ github.workflow }}-${{ github.ref }}' + cancel-in-progress: true + jobs: linting: uses: './.github/workflows/lint.yaml' From 0a07f51f93eb42e8b7c8cc778cc8e46fc71bf9be Mon Sep 17 00:00:00 2001 From: Jennifer Hamon Date: Fri, 16 May 2025 16:44:42 -0400 Subject: [PATCH 48/48] Update the `upgrading.md` doc with v7 release notes (#493) ## Problem Need to update docs for release ## Solution ![kermit-the-frog-kermit-typing](https://github.com/user-attachments/assets/00921776-3740-4b70-9fbd-b32b23fb38d0) ## Type of Change - [x] Non-code change (docs, etc) --- README.md | 5 +- docs/upgrading.md | 193 ++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 194 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 60b7b3ae..747276ee 100644 --- a/README.md +++ b/README.md @@ -192,11 +192,8 @@ response = index.search_records( ## Pinecone Assistant ### Installing the Pinecone Assistant Python plugin -To interact with Pinecone Assistant using the Python SDK, install the `pinecone-plugin-assistant` package: +The `pinecone-plugin-assistant` package is now bundled by default when installing `pinecone`. It does not need to be installed separately in order to use Pinecone Assistant. -```shell -pip install --upgrade pinecone pinecone-plugin-assistant -``` For more information on Pinecone Assistant, see the [Pinecone Assistant documentation](https://docs.pinecone.io/guides/assistant/overview). diff --git a/docs/upgrading.md b/docs/upgrading.md index 70e9d588..de24a91d 100644 --- a/docs/upgrading.md +++ b/docs/upgrading.md @@ -3,6 +3,199 @@ > Please remove `pinecone-client` from your project dependencies and add `pinecone` instead to get > the latest updates. 
+# Upgrading from `6.x` to `7.x` + +There are no intentional breaking changes when moving from v6 to v7 of the SDK. The major version bump reflects the move from calling the `2025-01` to the `2025-04` version of the underlying API. + +Some internals of the client have been reorganized or moved, but we've made an effort to alias everything and show warning messages when appropriate. If you experience any unexpected breaking changes that cause you friction while upgrading, let us know and we'll try to smooth it out. + +## Useful additions in `7.x` + +New Features: +- [Pinecone Assistant](https://www.pinecone.io/product/assistant/): The assistant plugin is now bundled by default. You can simply start using it without installing anything additional. +- [Inference API](https://docs.pinecone.io/guides/get-started/overview#inference): List/view models from the model gallery via API +- [Backups](https://docs.pinecone.io/guides/manage-data/backups-overview): + - Create backup from serverless index + - Create serverless index from backup + - List/view backups + - List/view backup restore jobs +- [Bring Your Own Cloud (BYOC)](https://docs.pinecone.io/guides/production/bring-your-own-cloud): + - Create, list, describe, and delete BYOC indexes + +Other improvements: +- ~70% faster client instantiation time thanks to extensive refactoring to implement lazy loading. This means your app won't waste time loading code for features you're not using. +- Retries with exponential backoff are now enabled by default for REST calls (implemented for both urllib3 and aiohttp). +- We're following [PEP 561](https://typing.python.org/en/latest/spec/distributing.html#packaging-typed-libraries) and adding a `py.typed` marker file to indicate inline type information is present in the package. We're still working toward reaching full coverage with our type hints, but including this file allows some tools to find the inline definitions we have already implemented. 
+ + +### Backups for Serverless Indexes + +You can create backups from your serverless indexes and use these backups to create new indexes. Some fields such as `record_count` are initially empty but will be populated by the time a backup is ready for use. + +```python +from pinecone import Pinecone + +pc = Pinecone() + +index_name = 'example-index' +if not pc.has_index(name=index_name): + raise Exception('An index must exist before backing it up') + +backup = pc.create_backup( + index_name=index_name, + backup_name='example-backup', + description='testing out backups' +) +# { +# "backup_id": "4698a618-7e56-4a44-93bc-fc8f1371aa36", +# "source_index_name": "example-index", +# "source_index_id": "ec6fd44c-ab45-4873-97f3-f6b44b67e9bc", +# "status": "Initializing", +# "cloud": "aws", +# "region": "us-east-1", +# "tags": {}, +# "name": "example-backup", +# "description": "testing out backups", +# "dimension": null, +# "record_count": null, +# "namespace_count": null, +# "size_bytes": null, +# "created_at": "2025-05-16T18:44:28.480671533Z" +# } +``` + +Check the status of a backup + +```python +from pinecone import Pinecone + +pc = Pinecone() + +pc.describe_backup(backup_id='4698a618-7e56-4a44-93bc-fc8f1371aa36') +# { +# "backup_id": "4698a618-7e56-4a44-93bc-fc8f1371aa36", +# "source_index_name": "example-index", +# "source_index_id": "ec6fd44c-ab45-4873-97f3-f6b44b67e9bc", +# "status": "Ready", +# "cloud": "aws", +# "region": "us-east-1", +# "tags": {}, +# "name": "example-backup", +# "description": "testing out backups", +# "dimension": 768, +# "record_count": 1000, +# "namespace_count": 1, +# "size_bytes": 289656, +# "created_at": "2025-05-16T18:44:28.480691Z" +# } +``` + +You can use `list_backups` to see all of your backups and their current status. If you have a large number of backups, results will be paginated. You can control the pagination with optional parameters for `limit` and `pagination_token`. 
+ +```python + +from pinecone import Pinecone + +pc = Pinecone() + +# All backups +pc.list_backups() + +# Only backups associated with a particular index +pc.list_backups(index_name='my-index') +``` + +To create an index from a backup, use `create_index_from_backup`. + +```python +from pinecone import Pinecone + +pc = Pinecone() + +pc.create_index_from_backup( + name='index-from-backup', + backup_id='4698a618-7e56-4a44-93bc-fc8f1371aa36', + deletion_protection = "disabled", + tags={'env': 'testing'}, +) +``` + +Under the hood, a restore job is created to handle taking data from your backup and loading it into the newly created serverless index. You can check status of pending restore jobs with `pc.list_restore_jobs()` or `pc.describe_restore_job()` + +### Explore and discover models available in our Inference API + +You can now fetch a dynamic list of models supported by the Inference API. + +```python +from pinecone import Pinecone + +pc = Pinecone() + +# List all models +models = pc.inference.list_models() + +# List models, with model type filtering +models = pc.inference.list_models(type="embed") +models = pc.inference.list_models(type="rerank") + +# List models, with vector type filtering +models = pc.inference.list_models(vector_type="dense") +models = pc.inference.list_models(vector_type="sparse") + +# List models, with both type and vector type filtering +models = pc.inference.list_models(type="rerank", vector_type="dense") +``` + +Or, if you know the name of a model, you can get just those details + +``` +pc.inference.get_model(model_name='pinecone-rerank-v0') +# { +# "model": "pinecone-rerank-v0", +# "short_description": "A state of the art reranking model that out-performs competitors on widely accepted benchmarks. 
It can handle chunks up to 512 tokens (1-2 paragraphs)", +# "type": "rerank", +# "supported_parameters": [ +# { +# "parameter": "truncate", +# "type": "one_of", +# "value_type": "string", +# "required": false, +# "default": "END", +# "allowed_values": [ +# "END", +# "NONE" +# ] +# } +# ], +# "modality": "text", +# "max_sequence_length": 512, +# "max_batch_size": 100, +# "provider_name": "Pinecone", +# "supported_metrics": [] +# } +``` + +### Client support for BYOC (Bring Your Own Cloud) + +For customers using our [BYOC offering](https://docs.pinecone.io/guides/production/bring-your-own-cloud), you can now create indexes and list/describe indexes you have created in your cloud. + +```python +from pinecone import Pinecone, ByocSpec + +pc = Pinecone() + +pc.create_index( + name='example-byoc-index', + dimension=768, + metric='cosine', + spec=ByocSpec(environment='my-private-env'), + tags={ + 'env': 'testing' + }, + deletion_protection='enabled' +) +``` + # Upgrading from `5.x` to `6.x` ## Breaking changes in 6.x