diff --git a/.github/actions/cleanup-all/action.yml b/.github/actions/cleanup-all/action.yml
index b6322e88..a62da2c8 100644
--- a/.github/actions/cleanup-all/action.yml
+++ b/.github/actions/cleanup-all/action.yml
@@ -9,6 +9,10 @@ inputs:
     description: 'Delete all indexes and collections'
     required: false
     default: 'false'
+  PINECONE_ADDITIONAL_HEADERS:
+    description: 'Additional headers to send with the request'
+    required: false
+    default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}'

 runs:
   using: 'composite'
@@ -25,3 +29,4 @@ runs:
       env:
         PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }}
         DELETE_ALL: ${{ inputs.DELETE_ALL }}
+        PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }}
diff --git a/.github/actions/create-index-legacy/action.yml b/.github/actions/create-index-legacy/action.yml
index 8bfc9500..a22d5679 100644
--- a/.github/actions/create-index-legacy/action.yml
+++ b/.github/actions/create-index-legacy/action.yml
@@ -23,6 +23,10 @@ inputs:
   PINECONE_API_KEY:
     description: 'The Pinecone API key'
     required: true
+  PINECONE_ADDITIONAL_HEADERS:
+    description: 'Additional headers to send with the request'
+    required: false
+    default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}'

 runs:
   using: 'composite'
@@ -36,7 +40,7 @@ runs:
     shell: bash
     run: |
       pip install pinecone-client==${{ inputs.pinecone_client_version }}
-
+
   - name: Create index
     id: create-index
     shell: bash
     run: |
     env:
       PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }}
       PINECONE_ENVIRONMENT: ${{ inputs.PINECONE_ENVIRONMENT }}
+      PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }}
       INDEX_NAME: ${{ inputs.index_name }}
       DIMENSION: ${{ inputs.dimension }}
       METRIC: ${{ inputs.metric }}
diff --git a/.github/actions/create-index/action.yml b/.github/actions/create-index/action.yml
index b81dc1b9..95cfc2be 100644
--- a/.github/actions/create-index/action.yml
+++ b/.github/actions/create-index/action.yml
@@ -25,6 +25,11 @@ inputs:
   PINECONE_API_KEY:
     description: 'The Pinecone API key'
     required: true
+  PINECONE_ADDITIONAL_HEADERS:
+    description: 'Additional headers to send with the request'
+    required: false
+    default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}'
+
 outputs:
   index_name:
@@ -48,6 +53,7 @@ runs:
     run: poetry run python3 scripts/create.py
     env:
       PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }}
+      PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }}
       NAME_PREFIX: ${{ inputs.name_prefix }}
       REGION: ${{ inputs.region }}
       CLOUD: ${{ inputs.cloud }}
diff --git a/.github/actions/delete-index/action.yml b/.github/actions/delete-index/action.yml
index 358fb483..9e35c83a 100644
--- a/.github/actions/delete-index/action.yml
+++ b/.github/actions/delete-index/action.yml
@@ -8,6 +8,10 @@ inputs:
   PINECONE_API_KEY:
     description: 'The Pinecone API key'
     required: true
+  PINECONE_ADDITIONAL_HEADERS:
+    description: 'Additional headers to send with the request'
+    required: false
+    default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}'

 runs:
@@ -26,4 +30,5 @@ runs:
     run: poetry run python3 scripts/delete.py
     env:
       PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }}
-      INDEX_NAME: ${{ inputs.index_name }}
\ No newline at end of file
+      PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }}
+      INDEX_NAME: ${{ inputs.index_name }}
diff --git a/.github/actions/test-data-asyncio/action.yaml b/.github/actions/test-data-asyncio/action.yaml index
fb349ca9..e81c7c9a 100644 --- a/.github/actions/test-data-asyncio/action.yaml +++ b/.github/actions/test-data-asyncio/action.yaml @@ -15,6 +15,10 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' python_version: description: 'The version of Python to use' required: false @@ -41,6 +45,7 @@ runs: run: poetry run pytest tests/integration/data_asyncio --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }} + PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }} USE_GRPC: ${{ inputs.use_grpc }} SPEC: ${{ inputs.spec }} FRESHNESS_TIMEOUT_SECONDS: ${{ inputs.freshness_timeout_seconds }} diff --git a/.github/actions/test-data-plane/action.yaml b/.github/actions/test-data-plane/action.yaml index 3a1c1204..0cbc3023 100644 --- a/.github/actions/test-data-plane/action.yaml +++ b/.github/actions/test-data-plane/action.yaml @@ -18,6 +18,10 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' python_version: description: 'The version of Python to use' required: false @@ -55,6 +59,7 @@ runs: run: poetry run pytest tests/integration/data --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }} + PINECONE_ADDITIONAL_HEADERS: ${{ inputs.PINECONE_ADDITIONAL_HEADERS }} USE_GRPC: ${{ inputs.use_grpc }} METRIC: ${{ inputs.metric }} SPEC: ${{ inputs.spec }} diff --git a/.github/actions/test-dependency-asyncio-rest/action.yaml b/.github/actions/test-dependency-asyncio-rest/action.yaml index 192614e1..1efe4a8a 100644 --- a/.github/actions/test-dependency-asyncio-rest/action.yaml +++ b/.github/actions/test-dependency-asyncio-rest/action.yaml @@ -5,6 +5,10 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' index_name: description: 'The name of the index' required: true @@ -43,4 +47,5 @@ runs: command: poetry run pytest tests/dependency/asyncio-rest -s -v env: PINECONE_API_KEY: '${{ inputs.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '${{ inputs.PINECONE_ADDITIONAL_HEADERS }}' INDEX_NAME: '${{ inputs.index_name }}' diff --git a/.github/actions/test-dependency-grpc/action.yaml b/.github/actions/test-dependency-grpc/action.yaml index 08afcb18..4ba6d9ac 100644 --- a/.github/actions/test-dependency-grpc/action.yaml +++ b/.github/actions/test-dependency-grpc/action.yaml @@ -5,6 +5,10 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' index_name: description: 'The name of the index' required: true @@ -63,4 +67,5 @@ runs: command: poetry run pytest tests/dependency/grpc -s -v env: PINECONE_API_KEY: ${{ inputs.PINECONE_API_KEY }} + PINECONE_ADDITIONAL_HEADERS: ${{ 
inputs.PINECONE_ADDITIONAL_HEADERS }} INDEX_NAME: ${{ inputs.index_name }} diff --git a/.github/actions/test-dependency-rest/action.yaml b/.github/actions/test-dependency-rest/action.yaml index 0ba24446..a3487cf3 100644 --- a/.github/actions/test-dependency-rest/action.yaml +++ b/.github/actions/test-dependency-rest/action.yaml @@ -5,6 +5,10 @@ inputs: PINECONE_API_KEY: description: 'The Pinecone API key' required: true + PINECONE_ADDITIONAL_HEADERS: + description: 'Additional headers to send with the request' + required: false + default: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' index_name: description: 'The name of the index' required: true @@ -42,4 +46,5 @@ runs: command: poetry run pytest tests/dependency/rest -s -v env: PINECONE_API_KEY: '${{ inputs.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '${{ inputs.PINECONE_ADDITIONAL_HEADERS }}' INDEX_NAME: '${{ inputs.index_name }}' diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 1a24c615..e691e5fd 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -1,12 +1,42 @@ name: Pull Request on: - pull_request: {} + pull_request: + paths-ignore: + - 'docs/**' + - '*.md' + - '*.rst' + - '*.txt' + - '*.html' + - '*.css' + - '*.js' + - '*.png' + - '*.jpg' + - '*.jpeg' + - '*.gif' + - '*.svg' push: branches: - main + paths-ignore: + - 'docs/**' + - '*.md' + - '*.rst' + - '*.txt' + - '*.html' + - '*.css' + - '*.js' + - '*.png' + - '*.jpg' + - '*.jpeg' + - '*.gif' + - '*.svg' workflow_dispatch: {} +concurrency: + group: 'ci-${{ github.workflow }}-${{ github.ref }}' + cancel-in-progress: true + jobs: linting: uses: './.github/workflows/lint.yaml' diff --git a/.github/workflows/testing-dependency-asyncio.yaml b/.github/workflows/testing-dependency-asyncio.yaml index dd0b167b..c1b9e757 100644 --- a/.github/workflows/testing-dependency-asyncio.yaml +++ b/.github/workflows/testing-dependency-asyncio.yaml @@ -27,4 +27,5 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' aiohttp_version: '${{ matrix.aiohttp_version }}' diff --git a/.github/workflows/testing-dependency-grpc.yaml b/.github/workflows/testing-dependency-grpc.yaml index 261bf8dd..80be0065 100644 --- a/.github/workflows/testing-dependency-grpc.yaml +++ b/.github/workflows/testing-dependency-grpc.yaml @@ -53,6 +53,7 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' grpcio_version: '${{ matrix.grpcio_version }}' lz4_version: '${{ matrix.lz4_version }}' protobuf_version: '${{ matrix.protobuf_version }}' @@ -86,6 +87,7 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' grpcio_version: '${{ matrix.grpcio_version }}' lz4_version: '${{ matrix.lz4_version }}' protobuf_version: '${{ matrix.protobuf_version }}' @@ -118,6 +120,7 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: 
'{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' grpcio_version: '${{ matrix.grpcio_version }}' lz4_version: '${{ matrix.lz4_version }}' protobuf_version: '${{ matrix.protobuf_version }}' diff --git a/.github/workflows/testing-dependency-rest.yaml b/.github/workflows/testing-dependency-rest.yaml index c3662cff..403b6ee8 100644 --- a/.github/workflows/testing-dependency-rest.yaml +++ b/.github/workflows/testing-dependency-rest.yaml @@ -30,6 +30,7 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' urllib3_version: '${{ matrix.urllib3_version }}' @@ -53,6 +54,7 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' urllib3_version: '${{ matrix.urllib3_version }}' dependency-matrix-rest-313: @@ -75,4 +77,5 @@ jobs: python_version: '${{ matrix.python_version }}' index_name: '${{ inputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' urllib3_version: '${{ matrix.urllib3_version }}' diff --git a/.github/workflows/testing-dependency.yaml b/.github/workflows/testing-dependency.yaml index 73fe8061..21d613b4 100644 --- a/.github/workflows/testing-dependency.yaml +++ b/.github/workflows/testing-dependency.yaml @@ -19,6 +19,7 @@ jobs: name_prefix: depstest-${{ github.run_number }} dimension: 2 PINECONE_API_KEY: ${{ secrets.PINECONE_API_KEY }} + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' dependency-test-rest: uses: './.github/workflows/testing-dependency-rest.yaml' @@ -55,3 +56,4 @@ jobs: with: index_name: '${{ needs.deps-test-setup.outputs.index_name }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' diff --git a/.github/workflows/testing-integration-asyncio.yaml b/.github/workflows/testing-integration-asyncio.yaml index 8c8fab60..b45f789f 100644 --- a/.github/workflows/testing-integration-asyncio.yaml +++ b/.github/workflows/testing-integration-asyncio.yaml @@ -29,6 +29,7 @@ jobs: run: poetry run pytest tests/integration/data_asyncio --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: PINECONE_API_KEY: ${{ secrets.PINECONE_API_KEY }} + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' db-control-asyncio: name: db_control asyncio @@ -51,6 +52,7 @@ jobs: include_asyncio: true include_dev: true - name: 'db_control asyncio' - run: poetry run pytest tests/integration/control_asyncio --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG + run: poetry run pytest tests/integration/control_asyncio/*.py --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' diff --git a/.github/workflows/testing-integration.yaml b/.github/workflows/testing-integration.yaml index 8275a5f1..f71ef3a1 100644 --- 
a/.github/workflows/testing-integration.yaml +++ b/.github/workflows/testing-integration.yaml @@ -3,6 +3,36 @@ name: "Integration Tests" workflow_call: {} jobs: + reorg: + name: Resource ${{ matrix.test_suite }} + runs-on: ubuntu-latest + env: + PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' + strategy: + fail-fast: false + matrix: + python_version: [3.9, 3.12] + test_suite: + - control/resources/index + - control/resources/collections + - control/resources/backup + - control/resources/restore_job + - control_asyncio/resources/index + - control_asyncio/resources/backup + - control_asyncio/resources/restore_job + steps: + - uses: actions/checkout@v4 + - name: 'Set up Python ${{ matrix.python_version }}' + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python_version }}' + - name: Setup Poetry + uses: ./.github/actions/setup-poetry + with: + include_asyncio: true + - name: 'Run tests' + run: poetry run pytest tests/integration/${{ matrix.test_suite }} --retries 2 --retry-delay 35 -s -vv --log-cli-level=DEBUG inference: name: Inference tests @@ -23,8 +53,31 @@ jobs: - name: 'Run integration tests' run: poetry run pytest tests/integration/inference --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: - PINECONE_DEBUG_CURL: 'true' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' + + plugins: + name: Plugin installation + runs-on: ubuntu-latest + strategy: + matrix: + python_version: [3.9, 3.12] + steps: + - uses: actions/checkout@v4 + - name: 'Set up Python ${{ matrix.python_version }}' + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python_version }}' + - name: Setup Poetry + uses: ./.github/actions/setup-poetry + with: + include_asyncio: true + - name: 'Run integration tests' + run: poetry run pytest tests/integration/plugins --retries 2 --retry-delay 35 -s -vv --log-cli-level=DEBUG + env: + PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' + dependency-test-asyncio: @@ -37,6 +90,7 @@ jobs: runs-on: ubuntu-latest needs: - inference + - plugins strategy: fail-fast: false matrix: @@ -53,6 +107,7 @@ jobs: metric: 'cosine' spec: '{ "serverless": { "region": "us-west-2", "cloud": "aws" }}' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' freshness_timeout_seconds: 600 skip_weird_id_tests: 'true' @@ -94,6 +149,7 @@ jobs: runs-on: ubuntu-latest needs: - inference + - plugins strategy: matrix: testConfig: @@ -115,7 +171,7 @@ jobs: - name: 'Run integration tests (REST)' run: poetry run pytest tests/integration/control/serverless --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: - PINECONE_DEBUG_CURL: 'true' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' + PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client", "x-environment": "preprod-aws-0"}' SERVERLESS_CLOUD: '${{ matrix.testConfig.serverless.cloud }}' SERVERLESS_REGION: '${{ matrix.testConfig.serverless.region }}' diff --git a/README.md b/README.md index abaf4a00..776db44e 100644 --- a/README.md +++ b/README.md @@ -192,11 +192,8 @@ response = index.search_records( ## Pinecone Assistant ### Installing the 
Pinecone Assistant Python plugin

-To interact with Pinecone Assistant using the Python SDK, install the `pinecone-plugin-assistant` package:
+The `pinecone-plugin-assistant` package is now bundled by default when installing `pinecone`. It no longer needs to be installed separately in order to use Pinecone Assistant.

-```shell
-pip install --upgrade pinecone pinecone-plugin-assistant
-```

 For more information on Pinecone Assistant, see the [Pinecone Assistant documentation](https://docs.pinecone.io/guides/assistant/overview).
diff --git a/codegen/apis b/codegen/apis
index eb79d8ea..7e21ca9a 160000
--- a/codegen/apis
+++ b/codegen/apis
@@ -1 +1 @@
-Subproject commit eb79d8ea0c146aebe36c3769e19cbe9618db2d54
+Subproject commit 7e21ca9adb6a530ce11909d6209d69551f86e9bd
diff --git a/codegen/build-oas.sh b/codegen/build-oas.sh
index 04bc83b7..d4f83784 100755
--- a/codegen/build-oas.sh
+++ b/codegen/build-oas.sh
@@ -2,21 +2,13 @@

 set -eux -o pipefail

-version=$1 # e.g. 2024-07
-is_early_access=$2 # e.g. true
-
-# if is_early_access is true, add the "ea" module
-if [ "$is_early_access" = "true" ]; then
-  destination="pinecone/core_ea/openapi"
-  modules=("db_control" "db_data" "inference")
-  py_module_name="core_ea"
-  template_dir="codegen/python-oas-templates/templates5.2.0"
-else
-  destination="pinecone/core/openapi"
-  modules=("db_control" "db_data" "inference")
-  py_module_name="core"
-  template_dir="codegen/python-oas-templates/templates5.2.0"
-fi
+version=$1 # e.g. 2025-01
+
+
+destination="pinecone/core/openapi"
+modules=("db_control" "db_data" "inference")
+py_module_name="core"
+template_dir="codegen/python-oas-templates/templates5.2.0"

 build_dir="build"
diff --git a/codegen/python-oas-templates b/codegen/python-oas-templates
index 0f6ff685..57a4c44d 160000
--- a/codegen/python-oas-templates
+++ b/codegen/python-oas-templates
@@ -1 +1 @@
-Subproject commit 0f6ff68585355dd11e959e05859928d878d7854b
+Subproject commit 57a4c44d6f18bbabedfe25aa173359c37fb9f705
diff --git a/docs/upgrading.md b/docs/upgrading.md
index 70e9d588..de24a91d 100644
--- a/docs/upgrading.md
+++ b/docs/upgrading.md
@@ -3,6 +3,199 @@
 > Please remove `pinecone-client` from your project dependencies and add `pinecone` instead to get
 > the latest updates.

+# Upgrading from `6.x` to `7.x`
+
+There are no intentional breaking changes when moving from v6 to v7 of the SDK. The major version bump reflects the move from calling the `2025-01` version of the underlying API to calling the `2025-04` version.
+
+Some internals of the client have been reorganized or moved, but we've made an effort to alias everything and show warning messages when appropriate. If you experience any unexpected breaking changes that cause you friction while upgrading, let us know and we'll try to smooth it out.
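+
+For example, the `pinecone.control` module has moved to `pinecone.db_control`, but the old import path is still aliased and emits a `DeprecationWarning` pointing at the new location (a minimal sketch; the exact warning text may differ):
+
+```python
+import warnings
+
+# DeprecationWarnings are hidden by default outside of __main__; surface them
+warnings.simplefilter("default", DeprecationWarning)
+
+# The old module path still works, but warns that it moved to pinecone.db_control
+import pinecone.control
+
+# The primary entry point is unchanged
+from pinecone import Pinecone
+```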
+
+## Useful additions in `7.x`
+
+New Features:
+- [Pinecone Assistant](https://www.pinecone.io/product/assistant/): The assistant plugin is now bundled by default. You can simply start using it without installing anything additional.
+- [Inference API](https://docs.pinecone.io/guides/get-started/overview#inference): List/view models from the model gallery via API
+- [Backups](https://docs.pinecone.io/guides/manage-data/backups-overview):
+  - Create a backup from a serverless index
+  - Create a serverless index from a backup
+  - List/view backups
+  - List/view backup restore jobs
+- [Bring Your Own Cloud (BYOC)](https://docs.pinecone.io/guides/production/bring-your-own-cloud):
+  - Create, list, describe, and delete BYOC indexes
+
+Other improvements:
+- ~70% faster client instantiation time thanks to extensive refactoring to implement lazy loading. This means your app won't waste time loading code for features you're not using.
+- Retries with exponential backoff are now enabled by default for REST calls (implemented for both urllib3 and aiohttp).
+- We're following [PEP 561](https://typing.python.org/en/latest/spec/distributing.html#packaging-typed-libraries) and adding a `py.typed` marker file to indicate inline type information is present in the package. We're still working toward full coverage with our type hints, but including this file allows some tools to find the inline definitions we have already implemented.
+
+
+### Backups for Serverless Indexes
+
+You can create backups from your serverless indexes and use these backups to create new indexes. Some fields, such as `record_count`, are initially empty but will be populated by the time a backup is ready for use.
+
+```python
+from pinecone import Pinecone
+
+pc = Pinecone()
+
+index_name = 'example-index'
+if not pc.has_index(name=index_name):
+    raise Exception('An index must exist before backing it up')
+
+backup = pc.create_backup(
+    index_name=index_name,
+    backup_name='example-backup',
+    description='testing out backups'
+)
+# {
+#   "backup_id": "4698a618-7e56-4a44-93bc-fc8f1371aa36",
+#   "source_index_name": "example-index",
+#   "source_index_id": "ec6fd44c-ab45-4873-97f3-f6b44b67e9bc",
+#   "status": "Initializing",
+#   "cloud": "aws",
+#   "region": "us-east-1",
+#   "tags": {},
+#   "name": "example-backup",
+#   "description": "testing out backups",
+#   "dimension": null,
+#   "record_count": null,
+#   "namespace_count": null,
+#   "size_bytes": null,
+#   "created_at": "2025-05-16T18:44:28.480671533Z"
+# }
+```
+
+Check the status of a backup:
+
+```python
+from pinecone import Pinecone
+
+pc = Pinecone()
+
+pc.describe_backup(backup_id='4698a618-7e56-4a44-93bc-fc8f1371aa36')
+# {
+#   "backup_id": "4698a618-7e56-4a44-93bc-fc8f1371aa36",
+#   "source_index_name": "example-index",
+#   "source_index_id": "ec6fd44c-ab45-4873-97f3-f6b44b67e9bc",
+#   "status": "Ready",
+#   "cloud": "aws",
+#   "region": "us-east-1",
+#   "tags": {},
+#   "name": "example-backup",
+#   "description": "testing out backups",
+#   "dimension": 768,
+#   "record_count": 1000,
+#   "namespace_count": 1,
+#   "size_bytes": 289656,
+#   "created_at": "2025-05-16T18:44:28.480691Z"
+# }
+```
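+
+Because backups are prepared asynchronously, you may want to poll until the backup reaches the `Ready` status shown above before using it. A minimal sketch, assuming the `status` field is exposed as an attribute like other response models:
+
+```python
+import time
+from pinecone import Pinecone
+
+pc = Pinecone()
+
+backup_id = '4698a618-7e56-4a44-93bc-fc8f1371aa36'
+while pc.describe_backup(backup_id=backup_id).status != 'Ready':
+    # Status progresses from "Initializing" to "Ready" once the backup is usable
+    time.sleep(5)
+```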
+
+You can use `list_backups` to see all of your backups and their current status. If you have a large number of backups, results will be paginated. You can control the pagination with the optional `limit` and `pagination_token` parameters.
+
+```python
+from pinecone import Pinecone
+
+pc = Pinecone()
+
+# All backups
+pc.list_backups()
+
+# Only backups associated with a particular index
+pc.list_backups(index_name='my-index')
+```
+
+To create an index from a backup, use `create_index_from_backup`.
+
+```python
+from pinecone import Pinecone
+
+pc = Pinecone()
+
+pc.create_index_from_backup(
+    name='index-from-backup',
+    backup_id='4698a618-7e56-4a44-93bc-fc8f1371aa36',
+    deletion_protection="disabled",
+    tags={'env': 'testing'},
+)
+```
+
+Under the hood, a restore job is created to handle taking the data from your backup and loading it into the newly created serverless index. You can check the status of pending restore jobs with `pc.list_restore_jobs()` or `pc.describe_restore_job()`.
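+
+For example, a minimal sketch using only the calls named above:
+
+```python
+from pinecone import Pinecone
+
+pc = Pinecone()
+
+# See all restore jobs along with their current status
+restore_jobs = pc.list_restore_jobs()
+print(restore_jobs)
+```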
+
+### Explore and discover models available in our Inference API
+
+You can now fetch a dynamic list of models supported by the Inference API.
+
+```python
+from pinecone import Pinecone
+
+pc = Pinecone()
+
+# List all models
+models = pc.inference.list_models()
+
+# List models, with model type filtering
+models = pc.inference.list_models(type="embed")
+models = pc.inference.list_models(type="rerank")
+
+# List models, with vector type filtering
+models = pc.inference.list_models(vector_type="dense")
+models = pc.inference.list_models(vector_type="sparse")
+
+# List models, with both type and vector type filtering
+models = pc.inference.list_models(type="rerank", vector_type="dense")
+```
+
+Or, if you know the name of a model, you can fetch just its details:
+
+```python
+pc.inference.get_model(model_name='pinecone-rerank-v0')
+# {
+#   "model": "pinecone-rerank-v0",
+#   "short_description": "A state of the art reranking model that out-performs competitors on widely accepted benchmarks. It can handle chunks up to 512 tokens (1-2 paragraphs)",
+#   "type": "rerank",
+#   "supported_parameters": [
+#     {
+#       "parameter": "truncate",
+#       "type": "one_of",
+#       "value_type": "string",
+#       "required": false,
+#       "default": "END",
+#       "allowed_values": [
+#         "END",
+#         "NONE"
+#       ]
+#     }
+#   ],
+#   "modality": "text",
+#   "max_sequence_length": 512,
+#   "max_batch_size": 100,
+#   "provider_name": "Pinecone",
+#   "supported_metrics": []
+# }
+```
+
+### Client support for BYOC (Bring Your Own Cloud)
+
+For customers using our [BYOC offering](https://docs.pinecone.io/guides/production/bring-your-own-cloud), you can now create indexes and list/describe the indexes you have created in your cloud.
+
+```python
+from pinecone import Pinecone, ByocSpec
+
+pc = Pinecone()
+
+pc.create_index(
+    name='example-byoc-index',
+    dimension=768,
+    metric='cosine',
+    spec=ByocSpec(environment='my-private-env'),
+    tags={
+        'env': 'testing'
+    },
+    deletion_protection='enabled'
+)
+```
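+
+BYOC indexes appear alongside your other indexes, so the usual calls work for inspecting them (illustrative):
+
+```python
+from pinecone import Pinecone
+
+pc = Pinecone()
+
+# BYOC indexes are returned by the regular listing call
+for index in pc.list_indexes():
+    print(index.name)
+
+# ...and can be described by name like any other index
+pc.describe_index(name='example-byoc-index')
+```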
"CollectionDescription"), + "CollectionList": ("pinecone.db_control.models", "CollectionList"), + "IndexList": ("pinecone.db_control.models", "IndexList"), + "IndexModel": ("pinecone.db_control.models", "IndexModel"), + "IndexEmbed": ("pinecone.db_control.models", "IndexEmbed"), + "ByocSpec": ("pinecone.db_control.models", "ByocSpec"), + "ServerlessSpec": ("pinecone.db_control.models", "ServerlessSpec"), + "ServerlessSpecDefinition": ("pinecone.db_control.models", "ServerlessSpecDefinition"), + "PodSpec": ("pinecone.db_control.models", "PodSpec"), + "PodSpecDefinition": ("pinecone.db_control.models", "PodSpecDefinition"), + "PodType": ("pinecone.db_control.enums", "PodType"), + "RestoreJobModel": ("pinecone.db_control.models", "RestoreJobModel"), + "RestoreJobList": ("pinecone.db_control.models", "RestoreJobList"), + "BackupModel": ("pinecone.db_control.models", "BackupModel"), + "BackupList": ("pinecone.db_control.models", "BackupList"), +} + +_config_lazy_imports = { + "Config": ("pinecone.config", "Config"), + "ConfigBuilder": ("pinecone.config", "ConfigBuilder"), + "PineconeConfig": ("pinecone.config", "PineconeConfig"), +} + +# Define imports to be lazily loaded +_LAZY_IMPORTS = { + **_inference_lazy_imports, + **_db_data_lazy_imports, + **_db_control_lazy_imports, + **_config_lazy_imports, +} + +# Set up the lazy import handler +_setup_lazy_imports(_LAZY_IMPORTS) + # Raise an exception if the user is attempting to use the SDK with # deprecated plugins installed in their project. -check_for_deprecated_plugins() +_check_for_deprecated_plugins() # Silence annoying log messages from the plugin interface logging.getLogger("pinecone_plugin_interface").setLevel(logging.CRITICAL) + +__all__ = [ + "__version__", + # Deprecated top-levelfunctions + "init", + "create_index", + "delete_index", + "list_indexes", + "describe_index", + "configure_index", + "scale_index", + "create_collection", + "delete_collection", + "describe_collection", + "list_collections", + # Primary client classes + "Pinecone", + "PineconeAsyncio", + # All lazy-loaded types + *list(_LAZY_IMPORTS.keys()), + # Exception classes + "PineconeException", + "PineconeApiException", + "PineconeConfigurationError", + "PineconeProtocolError", + "PineconeApiAttributeError", + "PineconeApiTypeError", + "PineconeApiValueError", + "PineconeApiKeyError", + "NotFoundException", + "UnauthorizedException", + "ForbiddenException", + "ServiceException", + "ListConversionException", + "VectorDictionaryMissingKeysError", + "VectorDictionaryExcessKeysError", + "VectorTupleLengthError", + "SparseValuesTypeError", + "SparseValuesMissingKeysError", + "SparseValuesDictionaryExpectedError", +] diff --git a/pinecone/__init__.pyi b/pinecone/__init__.pyi new file mode 100644 index 00000000..f6873468 --- /dev/null +++ b/pinecone/__init__.pyi @@ -0,0 +1,128 @@ +from pinecone.config import Config +from pinecone.config import ConfigBuilder +from pinecone.config import PineconeConfig +from pinecone.inference import ( + RerankModel, + EmbedModel, + ModelInfo, + ModelInfoList, + EmbeddingsList, + RerankResult, +) +from pinecone.db_data.dataclasses import ( + Vector, + SparseValues, + SearchQuery, + SearchQueryVector, + SearchRerank, +) +from pinecone.db_data.models import ( + FetchResponse, + DeleteRequest, + DescribeIndexStatsRequest, + IndexDescription as DescribeIndexStatsResponse, + RpcStatus, + ScoredVector, + SingleQueryResults, + QueryRequest, + QueryResponse, + UpsertResponse, + UpdateRequest, +) +from pinecone.core.openapi.db_data.models import 
ImportErrorMode +from pinecone.db_data.errors import ( + VectorDictionaryMissingKeysError, + VectorDictionaryExcessKeysError, + VectorTupleLengthError, + SparseValuesTypeError, + SparseValuesMissingKeysError, + SparseValuesDictionaryExpectedError, +) +from pinecone.db_control.enums import ( + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, + PodIndexEnvironment, + Metric, + VectorType, + DeletionProtection, + PodType, +) +from pinecone.db_control.models import ( + CollectionDescription, + CollectionList, + IndexList, + IndexModel, + IndexEmbed, + ServerlessSpec, + ServerlessSpecDefinition, + PodSpec, + PodSpecDefinition, +) +from pinecone.pinecone import Pinecone +from pinecone.pinecone_asyncio import PineconeAsyncio + +# Re-export all the types +__all__ = [ + # Primary client classes + "Pinecone", + "PineconeAsyncio", + # Config classes + "Config", + "ConfigBuilder", + "PineconeConfig", + # Inference classes + "RerankModel", + "EmbedModel", + "ModelInfo", + "ModelInfoList", + "EmbeddingsList", + "RerankResult", + # Data classes + "Vector", + "SparseValues", + "SearchQuery", + "SearchQueryVector", + "SearchRerank", + # Model classes + "FetchResponse", + "DeleteRequest", + "DescribeIndexStatsRequest", + "DescribeIndexStatsResponse", + "RpcStatus", + "ScoredVector", + "SingleQueryResults", + "QueryRequest", + "QueryResponse", + "UpsertResponse", + "UpdateRequest", + "ImportErrorMode", + # Error classes + "VectorDictionaryMissingKeysError", + "VectorDictionaryExcessKeysError", + "VectorTupleLengthError", + "SparseValuesTypeError", + "SparseValuesMissingKeysError", + "SparseValuesDictionaryExpectedError", + # Control plane enums + "CloudProvider", + "AwsRegion", + "GcpRegion", + "AzureRegion", + "PodIndexEnvironment", + "Metric", + "VectorType", + "DeletionProtection", + "PodType", + # Control plane models + "CollectionDescription", + "CollectionList", + "IndexList", + "IndexModel", + "IndexEmbed", + "ServerlessSpec", + "ServerlessSpecDefinition", + "PodSpec", + "PodSpecDefinition", +] diff --git a/pinecone/config/__init__.py b/pinecone/config/__init__.py index 7abb7278..f292622f 100644 --- a/pinecone/config/__init__.py +++ b/pinecone/config/__init__.py @@ -2,6 +2,7 @@ import os from .config import ConfigBuilder, Config +from .openapi_configuration import Configuration as OpenApiConfiguration from .pinecone_config import PineconeConfig if os.getenv("PINECONE_DEBUG") is not None: diff --git a/pinecone/config/config.py b/pinecone/config/config.py index 01a703e0..9029c45a 100644 --- a/pinecone/config/config.py +++ b/pinecone/config/config.py @@ -1,9 +1,11 @@ -from typing import NamedTuple, Optional, Dict +from typing import NamedTuple, Optional, Dict, TYPE_CHECKING import os -from pinecone.exceptions.exceptions import PineconeConfigurationError -from pinecone.config.openapi import OpenApiConfigFactory -from pinecone.openapi_support.configuration import Configuration as OpenApiConfiguration +from pinecone.exceptions import PineconeConfigurationError +from pinecone.config.openapi_config_factory import OpenApiConfigFactory + +if TYPE_CHECKING: + from pinecone.config.openapi_configuration import Configuration as OpenApiConfiguration # Duplicated this util to help resolve circular imports @@ -81,8 +83,8 @@ def build( @staticmethod def build_openapi_config( - config: Config, openapi_config: Optional[OpenApiConfiguration] = None, **kwargs - ) -> OpenApiConfiguration: + config: Config, openapi_config: Optional["OpenApiConfiguration"] = None, **kwargs + ) -> "OpenApiConfiguration": if 
openapi_config: openapi_config = OpenApiConfigFactory.copy( openapi_config=openapi_config, api_key=config.api_key, host=config.host diff --git a/pinecone/config/openapi.py b/pinecone/config/openapi_config_factory.py similarity index 93% rename from pinecone/config/openapi.py rename to pinecone/config/openapi_config_factory.py index d6bdf702..56a1de64 100644 --- a/pinecone/config/openapi.py +++ b/pinecone/config/openapi_config_factory.py @@ -1,13 +1,11 @@ import sys -from typing import List, Optional +from typing import List, Optional, Tuple import certifi import socket import copy -from urllib3.connection import HTTPConnection - -from pinecone.openapi_support.configuration import Configuration as OpenApiConfiguration +from pinecone.config.openapi_configuration import Configuration as OpenApiConfiguration TCP_KEEPINTVL = 60 # Sec TCP_KEEPIDLE = 300 # Sec @@ -58,7 +56,7 @@ def _get_socket_options( keep_alive_idle_sec: int = TCP_KEEPIDLE, keep_alive_interval_sec: int = TCP_KEEPINTVL, keep_alive_tries: int = TCP_KEEPCNT, - ) -> List[tuple]: + ) -> List[Tuple[int, int, int]]: """ Returns the socket options to pass to OpenAPI's Rest client Args: @@ -72,7 +70,8 @@ def _get_socket_options( """ # Source: https://www.finbourne.com/blog/the-mysterious-hanging-client-tcp-keep-alives - socket_params = HTTPConnection.default_socket_options + # urllib3.connection.HTTPConnection.default_socket_options + socket_params = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] if not do_keep_alive: return socket_params diff --git a/pinecone/config/openapi_configuration.py b/pinecone/config/openapi_configuration.py new file mode 100644 index 00000000..c3ce79a4 --- /dev/null +++ b/pinecone/config/openapi_configuration.py @@ -0,0 +1,458 @@ +import copy +import logging +import multiprocessing + +from pinecone.exceptions import PineconeApiValueError +from typing import TypedDict + + +class HostSetting(TypedDict): + url: str + description: str + + +JSON_SCHEMA_VALIDATION_KEYWORDS = { + "multipleOf", + "maximum", + "exclusiveMaximum", + "minimum", + "exclusiveMinimum", + "maxLength", + "minLength", + "pattern", + "maxItems", + "minItems", +} + + +class Configuration: + """Class to hold the configuration of the API client. + + :param host: Base url + :param api_key: Dict to store API key(s). + Each entry in the dict specifies an API key. + The dict key is the name of the security scheme in the OAS specification. + The dict value is the API key secret. + :param api_key_prefix: Dict to store API prefix (e.g. Bearer) + The dict key is the name of the security scheme in the OAS specification. + The dict value is an API key prefix when generating the auth data. + :param discard_unknown_keys: Boolean value indicating whether to discard + unknown properties. A server may send a response that includes additional + properties that are not known by the client in the following scenarios: + 1. The OpenAPI document is incomplete, i.e. it does not match the server + implementation. + 2. The client was generated using an older version of the OpenAPI document + and the server has been upgraded since then. + If a schema in the OpenAPI document defines the additionalProperties attribute, + then all undeclared properties received by the server are injected into the + additional properties map. In that case, there are undeclared properties, and + nothing to discard. + :param disabled_client_side_validations (string): Comma-separated list of + JSON schema validation keywords to disable JSON schema structural validation + rules. 
The following keywords may be specified: multipleOf, maximum,
+        exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern,
+        maxItems, minItems.
+        By default, the validation is performed for data generated locally by the client
+        and data received from the server, independent of any validation performed by
+        the server side. If the input data does not satisfy the JSON schema validation
+        rules specified in the OpenAPI document, an exception is raised.
+        If disabled_client_side_validations is set, structural validation is
+        disabled. This can be useful to troubleshoot data validation problems, such as
+        when the OpenAPI document validation rules do not match the actual API data
+        received by the server.
+    :param server_operation_index: Mapping from operation ID to an index to server
+        configuration.
+    :param server_operation_variables: Mapping from operation ID to a mapping with
+        string values to replace variables in templated server configuration.
+        Variables with defined enum values are validated before substitution.
+    :param ssl_ca_cert: str - the path to a file of concatenated CA certificates
+        in PEM format
+
+    :Example:
+
+    API Key Authentication Example.
+    Given the following security scheme in the OpenAPI specification:
+      components:
+        securitySchemes:
+          cookieAuth:  # name for the security scheme
+            type: apiKey
+            in: cookie
+            name: JSESSIONID  # cookie name
+
+    You can programmatically set the cookie:
+
+    conf = pinecone.config.openapi_configuration.Configuration(
+        api_key={'cookieAuth': 'abc123'},
+        api_key_prefix={'cookieAuth': 'JSESSIONID'}
+    )
+
+    The following cookie will be added to the HTTP request:
+    Cookie: JSESSIONID abc123
+    """
+
+    _default = None
+
+    def __init__(
+        self,
+        host=None,
+        api_key=None,
+        api_key_prefix=None,
+        discard_unknown_keys=False,
+        disabled_client_side_validations="",
+        server_index=None,
+        server_variables=None,
+        server_operation_index=None,
+        server_operation_variables=None,
+        ssl_ca_cert=None,
+    ):
+        """Constructor"""
+        self._base_path = "https://api.pinecone.io" if host is None else host
+        """Default Base url
+        """
+        self.server_index = 0 if server_index is None and host is None else server_index
+        self.server_operation_index = server_operation_index or {}
+        """Default server index
+        """
+        self.server_variables = server_variables or {}
+        self.server_operation_variables = server_operation_variables or {}
+        """Default server variables
+        """
+        self.temp_folder_path = None
+        """Temp file folder for downloading files
+        """
+        # Authentication Settings
+        self.api_key = {}
+        if api_key:
+            self.api_key = api_key
+        """dict to store API key(s)
+        """
+        self.api_key_prefix = {}
+        if api_key_prefix:
+            self.api_key_prefix = api_key_prefix
+        """dict to store API prefix (e.g. Bearer)
+        """
+        self.refresh_api_key_hook = None
+        """function hook to refresh API key if expired
+        """
+        self.discard_unknown_keys = discard_unknown_keys
+        self.disabled_client_side_validations = disabled_client_side_validations
+        self.logger = {}
+        """Logging Settings
+        """
+        self.logger["package_logger"] = logging.getLogger("pinecone.openapi_support")
+        self.logger["urllib3_logger"] = logging.getLogger("urllib3")
+        self.logger_format = "%(asctime)s %(levelname)s %(message)s"
+        """Log format
+        """
+        self.logger_stream_handler = None
+        """Log stream handler
+        """
+        self.logger_file_handler = None
+        """Log file handler
+        """
+        self.logger_file = None
+        """Debug file location
+        """
+        # Initialize debug directly without using the property setter
+        self.debug = False
+        """Debug switch
+        """
+
+        self.verify_ssl = True
+        """SSL/TLS verification
+        Set this to false to skip verifying SSL certificate when calling API
+        from https server.
+        """
+        self.ssl_ca_cert = ssl_ca_cert
+        """Set this to customize the certificate file to verify the peer.
+        """
+        self.cert_file = None
+        """client certificate file
+        """
+        self.key_file = None
+        """client key file
+        """
+        self.assert_hostname = None
+        """Set this to True/False to enable/disable SSL hostname verification.
+        """
+
+        self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
+        """urllib3 connection pool's maximum number of connections saved
+        per pool. urllib3 uses 1 connection as default value, but this is
+        not the best value when you are making a lot of possibly parallel
+        requests to the same host, which is often the case here.
+        cpu_count * 5 is used as default value to increase performance.
+        """
+
+        self.proxy = None
+        """Proxy URL
+        """
+        self.proxy_headers = None
+        """Proxy headers
+        """
+        self.safe_chars_for_path_param = ""
+        """Safe chars for path_param
+        """
+        self.retries = None
+        """Adding retries to override urllib3 default value 3
+        """
+        # Enable client side validation
+        self.client_side_validation = True
+
+        # Options to pass down to the underlying urllib3 socket
+        self.socket_options = None
+
+    def __deepcopy__(self, memo):
+        cls = self.__class__
+        result = cls.__new__(cls)
+        memo[id(self)] = result
+        for k, v in self.__dict__.items():
+            if k not in ("logger", "logger_file_handler"):
+                setattr(result, k, copy.deepcopy(v, memo))
+        # shallow copy of loggers
+        result.logger = copy.copy(self.logger)
+        # use setters to configure loggers
+        result.logger_file = self.logger_file
+        result.debug = self.debug
+        return result
+
+    def __setattr__(self, name, value):
+        object.__setattr__(self, name, value)
+        if name == "disabled_client_side_validations":
+            s = set(filter(None, value.split(",")))
+            for v in s:
+                if v not in JSON_SCHEMA_VALIDATION_KEYWORDS:
+                    raise PineconeApiValueError("Invalid keyword: '{0}'".format(v))
+            self._disabled_client_side_validations = s
+
+    @classmethod
+    def set_default(cls, default):
+        """Set default instance of configuration.
+
+        It stores the default configuration, which can be
+        returned by the get_default_copy method.
+
+        :param default: object of Configuration
+        """
+        cls._default = copy.deepcopy(default)
+
+    @classmethod
+    def get_default_copy(cls):
+        """Return a new instance of configuration.
+
+        This method returns a newly created Configuration object based on the
+        default constructor, or a copy of the default configuration passed
+        via the set_default method.
+
+        :return: The configuration object.
+ """ + if cls._default is not None: + return copy.deepcopy(cls._default) + return Configuration() + + @property + def logger_file(self): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in self.logger.items(): + logger.addHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + return self._debug + + @debug.setter + def debug(self, value): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + if hasattr(self, "_debug"): + previous_debug = self._debug + else: + previous_debug = None + self._debug = value + + def enable_http_logging(): + from http import client as http_client + + http_client.HTTPConnection.debuglevel = 1 + + def disable_http_logging(): + from http import client as http_client + + http_client.HTTPConnection.debuglevel = 0 + + def set_default_log_level(c): + for _, logger in c.logger.items(): + logger.setLevel(logging.WARNING) + + if self._debug: + for _, logger in self.logger.items(): + logger.setLevel(logging.DEBUG) + enable_http_logging() + elif previous_debug is True and self._debug is False: + set_default_log_level(self) + disable_http_logging() + else: + # On the initial call, we don't need to do anything to http + # logging, since it's not enabled by default. + set_default_log_level(self) + + @property + def logger_format(self): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier, alias=None): + """Gets API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :param alias: The alternative identifier of apiKey. + :return: The token for api key authentication. + """ + if self.refresh_api_key_hook is not None: + self.refresh_api_key_hook(self) + key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None) + if key: + prefix = self.api_key_prefix.get(identifier) + if prefix: + return "%s %s" % (prefix, key) + else: + return key + + def auth_settings(self): + """Gets Auth Settings dict for api client. + + :return: The Auth Settings information dict. 
+ """ + auth = {} + if "ApiKeyAuth" in self.api_key: + auth["ApiKeyAuth"] = { + "type": "api_key", + "in": "header", + "key": "Api-Key", + "value": self.get_api_key_with_prefix("ApiKeyAuth"), + } + return auth + + def get_host_settings(self): + """Gets an array of host settings + + :return: An array of host settings + """ + return [{"url": "https://api.pinecone.io", "description": "Production API endpoints"}] + + def get_host_from_settings(self, index, variables=None, servers=None): + """Gets host URL based on the index and variables + :param index: array index of the host settings + :param variables: hash of variable and the corresponding value + :param servers: an array of host settings or None + :return: URL based on host settings + """ + if index is None: + return self._base_path + + variables = {} if variables is None else variables + servers = self.get_host_settings() if servers is None else servers + + try: + server = servers[index] + except IndexError: + raise ValueError( + "Invalid index {0} when selecting the host settings. Must be less than {1}".format( + index, len(servers) + ) + ) + + url = server["url"] + + # go through variables and replace placeholders + for variable_name, variable in server.get("variables", {}).items(): + used_value = variables.get(variable_name, variable["default_value"]) + + if "enum_values" in variable and used_value not in variable["enum_values"]: + raise ValueError( + "The variable `{0}` in the host URL has invalid value {1}. Must be {2}.".format( + variable_name, variables[variable_name], variable["enum_values"] + ) + ) + + url = url.replace("{" + variable_name + "}", used_value) + + return url + + @property + def host(self): + """Return generated host.""" + return self.get_host_from_settings(self.server_index, variables=self.server_variables) + + @host.setter + def host(self, value): + """Fix base path.""" + self._base_path = value + self.server_index = None + + def __repr__(self): + attrs = [ + f"host={self.host}", + "api_key=***", + f"api_key_prefix={self.api_key_prefix}", + f"connection_pool_maxsize={self.connection_pool_maxsize}", + f"discard_unknown_keys={self.discard_unknown_keys}", + f"disabled_client_side_validations={self.disabled_client_side_validations}", + f"server_index={self.server_index}", + f"server_variables={self.server_variables}", + f"server_operation_index={self.server_operation_index}", + f"server_operation_variables={self.server_operation_variables}", + f"ssl_ca_cert={self.ssl_ca_cert}", + ] + return f"Configuration({', '.join(attrs)})" diff --git a/pinecone/control/__init__.py b/pinecone/control/__init__.py index a26e352a..b45bc64e 100644 --- a/pinecone/control/__init__.py +++ b/pinecone/control/__init__.py @@ -1,6 +1,9 @@ -from .pinecone import Pinecone -from .pinecone_asyncio import PineconeAsyncio +import warnings -from .repr_overrides import install_repr_overrides +from pinecone.db_control import * -install_repr_overrides() +warnings.warn( + "The module at `pinecone.control` has moved to `pinecone.db_control`. 
" + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, +) diff --git a/pinecone/control/pinecone.py b/pinecone/control/pinecone.py deleted file mode 100644 index f3c8f404..00000000 --- a/pinecone/control/pinecone.py +++ /dev/null @@ -1,354 +0,0 @@ -import time -import logging -from typing import Optional, Dict, Union -from multiprocessing import cpu_count - -from .index_host_store import IndexHostStore -from .pinecone_interface import PineconeDBControlInterface - -from pinecone.config import PineconeConfig, ConfigBuilder - -from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi -from pinecone.openapi_support.api_client import ApiClient - - -from pinecone.utils import normalize_host, setup_openapi_client, PluginAware -from pinecone.core.openapi.db_control import API_VERSION -from pinecone.models import ( - ServerlessSpec, - PodSpec, - IndexModel, - IndexList, - CollectionList, - IndexEmbed, -) -from .langchain_import_warnings import _build_langchain_attribute_error_message -from pinecone.utils import docslinks -from pinecone.data import _Index, _Inference, _IndexAsyncio - -from pinecone.enums import ( - Metric, - VectorType, - DeletionProtection, - PodType, - CloudProvider, - AwsRegion, - GcpRegion, - AzureRegion, -) -from .types import CreateIndexForModelEmbedTypedDict -from .request_factory import PineconeDBControlRequestFactory - -logger = logging.getLogger(__name__) -""" @private """ - - -class Pinecone(PineconeDBControlInterface, PluginAware): - """ - A client for interacting with Pinecone's vector database. - - This class implements methods for managing and interacting with Pinecone resources - such as collections and indexes. - """ - - def __init__( - self, - api_key: Optional[str] = None, - host: Optional[str] = None, - proxy_url: Optional[str] = None, - proxy_headers: Optional[Dict[str, str]] = None, - ssl_ca_certs: Optional[str] = None, - ssl_verify: Optional[bool] = None, - additional_headers: Optional[Dict[str, str]] = {}, - pool_threads: Optional[int] = None, - **kwargs, - ): - for deprecated_kwarg in {"config", "openapi_config", "index_api"}: - if deprecated_kwarg in kwargs: - raise NotImplementedError( - f"Passing {deprecated_kwarg} is no longer supported. Please pass individual settings such as proxy_url, proxy_headers, ssl_ca_certs, and ssl_verify directly to the Pinecone constructor as keyword arguments. See the README at {docslinks['README']} for examples." 
- ) - - self.config = PineconeConfig.build( - api_key=api_key, - host=host, - additional_headers=additional_headers, - proxy_url=proxy_url, - proxy_headers=proxy_headers, - ssl_ca_certs=ssl_ca_certs, - ssl_verify=ssl_verify, - **kwargs, - ) - """ @private """ - - self.openapi_config = ConfigBuilder.build_openapi_config(self.config, **kwargs) - """ @private """ - - if pool_threads is None: - self.pool_threads = 5 * cpu_count() - """ @private """ - else: - self.pool_threads = pool_threads - """ @private """ - - self._inference = None # Lazy initialization - """ @private """ - - self.index_api = setup_openapi_client( - api_client_klass=ApiClient, - api_klass=ManageIndexesApi, - config=self.config, - openapi_config=self.openapi_config, - pool_threads=pool_threads, - api_version=API_VERSION, - ) - """ @private """ - - self.index_host_store = IndexHostStore() - """ @private """ - - self.load_plugins( - config=self.config, openapi_config=self.openapi_config, pool_threads=self.pool_threads - ) - - @property - def inference(self): - """ - Inference is a namespace where an instance of the `pinecone.data.features.inference.inference.Inference` class is lazily created and cached. - """ - if self._inference is None: - self._inference = _Inference(config=self.config, openapi_config=self.openapi_config) - return self._inference - - def create_index( - self, - name: str, - spec: Union[Dict, ServerlessSpec, PodSpec], - dimension: Optional[int] = None, - metric: Optional[Union[Metric, str]] = Metric.COSINE, - timeout: Optional[int] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, - vector_type: Optional[Union[VectorType, str]] = VectorType.DENSE, - tags: Optional[Dict[str, str]] = None, - ) -> IndexModel: - req = PineconeDBControlRequestFactory.create_index_request( - name=name, - spec=spec, - dimension=dimension, - metric=metric, - deletion_protection=deletion_protection, - vector_type=vector_type, - tags=tags, - ) - resp = self.index_api.create_index(create_index_request=req) - - if timeout == -1: - return IndexModel(resp) - return self.__poll_describe_index_until_ready(name, timeout) - - def create_index_for_model( - self, - name: str, - cloud: Union[CloudProvider, str], - region: Union[AwsRegion, GcpRegion, AzureRegion, str], - embed: Union[IndexEmbed, CreateIndexForModelEmbedTypedDict], - tags: Optional[Dict[str, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, - timeout: Optional[int] = None, - ) -> IndexModel: - req = PineconeDBControlRequestFactory.create_index_for_model_request( - name=name, - cloud=cloud, - region=region, - embed=embed, - tags=tags, - deletion_protection=deletion_protection, - ) - resp = self.index_api.create_index_for_model(req) - - if timeout == -1: - return IndexModel(resp) - return self.__poll_describe_index_until_ready(name, timeout) - - def __poll_describe_index_until_ready(self, name: str, timeout: Optional[int] = None): - description = None - - def is_ready() -> bool: - nonlocal description - description = self.describe_index(name=name) - return description.status.ready - - total_wait_time = 0 - if timeout is None: - # Wait indefinitely - while not is_ready(): - logger.debug( - f"Waiting for index {name} to be ready. Total wait time {total_wait_time} seconds." - ) - total_wait_time += 5 - time.sleep(5) - - else: - # Wait for a maximum of timeout seconds - while not is_ready(): - if timeout < 0: - logger.error(f"Index {name} is not ready. 
Timeout reached.") - link = docslinks["API_DESCRIBE_INDEX"] - timeout_msg = ( - f"Please call describe_index() to confirm index status. See docs at {link}" - ) - raise TimeoutError(timeout_msg) - - logger.debug( - f"Waiting for index {name} to be ready. Total wait time: {total_wait_time}" - ) - total_wait_time += 5 - time.sleep(5) - timeout -= 5 - - return description - - def delete_index(self, name: str, timeout: Optional[int] = None): - self.index_api.delete_index(name) - self.index_host_store.delete_host(self.config, name) - - if timeout == -1: - return - - if timeout is None: - while self.has_index(name): - time.sleep(5) - else: - while self.has_index(name) and timeout >= 0: - time.sleep(5) - timeout -= 5 - if timeout and timeout < 0: - raise ( - TimeoutError( - "Please call the list_indexes API ({}) to confirm if index is deleted".format( - "https://www.pinecone.io/docs/api/operation/list_indexes/" - ) - ) - ) - - def list_indexes(self) -> IndexList: - response = self.index_api.list_indexes() - return IndexList(response) - - def describe_index(self, name: str) -> IndexModel: - api_instance = self.index_api - description = api_instance.describe_index(name) - host = description.host - self.index_host_store.set_host(self.config, name, host) - - return IndexModel(description) - - def has_index(self, name: str) -> bool: - if name in self.list_indexes().names(): - return True - else: - return False - - def configure_index( - self, - name: str, - replicas: Optional[int] = None, - pod_type: Optional[Union[PodType, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = None, - tags: Optional[Dict[str, str]] = None, - ): - api_instance = self.index_api - description = self.describe_index(name=name) - - req = PineconeDBControlRequestFactory.configure_index_request( - description=description, - replicas=replicas, - pod_type=pod_type, - deletion_protection=deletion_protection, - tags=tags, - ) - api_instance.configure_index(name, configure_index_request=req) - - def create_collection(self, name: str, source: str) -> None: - req = PineconeDBControlRequestFactory.create_collection_request(name=name, source=source) - self.index_api.create_collection(create_collection_request=req) - - def list_collections(self) -> CollectionList: - response = self.index_api.list_collections() - return CollectionList(response) - - def delete_collection(self, name: str) -> None: - self.index_api.delete_collection(name) - - def describe_collection(self, name: str): - return self.index_api.describe_collection(name).to_dict() - - @staticmethod - def from_texts(*args, **kwargs): - """@private""" - raise AttributeError(_build_langchain_attribute_error_message("from_texts")) - - @staticmethod - def from_documents(*args, **kwargs): - """@private""" - raise AttributeError(_build_langchain_attribute_error_message("from_documents")) - - def Index(self, name: str = "", host: str = "", **kwargs): - if name == "" and host == "": - raise ValueError("Either name or host must be specified") - - pt = kwargs.pop("pool_threads", None) or self.pool_threads - api_key = self.config.api_key - openapi_config = self.openapi_config - - if host != "": - check_realistic_host(host) - - # Use host url if it is provided - index_host = normalize_host(host) - else: - # Otherwise, get host url from describe_index using the index name - index_host = self.index_host_store.get_host(self.index_api, self.config, name) - - return _Index( - host=index_host, - api_key=api_key, - pool_threads=pt, - openapi_config=openapi_config, - 
source_tag=self.config.source_tag, - **kwargs, - ) - - def IndexAsyncio(self, host: str, **kwargs): - api_key = self.config.api_key - openapi_config = self.openapi_config - - if host is None or host == "": - raise ValueError("A host must be specified") - - check_realistic_host(host) - index_host = normalize_host(host) - - return _IndexAsyncio( - host=index_host, - api_key=api_key, - openapi_config=openapi_config, - source_tag=self.config.source_tag, - **kwargs, - ) - - -def check_realistic_host(host: str) -> None: - """@private - - Checks whether a user-provided host string seems plausible. - Someone could erroneously pass an index name as the host by - mistake, and if they have done that we'd like to give them a - simple error message as feedback rather than attempting to - call the url and getting a more cryptic DNS resolution error. - """ - - if "." not in host and "localhost" not in host: - raise ValueError( - f"You passed '{host}' as the host but this does not appear to be valid. Call describe_index() to confirm the host of the index." - ) diff --git a/pinecone/control/pinecone_asyncio.py b/pinecone/control/pinecone_asyncio.py deleted file mode 100644 index 1373c8e4..00000000 --- a/pinecone/control/pinecone_asyncio.py +++ /dev/null @@ -1,340 +0,0 @@ -import logging -import asyncio -from typing import Optional, Dict, Union - -from pinecone.config import PineconeConfig, ConfigBuilder - -from pinecone.core.openapi.db_control.api.manage_indexes_api import AsyncioManageIndexesApi -from pinecone.openapi_support import AsyncioApiClient - -from pinecone.utils import normalize_host, setup_async_openapi_client -from pinecone.core.openapi.db_control import API_VERSION -from pinecone.models import ( - ServerlessSpec, - PodSpec, - IndexModel, - IndexList, - CollectionList, - IndexEmbed, -) -from pinecone.utils import docslinks - -from pinecone.data import _IndexAsyncio, _AsyncioInference -from pinecone.enums import ( - Metric, - VectorType, - DeletionProtection, - PodType, - CloudProvider, - AwsRegion, - GcpRegion, - AzureRegion, -) -from .types import CreateIndexForModelEmbedTypedDict -from .request_factory import PineconeDBControlRequestFactory -from .pinecone_interface_asyncio import PineconeAsyncioDBControlInterface -from .pinecone import check_realistic_host - -logger = logging.getLogger(__name__) -""" @private """ - - -class PineconeAsyncio(PineconeAsyncioDBControlInterface): - """ - `PineconeAsyncio` is an asyncio client for interacting with Pinecone's control plane API. - - This class implements methods for managing and interacting with Pinecone resources - such as collections and indexes. - - To perform data operations such as inserting and querying vectors, use the `IndexAsyncio` class. - - ```python - import asyncio - from pinecone import Pinecone - - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="my-index.pinecone.io") as idx: - await idx.upsert(vectors=[(1, [1, 2, 3]), (2, [4, 5, 6])]) - - asyncio.run(main()) - ``` - """ - - def __init__( - self, - api_key: Optional[str] = None, - host: Optional[str] = None, - proxy_url: Optional[str] = None, - # proxy_headers: Optional[Dict[str, str]] = None, - ssl_ca_certs: Optional[str] = None, - ssl_verify: Optional[bool] = None, - additional_headers: Optional[Dict[str, str]] = {}, - **kwargs, - ): - for deprecated_kwarg in {"config", "openapi_config"}: - if deprecated_kwarg in kwargs: - raise NotImplementedError( - f"Passing {deprecated_kwarg} is no longer supported. 
Please pass individual settings such as proxy_url, ssl_ca_certs, and ssl_verify directly to the Pinecone constructor as keyword arguments. See the README at {docslinks['README']} for examples." - ) - - for unimplemented_kwarg in {"proxy_headers"}: - if unimplemented_kwarg in kwargs: - raise NotImplementedError( - f"You have passed {unimplemented_kwarg} but this configuration has not been implemented for PineconeAsyncio." - ) - - self.config = PineconeConfig.build( - api_key=api_key, - host=host, - additional_headers=additional_headers, - proxy_url=proxy_url, - proxy_headers=None, - ssl_ca_certs=ssl_ca_certs, - ssl_verify=ssl_verify, - **kwargs, - ) - """ @private """ - - self.openapi_config = ConfigBuilder.build_openapi_config(self.config, **kwargs) - """ @private """ - - self._inference = None # Lazy initialization - """ @private """ - - self.index_api = setup_async_openapi_client( - api_client_klass=AsyncioApiClient, - api_klass=AsyncioManageIndexesApi, - config=self.config, - openapi_config=self.openapi_config, - api_version=API_VERSION, - ) - """ @private """ - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_value, traceback): - await self.close() - - async def close(self): - """Cleanup resources used by the Pinecone client. - - This method should be called when the client is no longer needed so that - it can clean up the aiohttp session and other resources. - - After close has been called, the client instance should not be used. - - ```python - import asyncio - from pinecone import PineconeAsyncio - - async def main(): - pc = PineconeAsyncio() - desc = await pc.describe_index(name="my-index") - await pc.close() - - asyncio.run(main()) - ``` - - If you are using the client as a context manager, the close method is called automatically - when exiting. - - ```python - import asyncio - from pinecone import PineconeAsyncio - - async def main(): - async with PineconeAsyncio() as pc: - desc = await pc.describe_index(name="my-index") - - # No need to call close in this case because the "async with" syntax - # automatically calls close when exiting the block.
- asyncio.run(main()) - ``` - - """ - await self.index_api.api_client.close() - - @property - def inference(self): - """Dynamically create and cache the Inference instance.""" - if self._inference is None: - self._inference = _AsyncioInference(api_client=self.index_api.api_client) - return self._inference - - async def create_index( - self, - name: str, - spec: Union[Dict, ServerlessSpec, PodSpec], - dimension: Optional[int] = None, - metric: Optional[Union[Metric, str]] = Metric.COSINE, - timeout: Optional[int] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, - vector_type: Optional[Union[VectorType, str]] = VectorType.DENSE, - tags: Optional[Dict[str, str]] = None, - ) -> IndexModel: - req = PineconeDBControlRequestFactory.create_index_request( - name=name, - spec=spec, - dimension=dimension, - metric=metric, - deletion_protection=deletion_protection, - vector_type=vector_type, - tags=tags, - ) - resp = await self.index_api.create_index(create_index_request=req) - - if timeout == -1: - return IndexModel(resp) - return await self.__poll_describe_index_until_ready(name, timeout) - - async def create_index_for_model( - self, - name: str, - cloud: Union[CloudProvider, str], - region: Union[AwsRegion, GcpRegion, AzureRegion, str], - embed: Union[IndexEmbed, CreateIndexForModelEmbedTypedDict], - tags: Optional[Dict[str, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, - timeout: Optional[int] = None, - ) -> IndexModel: - req = PineconeDBControlRequestFactory.create_index_for_model_request( - name=name, - cloud=cloud, - region=region, - embed=embed, - tags=tags, - deletion_protection=deletion_protection, - ) - resp = await self.index_api.create_index_for_model(req) - - if timeout == -1: - return IndexModel(resp) - return await self.__poll_describe_index_until_ready(name, timeout) - - async def __poll_describe_index_until_ready(self, name: str, timeout: Optional[int] = None): - description = None - - async def is_ready() -> bool: - nonlocal description - description = await self.describe_index(name=name) - return description.status.ready - - total_wait_time = 0 - if timeout is None: - # Wait indefinitely - while not await is_ready(): - logger.debug( - f"Waiting for index {name} to be ready. Total wait time {total_wait_time} seconds." - ) - total_wait_time += 5 - await asyncio.sleep(5) - - else: - # Wait for a maximum of timeout seconds - while not await is_ready(): - if timeout < 0: - logger.error(f"Index {name} is not ready. Timeout reached.") - link = docslinks["API_DESCRIBE_INDEX"] - timeout_msg = ( - f"Please call describe_index() to confirm index status. See docs at {link}" - ) - raise TimeoutError(timeout_msg) - - logger.debug( - f"Waiting for index {name} to be ready. 
Total wait time: {total_wait_time}" - ) - total_wait_time += 5 - await asyncio.sleep(5) - timeout -= 5 - - return description - - async def delete_index(self, name: str, timeout: Optional[int] = None): - await self.index_api.delete_index(name) - - if timeout == -1: - return - - if timeout is None: - while await self.has_index(name): - await asyncio.sleep(5) - else: - while await self.has_index(name) and timeout >= 0: - await asyncio.sleep(5) - timeout -= 5 - if timeout and timeout < 0: - raise ( - TimeoutError( - "Please call the list_indexes API ({}) to confirm if index is deleted".format( - "https://www.pinecone.io/docs/api/operation/list_indexes/" - ) - ) - ) - - async def list_indexes(self) -> IndexList: - response = await self.index_api.list_indexes() - return IndexList(response) - - async def describe_index(self, name: str) -> IndexModel: - description = await self.index_api.describe_index(name) - return IndexModel(description) - - async def has_index(self, name: str) -> bool: - available_indexes = await self.list_indexes() - if name in available_indexes.names(): - return True - else: - return False - - async def configure_index( - self, - name: str, - replicas: Optional[int] = None, - pod_type: Optional[Union[PodType, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = None, - tags: Optional[Dict[str, str]] = None, - ): - description = await self.describe_index(name=name) - - req = PineconeDBControlRequestFactory.configure_index_request( - description=description, - replicas=replicas, - pod_type=pod_type, - deletion_protection=deletion_protection, - tags=tags, - ) - await self.index_api.configure_index(name, configure_index_request=req) - - async def create_collection(self, name: str, source: str): - req = PineconeDBControlRequestFactory.create_collection_request(name=name, source=source) - await self.index_api.create_collection(create_collection_request=req) - - async def list_collections(self) -> CollectionList: - response = await self.index_api.list_collections() - return CollectionList(response) - - async def delete_collection(self, name: str): - await self.index_api.delete_collection(name) - - async def describe_collection(self, name: str): - return (await self.index_api.describe_collection(name)).to_dict() - - def IndexAsyncio(self, host: str, **kwargs) -> _IndexAsyncio: - api_key = self.config.api_key - openapi_config = self.openapi_config - - if host is None or host == "": - raise ValueError("A host must be specified") - - check_realistic_host(host) - index_host = normalize_host(host) - - return _IndexAsyncio( - host=index_host, - api_key=api_key, - openapi_config=openapi_config, - source_tag=self.config.source_tag, - **kwargs, - ) diff --git a/pinecone/core/openapi/db_control/__init__.py b/pinecone/core/openapi/db_control/__init__.py index e8106fac..31408552 100644 --- a/pinecone/core/openapi/db_control/__init__.py +++ b/pinecone/core/openapi/db_control/__init__.py @@ -7,7 +7,7 @@ This file is @generated using OpenAPI.
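Reviewer note on the two deleted client modules above: both implemented the same three-way `timeout` contract for `create_index` and `create_index_for_model`. `timeout=-1` returns the `IndexModel` immediately without polling, `timeout=None` polls `describe_index` every 5 seconds indefinitely, and a positive `timeout` counts down in 5-second steps before raising `TimeoutError`. A minimal standalone sketch of that loop, with `describe_fn` and `ReadyState` as illustrative stand-ins rather than SDK types:

```python
import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class ReadyState:
    """Stand-in for the status.ready flag the real loop reads off IndexModel."""

    ready: bool


def poll_until_ready(describe_fn: Callable[[], ReadyState], timeout: Optional[int]) -> ReadyState:
    # Callers handle timeout == -1 themselves by skipping the poll entirely.
    while True:
        state = describe_fn()
        if state.ready:
            return state
        if timeout is not None:
            if timeout < 0:
                raise TimeoutError("Call describe_index() to confirm index status.")
            timeout -= 5
        time.sleep(5)
```

The async variant in `pinecone_asyncio.py` was identical apart from awaiting the describe call and using `await asyncio.sleep(5)`.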
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -17,7 +17,7 @@ from pinecone.openapi_support.api_client import ApiClient # import Configuration -from pinecone.openapi_support.configuration import Configuration +from pinecone.config.openapi_configuration import Configuration # import exceptions from pinecone.openapi_support.exceptions import PineconeException @@ -27,4 +27,4 @@ from pinecone.openapi_support.exceptions import PineconeApiKeyError from pinecone.openapi_support.exceptions import PineconeApiException -API_VERSION = "2025-01" +API_VERSION = "2025-04" diff --git a/pinecone/core/openapi/db_control/api/manage_indexes_api.py b/pinecone/core/openapi/db_control/api/manage_indexes_api.py index 2d2f464d..ae478017 100644 --- a/pinecone/core/openapi/db_control/api/manage_indexes_api.py +++ b/pinecone/core/openapi/db_control/api/manage_indexes_api.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -23,17 +23,28 @@ none_type, validate_and_convert_types, ) +from pinecone.core.openapi.db_control.model.backup_list import BackupList +from pinecone.core.openapi.db_control.model.backup_model import BackupModel from pinecone.core.openapi.db_control.model.collection_list import CollectionList from pinecone.core.openapi.db_control.model.collection_model import CollectionModel from pinecone.core.openapi.db_control.model.configure_index_request import ConfigureIndexRequest +from pinecone.core.openapi.db_control.model.create_backup_request import CreateBackupRequest from pinecone.core.openapi.db_control.model.create_collection_request import CreateCollectionRequest from pinecone.core.openapi.db_control.model.create_index_for_model_request import ( CreateIndexForModelRequest, ) +from pinecone.core.openapi.db_control.model.create_index_from_backup_request import ( + CreateIndexFromBackupRequest, +) +from pinecone.core.openapi.db_control.model.create_index_from_backup_response import ( + CreateIndexFromBackupResponse, +) from pinecone.core.openapi.db_control.model.create_index_request import CreateIndexRequest from pinecone.core.openapi.db_control.model.error_response import ErrorResponse from pinecone.core.openapi.db_control.model.index_list import IndexList from pinecone.core.openapi.db_control.model.index_model import IndexModel +from pinecone.core.openapi.db_control.model.restore_job_list import RestoreJobList +from pinecone.core.openapi.db_control.model.restore_job_model import RestoreJobModel class ManageIndexesApi: @@ -52,7 +63,7 @@ def __configure_index( ): """Configure an index # noqa: E501 - This operation configures an existing index. For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501 + Configure an existing index. 
For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -123,12 +134,88 @@ def __configure_index( callable=__configure_index, ) + def __create_backup( + self, index_name, create_backup_request, **kwargs: ExtraOpenApiKwargsTypedDict + ): + """Create a backup of an index # noqa: E501 + + Create a backup of an index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_backup(index_name, create_backup_request, async_req=True) + >>> result = thread.get() + + Args: + index_name (str): Name of the index to backup + create_backup_request (CreateBackupRequest): The desired configuration for the backup. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + BackupModel + If the method is called asynchronously, returns the request + thread. 
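Usage sketch for the new sync `create_backup` endpoint above, assuming an authenticated `ApiClient` is wired in (auth setup elided) and that `CreateBackupRequest` accepts a `name` field; check the generated model for the exact attributes:

```python
from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi
from pinecone.core.openapi.db_control.model.create_backup_request import CreateBackupRequest

api = ManageIndexesApi()  # normally constructed with an authenticated ApiClient

# POST /indexes/{index_name}/backups
backup = api.create_backup(
    index_name="my-index",
    create_backup_request=CreateBackupRequest(name="my-index-backup"),  # assumed field
)
print(backup)  # BackupModel

# As documented above, async_req=True returns a thread-like handle instead:
thread = api.create_backup(
    index_name="my-index",
    create_backup_request=CreateBackupRequest(name="my-index-backup"),
    async_req=True,
)
backup = thread.get()
```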
+ """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["index_name"] = index_name + kwargs["create_backup_request"] = create_backup_request + return self.call_with_http_info(**kwargs) + + self.create_backup = _Endpoint( + settings={ + "response_type": (BackupModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}/backups", + "operation_id": "create_backup", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["index_name", "create_backup_request"], + "required": ["index_name", "create_backup_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "index_name": (str,), + "create_backup_request": (CreateBackupRequest,), + }, + "attribute_map": {"index_name": "index_name"}, + "location_map": {"index_name": "path", "create_backup_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__create_backup, + ) + def __create_collection( self, create_collection_request, **kwargs: ExtraOpenApiKwargsTypedDict ): """Create a collection # noqa: E501 - This operation creates a Pinecone collection. Serverless indexes do not support collections. # noqa: E501 + Create a Pinecone collection. Serverless indexes do not support collections. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -197,7 +284,7 @@ def __create_collection( def __create_index(self, create_index_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Create an index # noqa: E501 - This operation deploys a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#create-a-serverless-index). # noqa: E501 + Create a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/index-data/create-an-index). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -266,9 +353,9 @@ def __create_index(self, create_index_request, **kwargs: ExtraOpenApiKwargsTyped def __create_index_for_model( self, create_index_for_model_request, **kwargs: ExtraOpenApiKwargsTypedDict ): - """Create an index for an embedding model # noqa: E501 + """Create an index with integrated embedding # noqa: E501 - This operation creates a serverless integrated inference index for a specific embedding model. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) for available models and model details. # noqa: E501 + Create an index with integrated embedding. With this type of index, you provide source text, and Pinecone uses a [hosted embedding model](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) to convert the text automatically during [upsert](https://docs.pinecone.io/reference/api/2025-01/data-plane/upsert_records) and [search](https://docs.pinecone.io/reference/api/2025-01/data-plane/search_records). 
For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/index-data/create-an-index#integrated-embedding). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -334,18 +421,21 @@ def __create_index_for_model( callable=__create_index_for_model, ) - def __delete_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTypedDict): - """Delete a collection # noqa: E501 + def __create_index_from_backup_operation( + self, backup_id, create_index_from_backup_request, **kwargs: ExtraOpenApiKwargsTypedDict + ): + """Create an index from a backup # noqa: E501 - This operation deletes an existing collection. Serverless indexes do not support collections. # noqa: E501 + Create an index from a backup. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_collection(collection_name, async_req=True) + >>> thread = api.create_index_from_backup_operation(backup_id, create_index_from_backup_request, async_req=True) >>> result = thread.get() Args: - collection_name (str): The name of the collection. + backup_id (str): The ID of the backup to create an index from. + create_index_from_backup_request (CreateIndexFromBackupRequest): The desired configuration for the index created from a backup. Keyword Args: _return_http_data_only (bool): response data without head status @@ -366,26 +456,27 @@ def __delete_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTyped async_req (bool): execute request asynchronously Returns: - None + CreateIndexFromBackupResponse If the method is called asynchronously, returns the request thread. """ kwargs = self._process_openapi_kwargs(kwargs) - kwargs["collection_name"] = collection_name + kwargs["backup_id"] = backup_id + kwargs["create_index_from_backup_request"] = create_index_from_backup_request return self.call_with_http_info(**kwargs) - self.delete_collection = _Endpoint( + self.create_index_from_backup_operation = _Endpoint( settings={ - "response_type": None, + "response_type": (CreateIndexFromBackupResponse,), "auth": ["ApiKeyAuth"], - "endpoint_path": "/collections/{collection_name}", - "operation_id": "delete_collection", - "http_method": "DELETE", + "endpoint_path": "/backups/{backup_id}/create-index", + "operation_id": "create_index_from_backup_operation", + "http_method": "POST", "servers": None, }, params_map={ - "all": ["collection_name"], - "required": ["collection_name"], + "all": ["backup_id", "create_index_from_backup_request"], + "required": ["backup_id", "create_index_from_backup_request"], "nullable": [], "enum": [], "validation": [], @@ -393,28 +484,31 @@ def __delete_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTyped root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": {"collection_name": (str,)}, - "attribute_map": {"collection_name": "collection_name"}, - "location_map": {"collection_name": "path"}, + "openapi_types": { + "backup_id": (str,), + "create_index_from_backup_request": (CreateIndexFromBackupRequest,), + }, + "attribute_map": {"backup_id": "backup_id"}, + "location_map": {"backup_id": "path", "create_index_from_backup_request": "body"}, "collection_format_map": {}, }, - headers_map={"accept": ["application/json"], "content_type": []}, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__delete_collection, + 
callable=__create_index_from_backup_operation, ) - def __delete_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): - """Delete an index # noqa: E501 + def __delete_backup(self, backup_id, **kwargs: ExtraOpenApiKwargsTypedDict): + """Delete a backup # noqa: E501 - This operation deletes an existing index. # noqa: E501 + Delete a backup. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_index(index_name, async_req=True) + >>> thread = api.delete_backup(backup_id, async_req=True) >>> result = thread.get() Args: - index_name (str): The name of the index to delete. + backup_id (str): The ID of the backup to delete. Keyword Args: _return_http_data_only (bool): response data without head status @@ -440,21 +534,21 @@ def __delete_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): thread. """ kwargs = self._process_openapi_kwargs(kwargs) - kwargs["index_name"] = index_name + kwargs["backup_id"] = backup_id return self.call_with_http_info(**kwargs) - self.delete_index = _Endpoint( + self.delete_backup = _Endpoint( settings={ "response_type": None, "auth": ["ApiKeyAuth"], - "endpoint_path": "/indexes/{index_name}", - "operation_id": "delete_index", + "endpoint_path": "/backups/{backup_id}", + "operation_id": "delete_backup", "http_method": "DELETE", "servers": None, }, params_map={ - "all": ["index_name"], - "required": ["index_name"], + "all": ["backup_id"], + "required": ["backup_id"], "nullable": [], "enum": [], "validation": [], @@ -462,28 +556,28 @@ def __delete_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": {"index_name": (str,)}, - "attribute_map": {"index_name": "index_name"}, - "location_map": {"index_name": "path"}, + "openapi_types": {"backup_id": (str,)}, + "attribute_map": {"backup_id": "backup_id"}, + "location_map": {"backup_id": "path"}, "collection_format_map": {}, }, headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__delete_index, + callable=__delete_backup, ) - def __describe_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTypedDict): - """Describe a collection # noqa: E501 + def __delete_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTypedDict): + """Delete a collection # noqa: E501 - This operation gets a description of a collection. Serverless indexes do not support collections. # noqa: E501 + Delete an existing collection. Serverless indexes do not support collections. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.describe_collection(collection_name, async_req=True) + >>> thread = api.delete_collection(collection_name, async_req=True) >>> result = thread.get() Args: - collection_name (str): The name of the collection to be described. + collection_name (str): The name of the collection. Keyword Args: _return_http_data_only (bool): response data without head status @@ -504,7 +598,7 @@ def __describe_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTyp async_req (bool): execute request asynchronously Returns: - CollectionModel + None If the method is called asynchronously, returns the request thread. 
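Sketch of the restore path introduced above: create an index from a backup, then delete the backup. The `name` field on `CreateIndexFromBackupRequest` is an assumption; the endpoint paths come straight from the `_Endpoint` settings in this diff:

```python
from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi
from pinecone.core.openapi.db_control.model.create_index_from_backup_request import (
    CreateIndexFromBackupRequest,
)

api = ManageIndexesApi()  # authenticated ApiClient elided

# POST /backups/{backup_id}/create-index
resp = api.create_index_from_backup_operation(
    backup_id="backup-123",
    create_index_from_backup_request=CreateIndexFromBackupRequest(
        name="restored-index"  # hypothetical field; check the generated model
    ),
)
print(resp)  # CreateIndexFromBackupResponse

# DELETE /backups/{backup_id}; response_type is None, so success returns nothing
api.delete_backup(backup_id="backup-123")
```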
""" @@ -512,13 +606,13 @@ def __describe_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTyp kwargs["collection_name"] = collection_name return self.call_with_http_info(**kwargs) - self.describe_collection = _Endpoint( + self.delete_collection = _Endpoint( settings={ - "response_type": (CollectionModel,), + "response_type": None, "auth": ["ApiKeyAuth"], "endpoint_path": "/collections/{collection_name}", - "operation_id": "describe_collection", - "http_method": "GET", + "operation_id": "delete_collection", + "http_method": "DELETE", "servers": None, }, params_map={ @@ -538,21 +632,21 @@ def __describe_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTyp }, headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__describe_collection, + callable=__delete_collection, ) - def __describe_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): - """Describe an index # noqa: E501 + def __delete_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): + """Delete an index # noqa: E501 - Get a description of an index. # noqa: E501 + Delete an existing index. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.describe_index(index_name, async_req=True) + >>> thread = api.delete_index(index_name, async_req=True) >>> result = thread.get() Args: - index_name (str): The name of the index to be described. + index_name (str): The name of the index to delete. Keyword Args: _return_http_data_only (bool): response data without head status @@ -573,7 +667,7 @@ def __describe_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): async_req (bool): execute request asynchronously Returns: - IndexModel + None If the method is called asynchronously, returns the request thread. """ @@ -581,13 +675,13 @@ def __describe_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): kwargs["index_name"] = index_name return self.call_with_http_info(**kwargs) - self.describe_index = _Endpoint( + self.delete_index = _Endpoint( settings={ - "response_type": (IndexModel,), + "response_type": None, "auth": ["ApiKeyAuth"], "endpoint_path": "/indexes/{index_name}", - "operation_id": "describe_index", - "http_method": "GET", + "operation_id": "delete_index", + "http_method": "DELETE", "servers": None, }, params_map={ @@ -607,19 +701,21 @@ def __describe_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): }, headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__describe_index, + callable=__delete_index, ) - def __list_collections(self, **kwargs: ExtraOpenApiKwargsTypedDict): - """List collections # noqa: E501 + def __describe_backup(self, backup_id, **kwargs: ExtraOpenApiKwargsTypedDict): + """Describe a backup # noqa: E501 - This operation returns a list of all collections in a project. Serverless indexes do not support collections. # noqa: E501 + Get a description of a backup. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_collections(async_req=True) + >>> thread = api.describe_backup(backup_id, async_req=True) >>> result = thread.get() + Args: + backup_id (str): The ID of the backup to describe. 
Keyword Args: _return_http_data_only (bool): response data without head status @@ -640,46 +736,55 @@ def __list_collections(self, **kwargs: ExtraOpenApiKwargsTypedDict): async_req (bool): execute request asynchronously Returns: - CollectionList + BackupModel If the method is called asynchronously, returns the request thread. """ kwargs = self._process_openapi_kwargs(kwargs) + kwargs["backup_id"] = backup_id return self.call_with_http_info(**kwargs) - self.list_collections = _Endpoint( + self.describe_backup = _Endpoint( settings={ - "response_type": (CollectionList,), + "response_type": (BackupModel,), "auth": ["ApiKeyAuth"], - "endpoint_path": "/collections", - "operation_id": "list_collections", + "endpoint_path": "/backups/{backup_id}", + "operation_id": "describe_backup", "http_method": "GET", "servers": None, }, - params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + params_map={ + "all": ["backup_id"], + "required": ["backup_id"], + "nullable": [], + "enum": [], + "validation": [], + }, root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": {}, - "attribute_map": {}, - "location_map": {}, + "openapi_types": {"backup_id": (str,)}, + "attribute_map": {"backup_id": "backup_id"}, + "location_map": {"backup_id": "path"}, "collection_format_map": {}, }, headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__list_collections, + callable=__describe_backup, ) - def __list_indexes(self, **kwargs: ExtraOpenApiKwargsTypedDict): - """List indexes # noqa: E501 + def __describe_collection(self, collection_name, **kwargs: ExtraOpenApiKwargsTypedDict): + """Describe a collection # noqa: E501 - This operation returns a list of all indexes in a project. # noqa: E501 + Get a description of a collection. Serverless indexes do not support collections. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_indexes(async_req=True) + >>> thread = api.describe_collection(collection_name, async_req=True) >>> result = thread.get() + Args: + collection_name (str): The name of the collection to be described. Keyword Args: _return_http_data_only (bool): response data without head status @@ -700,57 +805,55 @@ def __list_indexes(self, **kwargs: ExtraOpenApiKwargsTypedDict): async_req (bool): execute request asynchronously Returns: - IndexList + CollectionModel If the method is called asynchronously, returns the request thread. 
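Since the generated endpoints surface non-2xx responses as exceptions rather than error payloads, a caller probing `describe_backup` with an unknown backup ID would catch `PineconeApiException` (re-exported in the `__init__.py` hunk above). A short sketch, auth elided:

```python
from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi
from pinecone.openapi_support.exceptions import PineconeApiException

api = ManageIndexesApi()  # authenticated ApiClient elided

try:
    # GET /backups/{backup_id}
    backup = api.describe_backup(backup_id="backup-123")
    print(backup)  # BackupModel
except PineconeApiException as e:
    print(f"describe_backup failed: {e}")  # e.g. the backup ID does not exist
```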
""" kwargs = self._process_openapi_kwargs(kwargs) + kwargs["collection_name"] = collection_name return self.call_with_http_info(**kwargs) - self.list_indexes = _Endpoint( + self.describe_collection = _Endpoint( settings={ - "response_type": (IndexList,), + "response_type": (CollectionModel,), "auth": ["ApiKeyAuth"], - "endpoint_path": "/indexes", - "operation_id": "list_indexes", + "endpoint_path": "/collections/{collection_name}", + "operation_id": "describe_collection", "http_method": "GET", "servers": None, }, - params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + params_map={ + "all": ["collection_name"], + "required": ["collection_name"], + "nullable": [], + "enum": [], + "validation": [], + }, root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": {}, - "attribute_map": {}, - "location_map": {}, + "openapi_types": {"collection_name": (str,)}, + "attribute_map": {"collection_name": "collection_name"}, + "location_map": {"collection_name": "path"}, "collection_format_map": {}, }, headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__list_indexes, + callable=__describe_collection, ) + def __describe_index(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): + """Describe an index # noqa: E501 -class AsyncioManageIndexesApi: - """NOTE: This class is @generated using OpenAPI - - Do not edit the class manually. - """ - - def __init__(self, api_client=None) -> None: - if api_client is None: - api_client = AsyncioApiClient() - self.api_client = api_client - - async def __configure_index(self, index_name, configure_index_request, **kwargs): - """Configure an index # noqa: E501 - - This operation configures an existing index. For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501 + Get a description of an index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.describe_index(index_name, async_req=True) + >>> result = thread.get() Args: - index_name (str): The name of the index to configure. - configure_index_request (ConfigureIndexRequest): The desired pod size and replica configuration for the index. + index_name (str): The name of the index to be described. Keyword Args: _return_http_data_only (bool): response data without head status @@ -768,27 +871,29 @@ async def __configure_index(self, index_name, configure_index_request, **kwargs) _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. + async_req (bool): execute request asynchronously Returns: IndexModel + If the method is called asynchronously, returns the request + thread. 
""" - self._process_openapi_kwargs(kwargs) + kwargs = self._process_openapi_kwargs(kwargs) kwargs["index_name"] = index_name - kwargs["configure_index_request"] = configure_index_request - return await self.call_with_http_info(**kwargs) + return self.call_with_http_info(**kwargs) - self.configure_index = _AsyncioEndpoint( + self.describe_index = _Endpoint( settings={ "response_type": (IndexModel,), "auth": ["ApiKeyAuth"], "endpoint_path": "/indexes/{index_name}", - "operation_id": "configure_index", - "http_method": "PATCH", + "operation_id": "describe_index", + "http_method": "GET", "servers": None, }, params_map={ - "all": ["index_name", "configure_index_request"], - "required": ["index_name", "configure_index_request"], + "all": ["index_name"], + "required": ["index_name"], "nullable": [], "enum": [], "validation": [], @@ -796,27 +901,28 @@ async def __configure_index(self, index_name, configure_index_request, **kwargs) root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": { - "index_name": (str,), - "configure_index_request": (ConfigureIndexRequest,), - }, + "openapi_types": {"index_name": (str,)}, "attribute_map": {"index_name": "index_name"}, - "location_map": {"index_name": "path", "configure_index_request": "body"}, + "location_map": {"index_name": "path"}, "collection_format_map": {}, }, - headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__configure_index, + callable=__describe_index, ) - async def __create_collection(self, create_collection_request, **kwargs): - """Create a collection # noqa: E501 + def __describe_restore_job(self, job_id, **kwargs: ExtraOpenApiKwargsTypedDict): + """Describe a restore job # noqa: E501 - This operation creates a Pinecone collection. Serverless indexes do not support collections. # noqa: E501 + Get a description of a restore job. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.describe_restore_job(job_id, async_req=True) + >>> result = thread.get() Args: - create_collection_request (CreateCollectionRequest): The desired configuration for the collection. + job_id (str): The ID of the restore job to describe. Keyword Args: _return_http_data_only (bool): response data without head status @@ -834,11 +940,561 @@ async def __create_collection(self, create_collection_request, **kwargs): _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. + async_req (bool): execute request asynchronously Returns: - CollectionModel + RestoreJobModel + If the method is called asynchronously, returns the request + thread. 
""" - self._process_openapi_kwargs(kwargs) + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["job_id"] = job_id + return self.call_with_http_info(**kwargs) + + self.describe_restore_job = _Endpoint( + settings={ + "response_type": (RestoreJobModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/restore-jobs/{job_id}", + "operation_id": "describe_restore_job", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["job_id"], + "required": ["job_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"job_id": (str,)}, + "attribute_map": {"job_id": "job_id"}, + "location_map": {"job_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__describe_restore_job, + ) + + def __list_collections(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """List collections # noqa: E501 + + List all collections in a project. Serverless indexes do not support collections. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_collections(async_req=True) + >>> result = thread.get() + + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + CollectionList + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_collections = _Endpoint( + settings={ + "response_type": (CollectionList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/collections", + "operation_id": "list_collections", + "http_method": "GET", + "servers": None, + }, + params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {}, + "attribute_map": {}, + "location_map": {}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_collections, + ) + + def __list_index_backups(self, index_name, **kwargs: ExtraOpenApiKwargsTypedDict): + """List backups for an index # noqa: E501 + + List all backups for an index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_index_backups(index_name, async_req=True) + >>> result = thread.get() + + Args: + index_name (str): Name of the backed up index + + Keyword Args: + limit (int): The number of results to return per page. [optional] if omitted the server will use the default value of 10. 
+ pagination_token (str): The token to use to retrieve the next page of results. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + BackupList + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["index_name"] = index_name + return self.call_with_http_info(**kwargs) + + self.list_index_backups = _Endpoint( + settings={ + "response_type": (BackupList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}/backups", + "operation_id": "list_index_backups", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["index_name", "limit", "pagination_token"], + "required": ["index_name"], + "nullable": [], + "enum": [], + "validation": ["limit"], + }, + root_map={ + "validations": {("limit",): {"inclusive_maximum": 100, "inclusive_minimum": 1}}, + "allowed_values": {}, + "openapi_types": { + "index_name": (str,), + "limit": (int,), + "pagination_token": (str,), + }, + "attribute_map": { + "index_name": "index_name", + "limit": "limit", + "pagination_token": "paginationToken", + }, + "location_map": { + "index_name": "path", + "limit": "query", + "pagination_token": "query", + }, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_index_backups, + ) + + def __list_indexes(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """List indexes # noqa: E501 + + List all indexes in a project. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_indexes(async_req=True) + >>> result = thread.get() + + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + IndexList + If the method is called asynchronously, returns the request + thread. 
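The validations above cap `limit` at 100 per page, and the `pagination_token` kwarg is sent as the `paginationToken` query parameter, so listing everything means following tokens. A paging sketch; the `page.data` and `page.pagination.next` attributes are assumptions about the generated `BackupList` model:

```python
from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi


def iter_index_backups(api: ManageIndexesApi, index_name: str, limit: int = 100):
    """Yield every backup for an index, following pagination tokens."""
    token = None
    while True:
        kwargs = {"limit": limit}  # validated to the 1..100 range above
        if token is not None:
            kwargs["pagination_token"] = token
        page = api.list_index_backups(index_name, **kwargs)  # BackupList
        yield from getattr(page, "data", [])  # assumed container attribute
        pagination = getattr(page, "pagination", None)
        token = getattr(pagination, "next", None) if pagination else None  # assumed
        if not token:
            return
```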
+ """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_indexes = _Endpoint( + settings={ + "response_type": (IndexList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes", + "operation_id": "list_indexes", + "http_method": "GET", + "servers": None, + }, + params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {}, + "attribute_map": {}, + "location_map": {}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_indexes, + ) + + def __list_project_backups(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """List backups for all indexes in a project # noqa: E501 + + List all backups for a project. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_project_backups(async_req=True) + >>> result = thread.get() + + + Keyword Args: + limit (int): The number of results to return per page. [optional] if omitted the server will use the default value of 10. + pagination_token (str): The token to use to retrieve the next page of results. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + BackupList + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_project_backups = _Endpoint( + settings={ + "response_type": (BackupList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/backups", + "operation_id": "list_project_backups", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["limit", "pagination_token"], + "required": [], + "nullable": [], + "enum": [], + "validation": ["limit"], + }, + root_map={ + "validations": {("limit",): {"inclusive_maximum": 100, "inclusive_minimum": 1}}, + "allowed_values": {}, + "openapi_types": {"limit": (int,), "pagination_token": (str,)}, + "attribute_map": {"limit": "limit", "pagination_token": "paginationToken"}, + "location_map": {"limit": "query", "pagination_token": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_project_backups, + ) + + def __list_restore_jobs(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """List restore jobs # noqa: E501 + + List all restore jobs for a project. # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_restore_jobs(async_req=True) + >>> result = thread.get() + + + Keyword Args: + limit (int): The number of results to return per page. [optional] if omitted the server will use the default value of 10. + pagination_token (str): The token to use to retrieve the next page of results. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + RestoreJobList + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_restore_jobs = _Endpoint( + settings={ + "response_type": (RestoreJobList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/restore-jobs", + "operation_id": "list_restore_jobs", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["limit", "pagination_token"], + "required": [], + "nullable": [], + "enum": [], + "validation": ["limit"], + }, + root_map={ + "validations": {("limit",): {"inclusive_maximum": 100, "inclusive_minimum": 1}}, + "allowed_values": {}, + "openapi_types": {"limit": (int,), "pagination_token": (str,)}, + "attribute_map": {"limit": "limit", "pagination_token": "paginationToken"}, + "location_map": {"limit": "query", "pagination_token": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_restore_jobs, + ) + + +class AsyncioManageIndexesApi: + """NOTE: This class is @generated using OpenAPI + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = AsyncioApiClient() + self.api_client = api_client + + async def __configure_index(self, index_name, configure_index_request, **kwargs): + """Configure an index # noqa: E501 + + Configure an existing index. For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501 + + + Args: + index_name (str): The name of the index to configure. + configure_index_request (ConfigureIndexRequest): The desired pod size and replica configuration for the index. 
+ + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + IndexModel + """ + self._process_openapi_kwargs(kwargs) + kwargs["index_name"] = index_name + kwargs["configure_index_request"] = configure_index_request + return await self.call_with_http_info(**kwargs) + + self.configure_index = _AsyncioEndpoint( + settings={ + "response_type": (IndexModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}", + "operation_id": "configure_index", + "http_method": "PATCH", + "servers": None, + }, + params_map={ + "all": ["index_name", "configure_index_request"], + "required": ["index_name", "configure_index_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "index_name": (str,), + "configure_index_request": (ConfigureIndexRequest,), + }, + "attribute_map": {"index_name": "index_name"}, + "location_map": {"index_name": "path", "configure_index_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__configure_index, + ) + + async def __create_backup(self, index_name, create_backup_request, **kwargs): + """Create a backup of an index # noqa: E501 + + Create a backup of an index. # noqa: E501 + + + Args: + index_name (str): Name of the index to backup + create_backup_request (CreateBackupRequest): The desired configuration for the backup. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
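+
+            Example (a minimal usage sketch; the index name is illustrative, and
+            the request fields assume CreateBackupRequest mirrors the optional
+            name and description attributes on BackupModel later in this diff):
+
+            >>> request = CreateBackupRequest(name="nightly-backup")
+            >>> backup = await api.create_backup("example-index", request)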
+ + Returns: + BackupModel + """ + self._process_openapi_kwargs(kwargs) + kwargs["index_name"] = index_name + kwargs["create_backup_request"] = create_backup_request + return await self.call_with_http_info(**kwargs) + + self.create_backup = _AsyncioEndpoint( + settings={ + "response_type": (BackupModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}/backups", + "operation_id": "create_backup", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["index_name", "create_backup_request"], + "required": ["index_name", "create_backup_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "index_name": (str,), + "create_backup_request": (CreateBackupRequest,), + }, + "attribute_map": {"index_name": "index_name"}, + "location_map": {"index_name": "path", "create_backup_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__create_backup, + ) + + async def __create_collection(self, create_collection_request, **kwargs): + """Create a collection # noqa: E501 + + Create a Pinecone collection. Serverless indexes do not support collections. # noqa: E501 + + + Args: + create_collection_request (CreateCollectionRequest): The desired configuration for the collection. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + CollectionModel + """ + self._process_openapi_kwargs(kwargs) kwargs["create_collection_request"] = create_collection_request return await self.call_with_http_info(**kwargs) @@ -874,7 +1530,7 @@ async def __create_collection(self, create_collection_request, **kwargs): async def __create_index(self, create_index_request, **kwargs): """Create an index # noqa: E501 - This operation deploys a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/indexes/create-an-index#create-a-serverless-index). # noqa: E501 + Create a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/index-data/create-an-index). 
# noqa: E501 Args: @@ -923,24 +1579,155 @@ async def __create_index(self, create_index_request, **kwargs): root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": {"create_index_request": (CreateIndexRequest,)}, - "attribute_map": {}, - "location_map": {"create_index_request": "body"}, + "openapi_types": {"create_index_request": (CreateIndexRequest,)}, + "attribute_map": {}, + "location_map": {"create_index_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__create_index, + ) + + async def __create_index_for_model(self, create_index_for_model_request, **kwargs): + """Create an index with integrated embedding # noqa: E501 + + Create an index with integrated embedding. With this type of index, you provide source text, and Pinecone uses a [hosted embedding model](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) to convert the text automatically during [upsert](https://docs.pinecone.io/reference/api/2025-01/data-plane/upsert_records) and [search](https://docs.pinecone.io/reference/api/2025-01/data-plane/search_records). For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/index-data/create-an-index#integrated-embedding). # noqa: E501 + + + Args: + create_index_for_model_request (CreateIndexForModelRequest): The desired configuration for the index and associated embedding model. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + IndexModel + """ + self._process_openapi_kwargs(kwargs) + kwargs["create_index_for_model_request"] = create_index_for_model_request + return await self.call_with_http_info(**kwargs) + + self.create_index_for_model = _AsyncioEndpoint( + settings={ + "response_type": (IndexModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/create-for-model", + "operation_id": "create_index_for_model", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["create_index_for_model_request"], + "required": ["create_index_for_model_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"create_index_for_model_request": (CreateIndexForModelRequest,)}, + "attribute_map": {}, + "location_map": {"create_index_for_model_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__create_index_for_model, + ) + + async def __create_index_from_backup_operation( + self, backup_id, create_index_from_backup_request, **kwargs + ): + """Create an index from a backup # noqa: E501 + + Create an index from a backup. 
# noqa: E501 + + + Args: + backup_id (str): The ID of the backup to create an index from. + create_index_from_backup_request (CreateIndexFromBackupRequest): The desired configuration for the index created from a backup. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + CreateIndexFromBackupResponse + """ + self._process_openapi_kwargs(kwargs) + kwargs["backup_id"] = backup_id + kwargs["create_index_from_backup_request"] = create_index_from_backup_request + return await self.call_with_http_info(**kwargs) + + self.create_index_from_backup_operation = _AsyncioEndpoint( + settings={ + "response_type": (CreateIndexFromBackupResponse,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/backups/{backup_id}/create-index", + "operation_id": "create_index_from_backup_operation", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["backup_id", "create_index_from_backup_request"], + "required": ["backup_id", "create_index_from_backup_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "backup_id": (str,), + "create_index_from_backup_request": (CreateIndexFromBackupRequest,), + }, + "attribute_map": {"backup_id": "backup_id"}, + "location_map": {"backup_id": "path", "create_index_from_backup_request": "body"}, "collection_format_map": {}, }, headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__create_index, + callable=__create_index_from_backup_operation, ) - async def __create_index_for_model(self, create_index_for_model_request, **kwargs): - """Create an index for an embedding model # noqa: E501 + async def __delete_backup(self, backup_id, **kwargs): + """Delete a backup # noqa: E501 - This operation creates a serverless integrated inference index for a specific embedding model. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) for available models and model details. # noqa: E501 + Delete a backup. # noqa: E501 Args: - create_index_for_model_request (CreateIndexForModelRequest): The desired configuration for the index and associated embedding model. + backup_id (str): The ID of the backup to delete. Keyword Args: _return_http_data_only (bool): response data without head status @@ -960,24 +1747,24 @@ async def __create_index_for_model(self, create_index_for_model_request, **kwarg Default is True. 
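+
+            Example (a minimal usage sketch; the backup ID is illustrative):
+
+            >>> await api.delete_backup("example-backup-id")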
Returns: - IndexModel + None """ self._process_openapi_kwargs(kwargs) - kwargs["create_index_for_model_request"] = create_index_for_model_request + kwargs["backup_id"] = backup_id return await self.call_with_http_info(**kwargs) - self.create_index_for_model = _AsyncioEndpoint( + self.delete_backup = _AsyncioEndpoint( settings={ - "response_type": (IndexModel,), + "response_type": None, "auth": ["ApiKeyAuth"], - "endpoint_path": "/indexes/create-for-model", - "operation_id": "create_index_for_model", - "http_method": "POST", + "endpoint_path": "/backups/{backup_id}", + "operation_id": "delete_backup", + "http_method": "DELETE", "servers": None, }, params_map={ - "all": ["create_index_for_model_request"], - "required": ["create_index_for_model_request"], + "all": ["backup_id"], + "required": ["backup_id"], "nullable": [], "enum": [], "validation": [], @@ -985,20 +1772,20 @@ async def __create_index_for_model(self, create_index_for_model_request, **kwarg root_map={ "validations": {}, "allowed_values": {}, - "openapi_types": {"create_index_for_model_request": (CreateIndexForModelRequest,)}, - "attribute_map": {}, - "location_map": {"create_index_for_model_request": "body"}, + "openapi_types": {"backup_id": (str,)}, + "attribute_map": {"backup_id": "backup_id"}, + "location_map": {"backup_id": "path"}, "collection_format_map": {}, }, - headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + headers_map={"accept": ["application/json"], "content_type": []}, api_client=api_client, - callable=__create_index_for_model, + callable=__delete_backup, ) async def __delete_collection(self, collection_name, **kwargs): """Delete a collection # noqa: E501 - This operation deletes an existing collection. Serverless indexes do not support collections. # noqa: E501 + Delete an existing collection. Serverless indexes do not support collections. # noqa: E501 Args: @@ -1060,7 +1847,7 @@ async def __delete_collection(self, collection_name, **kwargs): async def __delete_index(self, index_name, **kwargs): """Delete an index # noqa: E501 - This operation deletes an existing index. # noqa: E501 + Delete an existing index. # noqa: E501 Args: @@ -1119,10 +1906,72 @@ async def __delete_index(self, index_name, **kwargs): callable=__delete_index, ) + async def __describe_backup(self, backup_id, **kwargs): + """Describe a backup # noqa: E501 + + Get a description of a backup. # noqa: E501 + + + Args: + backup_id (str): The ID of the backup to describe. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
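+
+            Example (a minimal usage sketch; the backup ID is illustrative):
+
+            >>> backup = await api.describe_backup("example-backup-id")
+            >>> backup.status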
+ + Returns: + BackupModel + """ + self._process_openapi_kwargs(kwargs) + kwargs["backup_id"] = backup_id + return await self.call_with_http_info(**kwargs) + + self.describe_backup = _AsyncioEndpoint( + settings={ + "response_type": (BackupModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/backups/{backup_id}", + "operation_id": "describe_backup", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["backup_id"], + "required": ["backup_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"backup_id": (str,)}, + "attribute_map": {"backup_id": "backup_id"}, + "location_map": {"backup_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__describe_backup, + ) + async def __describe_collection(self, collection_name, **kwargs): """Describe a collection # noqa: E501 - This operation gets a description of a collection. Serverless indexes do not support collections. # noqa: E501 + Get a description of a collection. Serverless indexes do not support collections. # noqa: E501 Args: @@ -1243,10 +2092,72 @@ async def __describe_index(self, index_name, **kwargs): callable=__describe_index, ) + async def __describe_restore_job(self, job_id, **kwargs): + """Describe a restore job # noqa: E501 + + Get a description of a restore job. # noqa: E501 + + + Args: + job_id (str): The ID of the restore job to describe. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + RestoreJobModel + """ + self._process_openapi_kwargs(kwargs) + kwargs["job_id"] = job_id + return await self.call_with_http_info(**kwargs) + + self.describe_restore_job = _AsyncioEndpoint( + settings={ + "response_type": (RestoreJobModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/restore-jobs/{job_id}", + "operation_id": "describe_restore_job", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["job_id"], + "required": ["job_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"job_id": (str,)}, + "attribute_map": {"job_id": "job_id"}, + "location_map": {"job_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__describe_restore_job, + ) + async def __list_collections(self, **kwargs): """List collections # noqa: E501 - This operation returns a list of all collections in a project. Serverless indexes do not support collections. # noqa: E501 + List all collections in a project. Serverless indexes do not support collections. 
# noqa: E501 @@ -1296,10 +2207,86 @@ async def __list_collections(self, **kwargs): callable=__list_collections, ) + async def __list_index_backups(self, index_name, **kwargs): + """List backups for an index # noqa: E501 + + List all backups for an index. # noqa: E501 + + + Args: + index_name (str): Name of the backed up index + + Keyword Args: + limit (int): The number of results to return per page. [optional] if omitted the server will use the default value of 10. + pagination_token (str): The token to use to retrieve the next page of results. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + BackupList + """ + self._process_openapi_kwargs(kwargs) + kwargs["index_name"] = index_name + return await self.call_with_http_info(**kwargs) + + self.list_index_backups = _AsyncioEndpoint( + settings={ + "response_type": (BackupList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}/backups", + "operation_id": "list_index_backups", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["index_name", "limit", "pagination_token"], + "required": ["index_name"], + "nullable": [], + "enum": [], + "validation": ["limit"], + }, + root_map={ + "validations": {("limit",): {"inclusive_maximum": 100, "inclusive_minimum": 1}}, + "allowed_values": {}, + "openapi_types": { + "index_name": (str,), + "limit": (int,), + "pagination_token": (str,), + }, + "attribute_map": { + "index_name": "index_name", + "limit": "limit", + "pagination_token": "paginationToken", + }, + "location_map": { + "index_name": "path", + "limit": "query", + "pagination_token": "query", + }, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_index_backups, + ) + async def __list_indexes(self, **kwargs): """List indexes # noqa: E501 - This operation returns a list of all indexes in a project. # noqa: E501 + List all indexes in a project. # noqa: E501 @@ -1348,3 +2335,125 @@ async def __list_indexes(self, **kwargs): api_client=api_client, callable=__list_indexes, ) + + async def __list_project_backups(self, **kwargs): + """List backups for all indexes in a project # noqa: E501 + + List all backups for a project. # noqa: E501 + + + + Keyword Args: + limit (int): The number of results to return per page. [optional] if omitted the server will use the default value of 10. + pagination_token (str): The token to use to retrieve the next page of results. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. 
If
+                    one number provided, it will be total request timeout. It can also
+                    be a pair (tuple) of (connection, read) timeouts.
+                    Default is None.
+                _check_input_type (bool): specifies if type checking
+                    should be done on the data sent to the server.
+                    Default is True.
+                _check_return_type (bool): specifies if type checking
+                    should be done on the data received from the server.
+                    Default is True.
+
+            Returns:
+                BackupList
+            """
+            self._process_openapi_kwargs(kwargs)
+            return await self.call_with_http_info(**kwargs)
+
+        self.list_project_backups = _AsyncioEndpoint(
+            settings={
+                "response_type": (BackupList,),
+                "auth": ["ApiKeyAuth"],
+                "endpoint_path": "/backups",
+                "operation_id": "list_project_backups",
+                "http_method": "GET",
+                "servers": None,
+            },
+            params_map={
+                "all": ["limit", "pagination_token"],
+                "required": [],
+                "nullable": [],
+                "enum": [],
+                "validation": ["limit"],
+            },
+            root_map={
+                "validations": {("limit",): {"inclusive_maximum": 100, "inclusive_minimum": 1}},
+                "allowed_values": {},
+                "openapi_types": {"limit": (int,), "pagination_token": (str,)},
+                "attribute_map": {"limit": "limit", "pagination_token": "paginationToken"},
+                "location_map": {"limit": "query", "pagination_token": "query"},
+                "collection_format_map": {},
+            },
+            headers_map={"accept": ["application/json"], "content_type": []},
+            api_client=api_client,
+            callable=__list_project_backups,
+        )
+
+        async def __list_restore_jobs(self, **kwargs):
+            """List restore jobs # noqa: E501
+
+            List all restore jobs for a project. # noqa: E501
+
+
+
+            Keyword Args:
+                limit (int): The number of results to return per page. [optional] if omitted the server will use the default value of 10.
+                pagination_token (str): The token to use to retrieve the next page of results. [optional]
+                _return_http_data_only (bool): response data without HTTP status
+                    code and headers. Default is True.
+                _preload_content (bool): if False, the urllib3.HTTPResponse object
+                    will be returned without reading/decoding response data.
+                    Default is True.
+                _request_timeout (int/float/tuple): timeout setting for this request. If
+                    one number provided, it will be total request timeout. It can also
+                    be a pair (tuple) of (connection, read) timeouts.
+                    Default is None.
+                _check_input_type (bool): specifies if type checking
+                    should be done on the data sent to the server.
+                    Default is True.
+                _check_return_type (bool): specifies if type checking
+                    should be done on the data received from the server.
+                    Default is True.
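+
+            Example (a minimal usage sketch; assumes RestoreJobList exposes a
+            data page the way BackupList does later in this diff):
+
+            >>> page = await api.list_restore_jobs(limit=10)
+            >>> page.data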
+ + Returns: + RestoreJobList + """ + self._process_openapi_kwargs(kwargs) + return await self.call_with_http_info(**kwargs) + + self.list_restore_jobs = _AsyncioEndpoint( + settings={ + "response_type": (RestoreJobList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/restore-jobs", + "operation_id": "list_restore_jobs", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["limit", "pagination_token"], + "required": [], + "nullable": [], + "enum": [], + "validation": ["limit"], + }, + root_map={ + "validations": {("limit",): {"inclusive_maximum": 100, "inclusive_minimum": 1}}, + "allowed_values": {}, + "openapi_types": {"limit": (int,), "pagination_token": (str,)}, + "attribute_map": {"limit": "limit", "pagination_token": "paginationToken"}, + "location_map": {"limit": "query", "pagination_token": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_restore_jobs, + ) diff --git a/pinecone/core/openapi/db_control/model/backup_list.py b/pinecone/core/openapi/db_control/model/backup_list.py new file mode 100644 index 00000000..c485a03d --- /dev/null +++ b/pinecone/core/openapi/db_control/model/backup_list.py @@ -0,0 +1,284 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.db_control.model.backup_model import BackupModel + from pinecone.core.openapi.db_control.model.pagination_response import PaginationResponse + + globals()["BackupModel"] = BackupModel + globals()["PaginationResponse"] = PaginationResponse + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="BackupList") + + +class BackupList(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
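+
+    Example (a minimal consumption sketch; instances are normally produced by
+    the api client when deserializing a list-backups response rather than
+    constructed by hand):
+
+    >>> backup_list = api.list_project_backups(limit=5)
+    >>> [b.backup_id for b in backup_list.data]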
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "data": ([BackupModel],), # noqa: E501 + "pagination": (PaginationResponse,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "data": "data", # noqa: E501 + "pagination": "pagination", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """BackupList - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + data ([BackupModel]): [optional] # noqa: E501 + pagination (PaginationResponse): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """BackupList - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + data ([BackupModel]): [optional] # noqa: E501 + pagination (PaginationResponse): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/backup_model.py b/pinecone/core/openapi/db_control/model/backup_model.py new file mode 100644 index 00000000..5b50ba9d --- /dev/null +++ b/pinecone/core/openapi/db_control/model/backup_model.py @@ -0,0 +1,366 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.db_control.model.index_tags import IndexTags + + globals()["IndexTags"] = IndexTags + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="BackupModel") + + +class BackupModel(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
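+
+    Example (a minimal sketch of reading fields from a deserialized model; the
+    backup ID is illustrative):
+
+    >>> backup = api.describe_backup("example-backup-id")
+    >>> (backup.source_index_name, backup.status, backup.record_count)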
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = { + ("metric",): {"COSINE": "cosine", "EUCLIDEAN": "euclidean", "DOTPRODUCT": "dotproduct"} + } + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("dimension",): {"inclusive_maximum": 20000, "inclusive_minimum": 1} + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "backup_id": (str,), # noqa: E501 + "source_index_name": (str,), # noqa: E501 + "source_index_id": (str,), # noqa: E501 + "status": (str,), # noqa: E501 + "cloud": (str,), # noqa: E501 + "region": (str,), # noqa: E501 + "name": (str,), # noqa: E501 + "description": (str,), # noqa: E501 + "dimension": (int,), # noqa: E501 + "metric": (str,), # noqa: E501 + "record_count": (int,), # noqa: E501 + "namespace_count": (int,), # noqa: E501 + "size_bytes": (int,), # noqa: E501 + "tags": (IndexTags,), # noqa: E501 + "created_at": (str,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "backup_id": "backup_id", # noqa: E501 + "source_index_name": "source_index_name", # noqa: E501 + "source_index_id": "source_index_id", # noqa: E501 + "status": "status", # noqa: E501 + "cloud": "cloud", # noqa: E501 + "region": "region", # noqa: E501 + "name": "name", # noqa: E501 + "description": "description", # noqa: E501 + "dimension": "dimension", # noqa: E501 + "metric": "metric", # noqa: E501 + "record_count": "record_count", # noqa: E501 + "namespace_count": "namespace_count", # noqa: E501 + "size_bytes": "size_bytes", # noqa: E501 + "tags": "tags", # noqa: E501 + "created_at": "created_at", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data( + cls: Type[T], + backup_id, + source_index_name, + source_index_id, + status, + cloud, + region, + *args, + **kwargs, + ) -> T: # noqa: E501 + """BackupModel - a model defined in OpenAPI + + Args: + backup_id (str): Unique identifier for the backup. + source_index_name (str): Name of the index from which the backup was taken. + source_index_id (str): ID of the index. + status (str): Current status of the backup (e.g., Initializing, Ready, Failed). + cloud (str): Cloud provider where the backup is stored. + region (str): Cloud region where the backup is stored. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. 
+ False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): Optional user-defined name for the backup. [optional] # noqa: E501 + description (str): Optional description providing context for the backup. [optional] # noqa: E501 + dimension (int): The dimensions of the vectors to be inserted in the index. [optional] # noqa: E501 + metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If the 'vector_type' is 'sparse', the metric must be 'dotproduct'. If the `vector_type` is `dense`, the metric defaults to 'cosine'. [optional] # noqa: E501 + record_count (int): Total number of records in the backup. [optional] # noqa: E501 + namespace_count (int): Number of namespaces in the backup. [optional] # noqa: E501 + size_bytes (int): Size of the backup in bytes. [optional] # noqa: E501 + tags (IndexTags): [optional] # noqa: E501 + created_at (str): Timestamp when the backup was created. [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.backup_id = backup_id + self.source_index_name = source_index_name + self.source_index_id = source_index_id + self.status = status + self.cloud = cloud + self.region = region + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
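+                # A key is dropped here only when the configuration opts in via
+                # discard_unknown_keys and the model declares no additional
+                # properties type; otherwise it falls through to setattr below.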
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__( + self, backup_id, source_index_name, source_index_id, status, cloud, region, *args, **kwargs + ) -> None: # noqa: E501 + """BackupModel - a model defined in OpenAPI + + Args: + backup_id (str): Unique identifier for the backup. + source_index_name (str): Name of the index from which the backup was taken. + source_index_id (str): ID of the index. + status (str): Current status of the backup (e.g., Initializing, Ready, Failed). + cloud (str): Cloud provider where the backup is stored. + region (str): Cloud region where the backup is stored. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): Optional user-defined name for the backup. [optional] # noqa: E501 + description (str): Optional description providing context for the backup. [optional] # noqa: E501 + dimension (int): The dimensions of the vectors to be inserted in the index. [optional] # noqa: E501 + metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If the 'vector_type' is 'sparse', the metric must be 'dotproduct'. If the `vector_type` is `dense`, the metric defaults to 'cosine'. [optional] # noqa: E501 + record_count (int): Total number of records in the backup. [optional] # noqa: E501 + namespace_count (int): Number of namespaces in the backup. [optional] # noqa: E501 + size_bytes (int): Size of the backup in bytes. [optional] # noqa: E501 + tags (IndexTags): [optional] # noqa: E501 + created_at (str): Timestamp when the backup was created. 
[optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.backup_id = backup_id + self.source_index_name = source_index_name + self.source_index_id = source_index_id + self.status = status + self.cloud = cloud + self.region = region + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/byoc_spec.py b/pinecone/core/openapi/db_control/model/byoc_spec.py new file mode 100644 index 00000000..4d7a843d --- /dev/null +++ b/pinecone/core/openapi/db_control/model/byoc_spec.py @@ -0,0 +1,276 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="ByocSpec") + + +class ByocSpec(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. 
+ validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "environment": (str,) # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "environment": "environment" # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa: E501 + """ByocSpec - a model defined in OpenAPI + + Args: + environment (str): The environment where the index is hosted. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.environment = environment + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 + """ByocSpec - a model defined in OpenAPI + + Args: + environment (str): The environment where the index is hosted. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.environment = environment + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/collection_list.py b/pinecone/core/openapi/db_control/model/collection_list.py index e36d3d9d..8afb0b7e 100644 --- a/pinecone/core/openapi/db_control/model/collection_list.py +++ b/pinecone/core/openapi/db_control/model/collection_list.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
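A note on the unknown-kwargs loop that closes both ByocSpec constructors above (and recurs in every model in this patch): a keyword argument is silently dropped only when all four conditions hold. A condensed, standalone restatement of that test (the parameters here are stand-ins, not SDK types):

def should_discard_unknown_key(var_name, attribute_map, configuration, additional_properties_type):
    """Mirror of the discard test in the generated constructors (sketch)."""
    return (
        var_name not in attribute_map            # not a declared property
        and configuration is not None            # a Configuration was supplied
        and configuration.discard_unknown_keys   # it opted in to discarding
        and additional_properties_type is None   # and extras are not allowed
    )

Because these models return (bool, dict, float, int, list, str, none_type) from additional_properties_type, the last clause is always False here, so unknown keys are kept as additional properties rather than discarded.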
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -146,6 +146,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 collections ([CollectionModel]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -163,6 +165,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -183,6 +187,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -230,6 +236,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 collections ([CollectionModel]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -245,6 +253,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/collection_model.py b/pinecone/core/openapi/db_control/model/collection_model.py index 88d2334d..bb8e6577 100644 --- a/pinecone/core/openapi/db_control/model/collection_model.py +++ b/pinecone/core/openapi/db_control/model/collection_model.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -163,6 +163,8 @@ def _from_openapi_data(cls: Type[T], name, status, environment, *args, **kwargs) vector_count (int): The number of records stored in the collection. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -180,6 +182,8 @@ def _from_openapi_data(cls: Type[T], name, status, environment, *args, **kwargs) ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -203,6 +207,8 @@ def _from_openapi_data(cls: Type[T], name, status, environment, *args, **kwargs) required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -257,6 +263,8 @@ def __init__(self, name, status, environment, *args, **kwargs) -> None: # noqa: vector_count (int): The number of records stored in the collection. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -272,6 +280,8 @@ def __init__(self, name, status, environment, *args, **kwargs) -> None: # noqa: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/configure_index_request.py b/pinecone/core/openapi/db_control/model/configure_index_request.py index 7f4a5cf3..352166e0 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
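The pair of hunks just repeated for CollectionList and CollectionModel is the core change in this patch: `_from_openapi_data` (the deserialization path) now pops `_enforce_allowed_values` and `_enforce_validations` with defaults of False, while `__init__` (direct construction) defaults both to True. A hedged usage sketch against ByocSpec from above; the environment value is illustrative:

from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec

# Direct construction: both enforcement flags default to True, so
# client-side checks apply to user-supplied values.
strict = ByocSpec(environment="aws-us-east-1")

# Opting out reproduces the deserialization defaults, under which values
# echoed back by the server are accepted even if the local spec lags behind.
lenient = ByocSpec(
    environment="aws-us-east-1",
    _enforce_allowed_values=False,
    _enforce_validations=False,
)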
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -165,6 +165,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 embed (ConfigureIndexRequestEmbed): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -182,6 +184,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -202,6 +206,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -252,6 +258,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 embed (ConfigureIndexRequestEmbed): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -267,6 +275,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_embed.py b/pinecone/core/openapi/db_control/model/configure_index_request_embed.py index 55f12ebf..c3b1fc2b 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_embed.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_embed.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -147,6 +147,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -164,6 +166,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -184,6 +188,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +240,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -249,6 +257,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_spec.py b/pinecone/core/openapi/db_control/model/configure_index_request_spec.py index 484ad71d..5f2b0668 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_spec.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_spec.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
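Every model also adds the two flag names to `required_properties`. In the generator's model template (simplified below, and an assumption about model_utils internals rather than something shown in this diff), `__setattr__` routes names in that set to real instance attributes and everything else into `_data_store`; without these entries the new flags would be stored as if they were API properties:

class ModelSketch:
    # Mirrors the role of required_properties in OpenApiModel (simplified).
    required_properties = {"_data_store", "_enforce_validations"}

    def __init__(self):
        object.__setattr__(self, "_data_store", {})
        self._enforce_validations = True  # in the set -> real attribute
        self.environment = "us-east-1"    # not in the set -> data store

    def __setattr__(self, name, value):
        if name in self.required_properties:
            object.__setattr__(self, name, value)
        else:
            self._data_store[name] = value

assert ModelSketch()._data_store == {"environment": "us-east-1"}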
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -150,6 +150,8 @@ def _from_openapi_data(cls: Type[T], pod, *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -167,6 +169,8 @@ def _from_openapi_data(cls: Type[T], pod, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -188,6 +192,8 @@ def _from_openapi_data(cls: Type[T], pod, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -237,6 +243,8 @@ def __init__(self, pod, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -252,6 +260,8 @@ def __init__(self, pod, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py b/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py index 3a3ea96e..91909c75 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_spec_pod.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -143,6 +143,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 pod_type (str): The type of pod to use. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`. [optional] if omitted the server will use the default value of "p1.x1". 
# noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -160,6 +162,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -180,6 +184,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -228,6 +234,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 pod_type (str): The type of pod to use. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`. [optional] if omitted the server will use the default value of "p1.x1". # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -243,6 +251,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_backup_request.py b/pinecone/core/openapi/db_control/model/create_backup_request.py new file mode 100644 index 00000000..6375f18f --- /dev/null +++ b/pinecone/core/openapi/db_control/model/create_backup_request.py @@ -0,0 +1,274 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="CreateBackupRequest") + + +class CreateBackupRequest(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "name": (str,), # noqa: E501 + "description": (str,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "name": "name", # noqa: E501 + "description": "description", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """CreateBackupRequest - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): The name of the backup. [optional] # noqa: E501 + description (str): A description of the backup. 
[optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """CreateBackupRequest - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): The name of the backup. [optional] # noqa: E501 + description (str): A description of the backup. 
[optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/create_collection_request.py b/pinecone/core/openapi/db_control/model/create_collection_request.py index 0e14dc22..544d5f96 100644 --- a/pinecone/core/openapi/db_control/model/create_collection_request.py +++ b/pinecone/core/openapi/db_control/model/create_collection_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
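CreateBackupRequest, added above, has no required constructor arguments; both properties are optional strings. A minimal construction sketch (the values are placeholders):

from pinecone.core.openapi.db_control.model.create_backup_request import CreateBackupRequest

req = CreateBackupRequest(
    name="nightly-backup",           # optional per the 2025-04 spec
    description="Nightly snapshot",  # optional
)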
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -145,6 +145,8 @@ def _from_openapi_data(cls: Type[T], name, source, *args, **kwargs) -> T: # noq _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -162,6 +164,8 @@ def _from_openapi_data(cls: Type[T], name, source, *args, **kwargs) -> T: # noq ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -184,6 +188,8 @@ def _from_openapi_data(cls: Type[T], name, source, *args, **kwargs) -> T: # noq required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +240,8 @@ def __init__(self, name, source, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -249,6 +257,8 @@ def __init__(self, name, source, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_for_model_request.py b/pinecone/core/openapi/db_control/model/create_index_for_model_request.py index c7d889a2..6fe5fe79 100644 --- a/pinecone/core/openapi/db_control/model/create_index_for_model_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_for_model_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -173,6 +173,8 @@ def _from_openapi_data(cls: Type[T], name, cloud, region, embed, *args, **kwargs tags (IndexTags): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -190,6 +192,8 @@ def _from_openapi_data(cls: Type[T], name, cloud, region, embed, *args, **kwargs ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -214,6 +218,8 @@ def _from_openapi_data(cls: Type[T], name, cloud, region, embed, *args, **kwargs required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -268,6 +274,8 @@ def __init__(self, name, cloud, region, embed, *args, **kwargs) -> None: # noqa tags (IndexTags): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -283,6 +291,8 @@ def __init__(self, name, cloud, region, embed, *args, **kwargs) -> None: # noqa ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py b/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py index 88de090a..63693c90 100644 --- a/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py +++ b/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -89,6 +89,7 @@ def openapi_types(cls): "model": (str,), # noqa: E501 "field_map": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 "metric": (str,), # noqa: E501 + "dimension": (int,), # noqa: E501 "read_parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 "write_parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 } @@ -101,6 +102,7 @@ def discriminator(cls): "model": "model", # noqa: E501 "field_map": "field_map", # noqa: E501 "metric": "metric", # noqa: E501 + "dimension": "dimension", # noqa: E501 "read_parameters": "read_parameters", # noqa: E501 "write_parameters": "write_parameters", # noqa: E501 } @@ -150,10 +152,13 @@ def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # through its discriminator because we passed in _visited_composed_classes = (Animal,) metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. 
If not specified, the metric will be defaulted according to the model. Cannot be updated once set. [optional] # noqa: E501 + dimension (int): The dimension of embedding vectors produced for the index. [optional] # noqa: E501 read_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The read parameters for the embedding model. [optional] # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -171,6 +176,8 @@ def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -193,6 +200,8 @@ def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -242,10 +251,13 @@ def __init__(self, model, field_map, *args, **kwargs) -> None: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If not specified, the metric will be defaulted according to the model. Cannot be updated once set. [optional] # noqa: E501 + dimension (int): The dimension of embedding vectors produced for the index. [optional] # noqa: E501 read_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The read parameters for the embedding model. [optional] # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -261,6 +273,8 @@ def __init__(self, model, field_map, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py b/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py new file mode 100644 index 00000000..1070f4eb --- /dev/null +++ b/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py @@ -0,0 +1,296 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. 
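The embed diff above adds one new optional integer property, `dimension`, to CreateIndexForModelRequestEmbed. A construction sketch; `model` and `field_map` remain the required arguments, and the values shown are illustrative:

from pinecone.core.openapi.db_control.model.create_index_for_model_request_embed import (
    CreateIndexForModelRequestEmbed,
)

embed = CreateIndexForModelRequestEmbed(
    model="multilingual-e5-large",     # embedding model name (illustrative)
    field_map={"text": "chunk_text"},  # source field mapping (illustrative)
    dimension=1024,                    # new in 2025-04; defaulted by the model if omitted
)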
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.db_control.model.deletion_protection import DeletionProtection + from pinecone.core.openapi.db_control.model.index_tags import IndexTags + + globals()["DeletionProtection"] = DeletionProtection + globals()["IndexTags"] = IndexTags + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="CreateIndexFromBackupRequest") + + +class CreateIndexFromBackupRequest(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("name",): {"max_length": 45, "min_length": 1} + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ """ + lazy_import() + return { + "name": (str,), # noqa: E501 + "tags": (IndexTags,), # noqa: E501 + "deletion_protection": (DeletionProtection,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "name": "name", # noqa: E501 + "tags": "tags", # noqa: E501 + "deletion_protection": "deletion_protection", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 + """CreateIndexFromBackupRequest - a model defined in OpenAPI + + Args: + name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + tags (IndexTags): [optional] # noqa: E501 + deletion_protection (DeletionProtection): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.name = name + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, name, *args, **kwargs) -> None: # noqa: E501 + """CreateIndexFromBackupRequest - a model defined in OpenAPI + + Args: + name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + tags (IndexTags): [optional] # noqa: E501 + deletion_protection (DeletionProtection): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.name = name + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py b/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py new file mode 100644 index 00000000..360df0c2 --- /dev/null +++ b/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py @@ -0,0 +1,282 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="CreateIndexFromBackupResponse") + + +class CreateIndexFromBackupResponse(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
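CreateIndexFromBackupRequest is the one new model in this patch that ships a `validations` entry: `name` must be 1-45 characters. Since `__init__` now defaults `_enforce_validations` to True, an out-of-range name should be rejected at construction time. A hedged sketch; the concrete exception type lives in openapi_support and is not shown in this diff:

from pinecone.core.openapi.db_control.model.create_index_from_backup_request import (
    CreateIndexFromBackupRequest,
)

CreateIndexFromBackupRequest(name="restored-index")  # 1-45 chars: accepted

try:
    CreateIndexFromBackupRequest(name="x" * 46)  # violates max_length=45
except Exception as exc:  # assumed: a Pinecone validation error
    print(f"rejected: {type(exc).__name__}")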
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "restore_job_id": (str,), # noqa: E501 + "index_id": (str,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "restore_job_id": "restore_job_id", # noqa: E501 + "index_id": "index_id", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], restore_job_id, index_id, *args, **kwargs) -> T: # noqa: E501 + """CreateIndexFromBackupResponse - a model defined in OpenAPI + + Args: + restore_job_id (str): The ID of the restore job that was created. + index_id (str): The ID of the index that was created from the backup. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.restore_job_id = restore_job_id + self.index_id = index_id + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, restore_job_id, index_id, *args, **kwargs) -> None: # noqa: E501 + """CreateIndexFromBackupResponse - a model defined in OpenAPI + + Args: + restore_job_id (str): The ID of the restore job that was created. + index_id (str): The ID of the index that was created from the backup. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.restore_job_id = restore_job_id + self.index_id = index_id + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/create_index_request.py b/pinecone/core/openapi/db_control/model/create_index_request.py index ae61e95a..06c11c97 100644 --- a/pinecone/core/openapi/db_control/model/create_index_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -175,6 +175,8 @@ def _from_openapi_data(cls: Type[T], name, spec, *args, **kwargs) -> T: # noqa: vector_type (str): The index vector type. You can use 'dense' or 'sparse'. If 'dense', the vector dimension must be specified. If 'sparse', the vector dimension should not be specified. [optional] if omitted the server will use the default value of "dense". # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -192,6 +194,8 @@ def _from_openapi_data(cls: Type[T], name, spec, *args, **kwargs) -> T: # noqa: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -214,6 +218,8 @@ def _from_openapi_data(cls: Type[T], name, spec, *args, **kwargs) -> T: # noqa: required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -269,6 +275,8 @@ def __init__(self, name, spec, *args, **kwargs) -> None: # noqa: E501 vector_type (str): The index vector type. You can use 'dense' or 'sparse'. If 'dense', the vector dimension must be specified. If 'sparse', the vector dimension should not be specified. [optional] if omitted the server will use the default value of "dense". 
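CreateIndexFromBackupResponse, also new above, requires both identifiers. In practice the deserializer builds it via `_from_openapi_data`, but direct construction shows the shape (the IDs are placeholders):

from pinecone.core.openapi.db_control.model.create_index_from_backup_response import (
    CreateIndexFromBackupResponse,
)

resp = CreateIndexFromBackupResponse(restore_job_id="job-123", index_id="idx-456")
print(resp.restore_job_id, resp.index_id)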
# noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -284,6 +292,8 @@ def __init__(self, name, spec, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/deletion_protection.py b/pinecone/core/openapi/db_control/model/deletion_protection.py index 24fc26f3..c70945a2 100644 --- a/pinecone/core/openapi/db_control/model/deletion_protection.py +++ b/pinecone/core/openapi/db_control/model/deletion_protection.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -95,6 +95,8 @@ def discriminator(cls): required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -111,10 +113,10 @@ def __init__(self, *args, **kwargs) -> None: Note that value can be passed either in args or in kwargs, but not in both. Args: - args[0] (str): Whether [deletion protection](http://docs.pinecone.io/guides/indexes/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. . if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 + args[0] (str): Whether [deletion protection](http://docs.pinecone.io/guides/manage-data/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. . if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 Keyword Args: - value (str): Whether [deletion protection](http://docs.pinecone.io/guides/indexes/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. . if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 + value (str): Whether [deletion protection](http://docs.pinecone.io/guides/manage-data/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. . if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. @@ -167,12 +169,16 @@ def __init__(self, *args, **kwargs) -> None: if value is None: value = "disabled" + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -195,10 +201,10 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: Note that value can be passed either in args or in kwargs, but not in both. 
Args: - args[0] (str): Whether [deletion protection](http://docs.pinecone.io/guides/indexes/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 + args[0] (str): Whether [deletion protection](http://docs.pinecone.io/guides/manage-data/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 Keyword Args: - value (str): Whether [deletion protection](http://docs.pinecone.io/guides/indexes/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 + value (str): Whether [deletion protection](http://docs.pinecone.io/guides/manage-data/manage-indexes#configure-deletion-protection) is enabled/disabled for the index. if omitted defaults to "disabled", must be one of ["disabled", "enabled", ] # noqa: E501 _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. @@ -253,12 +259,16 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: if value is None: value = "disabled" + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/error_response.py b/pinecone/core/openapi/db_control/model/error_response.py index b37c2e97..23445308 100644 --- a/pinecone/core/openapi/db_control/model/error_response.py +++ b/pinecone/core/openapi/db_control/model/error_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
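The recurring change across these model files is the new pair of kwargs, `_enforce_allowed_values` and `_enforce_validations`: `__init__` pops them with a default of `True`, while `_from_openapi_data` (the deserialization path) pops them with a default of `False`. The apparent intent is that models a user constructs stay strictly validated, while payloads coming back from the server are accepted even if they carry enum values this client version does not know. A minimal sketch of the asymmetry using DeletionProtection; the flag names and defaults come from this diff, but the behavioral reading is an inference, not documented API:

    from pinecone.core.openapi.db_control.model.deletion_protection import DeletionProtection

    # User-constructed value: _enforce_allowed_values defaults to True in
    # __init__, so an enum member outside ["disabled", "enabled"] should be rejected.
    dp = DeletionProtection("enabled")

    # Deserialization path: _from_openapi_data defaults the flag to False,
    # presumably so a server that starts returning a new enum value does not
    # break older clients. (_from_openapi_data is internal to the SDK.)
    dp_from_server = DeletionProtection._from_openapi_data("enabled")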
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -151,6 +151,8 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +170,8 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -190,6 +194,8 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -240,6 +246,8 @@ def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -255,6 +263,8 @@ def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/error_response_error.py b/pinecone/core/openapi/db_control/model/error_response_error.py index 613ccc4f..30cc62ac 100644 --- a/pinecone/core/openapi/db_control/model/error_response_error.py +++ b/pinecone/core/openapi/db_control/model/error_response_error.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -169,6 +169,8 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -186,6 +188,8 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -208,6 +212,8 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -259,6 +265,8 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -274,6 +282,8 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_list.py b/pinecone/core/openapi/db_control/model/index_list.py index ed6c08df..b2f7468e 100644 --- a/pinecone/core/openapi/db_control/model/index_list.py +++ b/pinecone/core/openapi/db_control/model/index_list.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
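ErrorResponse and ErrorResponseError receive the same flag treatment. For orientation, a hedged construction sketch: the `code`/`message`/`details` shapes follow the docstrings in this diff, while the specific code string is illustrative rather than taken from the enum, which this hunk does not show:

    from pinecone.core.openapi.db_control.model.error_response_error import ErrorResponseError

    err = ErrorResponseError(
        code="QUOTA_EXCEEDED",        # illustrative; the allowed codes are not listed in this hunk
        message="Index quota reached",
        details={"limit": 5},         # optional free-form map, per the docstring
    )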
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -146,6 +146,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 indexes ([IndexModel]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -163,6 +165,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -183,6 +187,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -230,6 +236,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 indexes ([IndexModel]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -245,6 +253,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_model.py b/pinecone/core/openapi/db_control/model/index_model.py index 3fe45763..97ada3aa 100644 --- a/pinecone/core/openapi/db_control/model/index_model.py +++ b/pinecone/core/openapi/db_control/model/index_model.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -189,6 +189,8 @@ def _from_openapi_data(cls: Type[T], name, metric, host, spec, status, *args, ** """ vector_type = kwargs.get("vector_type", "dense") + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -206,6 +208,8 @@ def _from_openapi_data(cls: Type[T], name, metric, host, spec, status, *args, ** ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -232,6 +236,8 @@ def _from_openapi_data(cls: Type[T], name, metric, host, spec, status, *args, ** required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -291,6 +297,8 @@ def __init__(self, name, metric, host, spec, status, *args, **kwargs) -> None: """ vector_type = kwargs.get("vector_type", "dense") + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -306,6 +314,8 @@ def __init__(self, name, metric, host, spec, status, *args, **kwargs) -> None: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_model_spec.py b/pinecone/core/openapi/db_control/model/index_model_spec.py index b27ec0f8..7fc5452b 100644 --- a/pinecone/core/openapi/db_control/model/index_model_spec.py +++ b/pinecone/core/openapi/db_control/model/index_model_spec.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -28,9 +28,11 @@ def lazy_import(): + from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec from pinecone.core.openapi.db_control.model.pod_spec import PodSpec from pinecone.core.openapi.db_control.model.serverless_spec import ServerlessSpec + globals()["ByocSpec"] = ByocSpec globals()["PodSpec"] = PodSpec globals()["ServerlessSpec"] = ServerlessSpec @@ -94,6 +96,7 @@ def openapi_types(cls): """ lazy_import() return { + "byoc": (ByocSpec,), # noqa: E501 "pod": (PodSpec,), # noqa: E501 "serverless": (ServerlessSpec,), # noqa: E501 } @@ -103,6 +106,7 @@ def discriminator(cls): return None attribute_map: Dict[str, str] = { + "byoc": "byoc", # noqa: E501 "pod": "pod", # noqa: E501 "serverless": "serverless", # noqa: E501 } @@ -147,10 +151,13 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + byoc (ByocSpec): [optional] # noqa: E501 pod (PodSpec): [optional] # noqa: E501 serverless (ServerlessSpec): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +175,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -188,6 +197,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -232,10 +243,13 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + byoc (ByocSpec): [optional] # noqa: E501 pod (PodSpec): [optional] # noqa: E501 serverless (ServerlessSpec): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -251,6 +265,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_model_status.py b/pinecone/core/openapi/db_control/model/index_model_status.py index 0128f22e..52821c68 100644 --- a/pinecone/core/openapi/db_control/model/index_model_status.py +++ b/pinecone/core/openapi/db_control/model/index_model_status.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
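Beyond the validation flags, index_model_spec.py gains a third spec variant: `byoc` (ByocSpec) is lazily imported and registered in both `openapi_types` and `attribute_map` next to `pod` and `serverless`. A sketch of dispatching on whichever variant is populated; it assumes, as in stock OpenAPI-generated models, that reading an unset optional attribute raises an AttributeError subclass, so `getattr` with a default is safe:

    from pinecone.core.openapi.db_control.model.index_model_spec import IndexModelSpec

    def spec_kind(spec: IndexModelSpec) -> str:
        # All three keys are optional on the wire; in practice exactly one is set.
        for kind in ("byoc", "pod", "serverless"):
            if getattr(spec, kind, None) is not None:
                return kind
        return "unknown"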
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -69,6 +69,7 @@ class IndexModelStatus(ModelNormal): "SCALINGDOWNPODSIZE": "ScalingDownPodSize", "TERMINATING": "Terminating", "READY": "Ready", + "DISABLED": "Disabled", } } @@ -154,6 +155,8 @@ def _from_openapi_data(cls: Type[T], ready, state, *args, **kwargs) -> T: # noq _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -171,6 +174,8 @@ def _from_openapi_data(cls: Type[T], ready, state, *args, **kwargs) -> T: # noq ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -193,6 +198,8 @@ def _from_openapi_data(cls: Type[T], ready, state, *args, **kwargs) -> T: # noq required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -243,6 +250,8 @@ def __init__(self, ready, state, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -258,6 +267,8 @@ def __init__(self, ready, state, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_spec.py b/pinecone/core/openapi/db_control/model/index_spec.py index 6a8c4db5..fe1ac44e 100644 --- a/pinecone/core/openapi/db_control/model/index_spec.py +++ b/pinecone/core/openapi/db_control/model/index_spec.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
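index_model_status.py adds "Disabled" to the allowed `state` values, alongside the existing entries such as Initializing, ScalingUpPodSize, Terminating, and Ready. Any readiness check that treats the state enum as closed should account for the new value; for example:

    def is_queryable(status) -> bool:
        # status is an IndexModelStatus; "Disabled" was added to the state
        # enum in this diff and is never a usable state.
        return bool(status.ready) and status.state != "Disabled"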
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -28,9 +28,11 @@ def lazy_import(): + from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec from pinecone.core.openapi.db_control.model.pod_spec import PodSpec from pinecone.core.openapi.db_control.model.serverless_spec import ServerlessSpec + globals()["ByocSpec"] = ByocSpec globals()["PodSpec"] = PodSpec globals()["ServerlessSpec"] = ServerlessSpec @@ -89,6 +91,7 @@ def openapi_types(cls): return { "serverless": (ServerlessSpec,), # noqa: E501 "pod": (PodSpec,), # noqa: E501 + "byoc": (ByocSpec,), # noqa: E501 } @cached_class_property @@ -98,6 +101,7 @@ def discriminator(cls): attribute_map: Dict[str, str] = { "serverless": "serverless", # noqa: E501 "pod": "pod", # noqa: E501 + "byoc": "byoc", # noqa: E501 } read_only_vars: Set[str] = set([]) @@ -142,8 +146,11 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) serverless (ServerlessSpec): [optional] # noqa: E501 pod (PodSpec): [optional] # noqa: E501 + byoc (ByocSpec): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -161,6 +168,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -181,6 +190,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -227,8 +238,11 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) serverless (ServerlessSpec): [optional] # noqa: E501 pod (PodSpec): [optional] # noqa: E501 + byoc (ByocSpec): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -244,6 +258,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/index_tags.py b/pinecone/core/openapi/db_control/model/index_tags.py index 6111f7b5..62f17fb0 100644 --- a/pinecone/core/openapi/db_control/model/index_tags.py +++ b/pinecone/core/openapi/db_control/model/index_tags.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
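index_spec.py, the request-side counterpart, registers the same `byoc` variant, so a create-index payload can carry a ByocSpec. ByocSpec's own fields are not shown in this diff; the `environment` argument below is a hypothetical placeholder used only to make the sketch concrete:

    from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec
    from pinecone.core.openapi.db_control.model.index_spec import IndexSpec

    # Hypothetical: ByocSpec's constructor arguments are not part of this hunk.
    spec = IndexSpec(byoc=ByocSpec(environment="my-byoc-env"))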
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -133,6 +133,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -150,6 +152,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -170,6 +174,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -216,6 +222,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -231,6 +239,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/model_index_embed.py b/pinecone/core/openapi/db_control/model/model_index_embed.py index 624462a8..1a7f2010 100644 --- a/pinecone/core/openapi/db_control/model/model_index_embed.py +++ b/pinecone/core/openapi/db_control/model/model_index_embed.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -162,6 +162,8 @@ def _from_openapi_data(cls: Type[T], model, *args, **kwargs) -> T: # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -179,6 +181,8 @@ def _from_openapi_data(cls: Type[T], model, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -200,6 +204,8 @@ def _from_openapi_data(cls: Type[T], model, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -255,6 +261,8 @@ def __init__(self, model, *args, **kwargs) -> None: # noqa: E501 write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -270,6 +278,8 @@ def __init__(self, model, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/pagination_response.py b/pinecone/core/openapi/db_control/model/pagination_response.py new file mode 100644 index 00000000..8a954cc4 --- /dev/null +++ b/pinecone/core/openapi/db_control/model/pagination_response.py @@ -0,0 +1,276 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="PaginationResponse") + + +class PaginationResponse(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "next": (str,) # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "next": "next" # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], next, *args, **kwargs) -> T: # noqa: E501 + """PaginationResponse - a model defined in OpenAPI + + Args: + next (str): The token to use to retrieve the next page of results. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.next = next + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, next, *args, **kwargs) -> None: # noqa: E501 + """PaginationResponse - a model defined in OpenAPI + + Args: + next (str): The token to use to retrieve the next page of results. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.next = next + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/pod_spec.py b/pinecone/core/openapi/db_control/model/pod_spec.py index c430c6a3..64c0b2a7 100644 --- a/pinecone/core/openapi/db_control/model/pod_spec.py +++ b/pinecone/core/openapi/db_control/model/pod_spec.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
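pagination_response.py is a new file: a one-field model whose required `next` attribute carries the opaque token for fetching the following page. It pairs with the RestoreJobList and RestoreJobModel files added further down in this diff; RestoreJobList embeds it as an optional `pagination` field next to its required `data` list. A sketch of how the pieces compose (field names are from this diff; all values are illustrative, and `percent_complete` is validated to the inclusive range [0, 100], presumably enforced while `_enforce_validations` keeps its `__init__` default of True):

    from datetime import datetime, timezone

    from pinecone.core.openapi.db_control.model.pagination_response import PaginationResponse
    from pinecone.core.openapi.db_control.model.restore_job_list import RestoreJobList
    from pinecone.core.openapi.db_control.model.restore_job_model import RestoreJobModel

    job = RestoreJobModel(
        restore_job_id="rj-123",            # illustrative values, not from the diff
        backup_id="bk-456",
        target_index_name="my-index",
        target_index_id="idx-789",
        status="Completed",
        created_at=datetime.now(timezone.utc),
        percent_complete=100.0,             # must fall within [0, 100]
    )

    # One page of restore jobs plus the cursor for the next request:
    jobs = RestoreJobList(data=[job], pagination=PaginationResponse(next="opaque-token"))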
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -173,6 +173,8 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa """ pod_type = kwargs.get("pod_type", "p1.x1") + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -190,6 +192,8 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -212,6 +216,8 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -268,6 +274,8 @@ def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 """ pod_type = kwargs.get("pod_type", "p1.x1") + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -283,6 +291,8 @@ def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py b/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py index 969471e6..e605a141 100644 --- a/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py +++ b/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -138,6 +138,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 indexed ([str]): By default, all metadata is indexed; to change this behavior, use this property to specify an array of metadata fields that should be indexed. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +157,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +179,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +228,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 indexed ([str]): By default, all metadata is indexed; to change this behavior, use this property to specify an array of metadata fields that should be indexed. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +245,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/model/restore_job_list.py b/pinecone/core/openapi/db_control/model/restore_job_list.py new file mode 100644 index 00000000..2f39d91c --- /dev/null +++ b/pinecone/core/openapi/db_control/model/restore_job_list.py @@ -0,0 +1,290 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.db_control.model.pagination_response import PaginationResponse + from pinecone.core.openapi.db_control.model.restore_job_model import RestoreJobModel + + globals()["PaginationResponse"] = PaginationResponse + globals()["RestoreJobModel"] = RestoreJobModel + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="RestoreJobList") + + +class RestoreJobList(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). 
The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "data": ([RestoreJobModel],), # noqa: E501 + "pagination": (PaginationResponse,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "data": "data", # noqa: E501 + "pagination": "pagination", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], data, *args, **kwargs) -> T: # noqa: E501 + """RestoreJobList - a model defined in OpenAPI + + Args: + data ([RestoreJobModel]): + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + pagination (PaginationResponse): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.data = data + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, data, *args, **kwargs) -> None: # noqa: E501 + """RestoreJobList - a model defined in OpenAPI + + Args: + data ([RestoreJobModel]): + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + pagination (PaginationResponse): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.data = data + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/restore_job_model.py b/pinecone/core/openapi/db_control/model/restore_job_model.py new file mode 100644 index 00000000..951200d1 --- /dev/null +++ b/pinecone/core/openapi/db_control/model/restore_job_model.py @@ -0,0 +1,336 @@ +""" +Pinecone Control Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="RestoreJobModel") + + +class RestoreJobModel(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("percent_complete",): {"inclusive_maximum": 100.0, "inclusive_minimum": 0.0} + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "restore_job_id": (str,), # noqa: E501 + "backup_id": (str,), # noqa: E501 + "target_index_name": (str,), # noqa: E501 + "target_index_id": (str,), # noqa: E501 + "status": (str,), # noqa: E501 + "created_at": (datetime,), # noqa: E501 + "completed_at": (datetime,), # noqa: E501 + "percent_complete": (float,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "restore_job_id": "restore_job_id", # noqa: E501 + "backup_id": "backup_id", # noqa: E501 + "target_index_name": "target_index_name", # noqa: E501 + "target_index_id": "target_index_id", # noqa: E501 + "status": "status", # noqa: E501 + "created_at": "created_at", # noqa: E501 + "completed_at": "completed_at", # noqa: E501 + "percent_complete": "percent_complete", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data( + cls: Type[T], + restore_job_id, + backup_id, + target_index_name, + target_index_id, + status, + created_at, + *args, + **kwargs, + ) -> T: # noqa: E501 + """RestoreJobModel - a model defined in OpenAPI + + Args: + restore_job_id (str): Unique identifier for the restore job + backup_id (str): Backup used for the restore + target_index_name (str): Name of the index into which data is being restored + target_index_id (str): ID of the index + status (str): Status of the restore job + created_at (datetime): Timestamp when the restore job started + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. 
snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + completed_at (datetime): Timestamp when the restore job finished [optional] # noqa: E501 + percent_complete (float): The progress made by the restore job out of 100 [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.restore_job_id = restore_job_id + self.backup_id = backup_id + self.target_index_name = target_index_name + self.target_index_id = target_index_id + self.status = status + self.created_at = created_at + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__( + self, + restore_job_id, + backup_id, + target_index_name, + target_index_id, + status, + created_at, + *args, + **kwargs, + ) -> None: # noqa: E501 + """RestoreJobModel - a model defined in OpenAPI + + Args: + restore_job_id (str): Unique identifier for the restore job + backup_id (str): Backup used for the restore + target_index_name (str): Name of the index into which data is being restored + target_index_id (str): ID of the index + status (str): Status of the restore job + created_at (datetime): Timestamp when the restore job started + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + completed_at (datetime): Timestamp when the restore job finished [optional] # noqa: E501 + percent_complete (float): The progress made by the restore job out of 100 [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.restore_job_id = restore_job_id + self.backup_id = backup_id + self.target_index_name = target_index_name + self.target_index_id = target_index_id + self.status = status + self.created_at = created_at + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_control/model/serverless_spec.py b/pinecone/core/openapi/db_control/model/serverless_spec.py index 58e39e48..efa9157e 100644 --- a/pinecone/core/openapi/db_control/model/serverless_spec.py +++ b/pinecone/core/openapi/db_control/model/serverless_spec.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -145,6 +145,8 @@ def _from_openapi_data(cls: Type[T], cloud, region, *args, **kwargs) -> T: # no _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -162,6 +164,8 @@ def _from_openapi_data(cls: Type[T], cloud, region, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -184,6 +188,8 @@ def _from_openapi_data(cls: Type[T], cloud, region, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +240,8 @@ def __init__(self, cloud, region, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -249,6 +257,8 @@ def __init__(self, cloud, region, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_control/models/__init__.py 
b/pinecone/core/openapi/db_control/models/__init__.py index fd82f121..99c3bb9d 100644 --- a/pinecone/core/openapi/db_control/models/__init__.py +++ b/pinecone/core/openapi/db_control/models/__init__.py @@ -9,6 +9,9 @@ # import sys # sys.setrecursionlimit(n) +from pinecone.core.openapi.db_control.model.backup_list import BackupList +from pinecone.core.openapi.db_control.model.backup_model import BackupModel +from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec from pinecone.core.openapi.db_control.model.collection_list import CollectionList from pinecone.core.openapi.db_control.model.collection_model import CollectionModel from pinecone.core.openapi.db_control.model.configure_index_request import ConfigureIndexRequest @@ -21,6 +24,7 @@ from pinecone.core.openapi.db_control.model.configure_index_request_spec_pod import ( ConfigureIndexRequestSpecPod, ) +from pinecone.core.openapi.db_control.model.create_backup_request import CreateBackupRequest from pinecone.core.openapi.db_control.model.create_collection_request import CreateCollectionRequest from pinecone.core.openapi.db_control.model.create_index_for_model_request import ( CreateIndexForModelRequest, @@ -28,6 +32,12 @@ from pinecone.core.openapi.db_control.model.create_index_for_model_request_embed import ( CreateIndexForModelRequestEmbed, ) +from pinecone.core.openapi.db_control.model.create_index_from_backup_request import ( + CreateIndexFromBackupRequest, +) +from pinecone.core.openapi.db_control.model.create_index_from_backup_response import ( + CreateIndexFromBackupResponse, +) from pinecone.core.openapi.db_control.model.create_index_request import CreateIndexRequest from pinecone.core.openapi.db_control.model.deletion_protection import DeletionProtection from pinecone.core.openapi.db_control.model.error_response import ErrorResponse @@ -39,6 +49,9 @@ from pinecone.core.openapi.db_control.model.index_spec import IndexSpec from pinecone.core.openapi.db_control.model.index_tags import IndexTags from pinecone.core.openapi.db_control.model.model_index_embed import ModelIndexEmbed +from pinecone.core.openapi.db_control.model.pagination_response import PaginationResponse from pinecone.core.openapi.db_control.model.pod_spec import PodSpec from pinecone.core.openapi.db_control.model.pod_spec_metadata_config import PodSpecMetadataConfig +from pinecone.core.openapi.db_control.model.restore_job_list import RestoreJobList +from pinecone.core.openapi.db_control.model.restore_job_model import RestoreJobModel from pinecone.core.openapi.db_control.model.serverless_spec import ServerlessSpec diff --git a/pinecone/core/openapi/db_data/__init__.py b/pinecone/core/openapi/db_data/__init__.py index c878a548..76701561 100644 --- a/pinecone/core/openapi/db_data/__init__.py +++ b/pinecone/core/openapi/db_data/__init__.py @@ -7,7 +7,7 @@ This file is @generated using OpenAPI. 
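The imports above register the new backup and restore-job models with the db_control package. A minimal sketch of how they are exercised end to end, assuming the top-level client grew matching convenience methods; the method and keyword names below are assumptions, only the model and field names come from this diff:

```python
# A minimal sketch, assuming top-level convenience methods that mirror the
# models imported above (create_backup, create_index_from_backup,
# list_restore_jobs are assumed names; BackupModel, RestoreJobModel, and
# percent_complete come from this diff).
from pinecone import Pinecone

pc = Pinecone(api_key="YOUR_API_KEY")

backup = pc.create_backup(index_name="docs-index", backup_name="docs-backup")

# Restoring into a new index starts an asynchronous restore job, which is
# what RestoreJobModel tracks.
pc.create_index_from_backup(name="docs-index-restored", backup_id=backup.backup_id)

for job in pc.list_restore_jobs().data:
    # percent_complete is validated to the inclusive range [0, 100].
    print(job.restore_job_id, job.status, job.percent_complete)
```
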
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -17,7 +17,7 @@ from pinecone.openapi_support.api_client import ApiClient # import Configuration -from pinecone.openapi_support.configuration import Configuration +from pinecone.config.openapi_configuration import Configuration # import exceptions from pinecone.openapi_support.exceptions import PineconeException @@ -27,4 +27,4 @@ from pinecone.openapi_support.exceptions import PineconeApiKeyError from pinecone.openapi_support.exceptions import PineconeApiException -API_VERSION = "2025-01" +API_VERSION = "2025-04" diff --git a/pinecone/core/openapi/db_data/api/bulk_operations_api.py b/pinecone/core/openapi/db_data/api/bulk_operations_api.py index fcfbac8a..854e37af 100644 --- a/pinecone/core/openapi/db_data/api/bulk_operations_api.py +++ b/pinecone/core/openapi/db_data/api/bulk_operations_api.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -44,7 +44,7 @@ def __init__(self, api_client=None) -> None: def __cancel_bulk_import(self, id, **kwargs: ExtraOpenApiKwargsTypedDict): """Cancel an import # noqa: E501 - Cancel an import operation if it is not yet finished. It has no effect if the operation is already finished. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + Cancel an import operation if it is not yet finished. It has no effect if the operation is already finished. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -113,7 +113,7 @@ def __cancel_bulk_import(self, id, **kwargs: ExtraOpenApiKwargsTypedDict): def __describe_bulk_import(self, id, **kwargs: ExtraOpenApiKwargsTypedDict): """Describe an import # noqa: E501 - Return details of a specific import operation. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + Return details of a specific import operation. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -182,7 +182,7 @@ def __describe_bulk_import(self, id, **kwargs: ExtraOpenApiKwargsTypedDict): def __list_bulk_imports(self, **kwargs: ExtraOpenApiKwargsTypedDict): """List imports # noqa: E501 - List all recent and ongoing import operations. By default, this returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + List all recent and ongoing import operations. By default, `list_imports` returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. 
Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -250,7 +250,7 @@ def __list_bulk_imports(self, **kwargs: ExtraOpenApiKwargsTypedDict): def __start_bulk_import(self, start_import_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Start import # noqa: E501 - Start an asynchronous import of vectors from object storage into an index. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + Start an asynchronous import of vectors from object storage into an index. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -331,7 +331,7 @@ def __init__(self, api_client=None) -> None: async def __cancel_bulk_import(self, id, **kwargs): """Cancel an import # noqa: E501 - Cancel an import operation if it is not yet finished. It has no effect if the operation is already finished. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + Cancel an import operation if it is not yet finished. It has no effect if the operation is already finished. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 Args: @@ -393,7 +393,7 @@ async def __cancel_bulk_import(self, id, **kwargs): async def __describe_bulk_import(self, id, **kwargs): """Describe an import # noqa: E501 - Return details of a specific import operation. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + Return details of a specific import operation. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 Args: @@ -455,7 +455,7 @@ async def __describe_bulk_import(self, id, **kwargs): async def __list_bulk_imports(self, **kwargs): """List imports # noqa: E501 - List all recent and ongoing import operations. By default, this returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + List all recent and ongoing import operations. By default, `list_imports` returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). 
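Taken together, the four operations above form the import lifecycle: start, list with pagination, describe, cancel. A sketch against the generated client follows; the operation names come from this diff, while StartImportRequest's module path and the response field names (data, pagination.next, id, status) are assumptions:

```python
# Sketch of the import lifecycle using the generated BulkOperationsApi.
# A fully configured ApiClient (API key, target host) is assumed and elided.
from pinecone.openapi_support import ApiClient
from pinecone.core.openapi.db_data.api.bulk_operations_api import BulkOperationsApi
from pinecone.core.openapi.db_data.model.start_import_request import StartImportRequest

api = BulkOperationsApi(ApiClient())

op = api.start_bulk_import(StartImportRequest(uri="s3://my-bucket/my-data/"))

# Up to 100 imports per page; follow pagination_token until it is absent.
page = api.list_bulk_imports(limit=100)
while True:
    for imp in page.data:
        print(imp.id, imp.status)
    pagination = getattr(page, "pagination", None)
    if not pagination:
        break
    page = api.list_bulk_imports(limit=100, pagination_token=pagination.next)

api.cancel_bulk_import(op.id)  # no effect once the import has finished
```
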
# noqa: E501 @@ -516,7 +516,7 @@ async def __list_bulk_imports(self, **kwargs): async def __start_bulk_import(self, start_import_request, **kwargs): """Start import # noqa: E501 - Start an asynchronous import of vectors from object storage into an index. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/data/import-data). # noqa: E501 + Start an asynchronous import of vectors from object storage into an index. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 Args: diff --git a/pinecone/core/openapi/db_data/api/namespace_operations_api.py b/pinecone/core/openapi/db_data/api/namespace_operations_api.py new file mode 100644 index 00000000..e28e7430 --- /dev/null +++ b/pinecone/core/openapi/db_data/api/namespace_operations_api.py @@ -0,0 +1,443 @@ +""" +Pinecone Data Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support import ApiClient, AsyncioApiClient +from pinecone.openapi_support.endpoint_utils import ( + ExtraOpenApiKwargsTypedDict, + KwargsWithOpenApiKwargDefaultsTypedDict, +) +from pinecone.openapi_support.endpoint import Endpoint as _Endpoint, ExtraOpenApiKwargsTypedDict +from pinecone.openapi_support.asyncio_endpoint import AsyncioEndpoint as _AsyncioEndpoint +from pinecone.openapi_support.model_utils import ( # noqa: F401 + date, + datetime, + file_type, + none_type, + validate_and_convert_types, +) +from pinecone.core.openapi.db_data.model.list_namespaces_response import ListNamespacesResponse +from pinecone.core.openapi.db_data.model.namespace_description import NamespaceDescription +from pinecone.core.openapi.db_data.model.rpc_status import RpcStatus + + +class NamespaceOperationsApi: + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def __delete_namespace(self, namespace, **kwargs: ExtraOpenApiKwargsTypedDict): + """Delete a namespace # noqa: E501 + + Delete a namespace from an index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_namespace(namespace, async_req=True) + >>> result = thread.get() + + Args: + namespace (str): The namespace to delete + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done on the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done on the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + {str: (bool, dict, float, int, list, str, none_type)} + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["namespace"] = namespace + return self.call_with_http_info(**kwargs) + + self.delete_namespace = _Endpoint( + settings={ + "response_type": ({str: (bool, dict, float, int, list, str, none_type)},), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/namespaces/{namespace}", + "operation_id": "delete_namespace", + "http_method": "DELETE", + "servers": None, + }, + params_map={ + "all": ["namespace"], + "required": ["namespace"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"namespace": (str,)}, + "attribute_map": {"namespace": "namespace"}, + "location_map": {"namespace": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__delete_namespace, + ) + + def __describe_namespace(self, namespace, **kwargs: ExtraOpenApiKwargsTypedDict): + """Describe a namespace # noqa: E501 + + Describe a namespace within an index, showing the vector count within the namespace. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.describe_namespace(namespace, async_req=True) + >>> result = thread.get() + + Args: + namespace (str): The namespace to describe + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done on the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done on the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + NamespaceDescription + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["namespace"] = namespace + return self.call_with_http_info(**kwargs) + + self.describe_namespace = _Endpoint( + settings={ + "response_type": (NamespaceDescription,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/namespaces/{namespace}", + "operation_id": "describe_namespace", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["namespace"], + "required": ["namespace"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"namespace": (str,)}, + "attribute_map": {"namespace": "namespace"}, + "location_map": {"namespace": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__describe_namespace, + ) + + def __list_namespaces_operation(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """Get list of all namespaces # noqa: E501 + + Get a list of all namespaces within an index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_namespaces_operation(async_req=True) + >>> result = thread.get() + + + Keyword Args: + limit (int): Max number of namespaces to return per page. [optional] + pagination_token (str): Pagination token to continue a previous listing operation. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done on the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done on the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + ListNamespacesResponse + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_namespaces_operation = _Endpoint( + settings={ + "response_type": (ListNamespacesResponse,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/namespaces", + "operation_id": "list_namespaces_operation", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["limit", "pagination_token"], + "required": [], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"limit": (int,), "pagination_token": (str,)}, + "attribute_map": {"limit": "limit", "pagination_token": "paginationToken"}, + "location_map": {"limit": "query", "pagination_token": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_namespaces_operation, + ) + + +class AsyncioNamespaceOperationsApi: + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = AsyncioApiClient() + self.api_client = api_client + + async def __delete_namespace(self, namespace, **kwargs): + """Delete a namespace # noqa: E501 + + Delete a namespace from an index. # noqa: E501 + + + Args: + namespace (str): The namespace to delete + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done on the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done on the data received from the server. + Default is True. + + Returns: + {str: (bool, dict, float, int, list, str, none_type)} + """ + self._process_openapi_kwargs(kwargs) + kwargs["namespace"] = namespace + return await self.call_with_http_info(**kwargs) + + self.delete_namespace = _AsyncioEndpoint( + settings={ + "response_type": ({str: (bool, dict, float, int, list, str, none_type)},), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/namespaces/{namespace}", + "operation_id": "delete_namespace", + "http_method": "DELETE", + "servers": None, + }, + params_map={ + "all": ["namespace"], + "required": ["namespace"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"namespace": (str,)}, + "attribute_map": {"namespace": "namespace"}, + "location_map": {"namespace": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__delete_namespace, + ) + + async def __describe_namespace(self, namespace, **kwargs): + """Describe a namespace # noqa: E501 + + Describe a namespace within an index, showing the vector count within the namespace. # noqa: E501 + + + Args: + namespace (str): The namespace to describe + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done on the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done on the data received from the server. + Default is True. + + Returns: + NamespaceDescription + """ + self._process_openapi_kwargs(kwargs) + kwargs["namespace"] = namespace + return await self.call_with_http_info(**kwargs) + + self.describe_namespace = _AsyncioEndpoint( + settings={ + "response_type": (NamespaceDescription,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/namespaces/{namespace}", + "operation_id": "describe_namespace", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["namespace"], + "required": ["namespace"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"namespace": (str,)}, + "attribute_map": {"namespace": "namespace"}, + "location_map": {"namespace": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__describe_namespace, + ) + + async def __list_namespaces_operation(self, **kwargs): + """Get list of all namespaces # noqa: E501 + + Get a list of all namespaces within an index. # noqa: E501 + + + + Keyword Args: + limit (int): Max number of namespaces to return per page. [optional] + pagination_token (str): Pagination token to continue a previous listing operation. [optional] + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done on the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done on the data received from the server. + Default is True. + + Returns: + ListNamespacesResponse + """ + self._process_openapi_kwargs(kwargs) + return await self.call_with_http_info(**kwargs) + + self.list_namespaces_operation = _AsyncioEndpoint( + settings={ + "response_type": (ListNamespacesResponse,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/namespaces", + "operation_id": "list_namespaces_operation", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["limit", "pagination_token"], + "required": [], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"limit": (int,), "pagination_token": (str,)}, + "attribute_map": {"limit": "limit", "pagination_token": "paginationToken"}, + "location_map": {"limit": "query", "pagination_token": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_namespaces_operation, + )
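That closes out the new namespace API; the sync and asyncio classes expose the same three endpoints. A short sketch against the generated sync class follows; the class name, operation ids, parameters, and paths all come from this diff, and only a fully configured ApiClient (API key, target host) is assumed and elided:

```python
# Exercising the three new namespace endpoints via the generated sync client.
from pinecone.openapi_support import ApiClient
from pinecone.core.openapi.db_data.api.namespace_operations_api import NamespaceOperationsApi

api = NamespaceOperationsApi(ApiClient())  # assumes a configured ApiClient

# GET /namespaces, paginated via limit / paginationToken.
page = api.list_namespaces_operation(limit=10)

# GET /namespaces/{namespace} -> NamespaceDescription (per-namespace record count).
desc = api.describe_namespace("example-namespace")

# DELETE /namespaces/{namespace}
api.delete_namespace("example-namespace")
```

diff --git a/pinecone/core/openapi/db_data/api/vector_operations_api.py b/pinecone/core/openapi/db_data/api/vector_operations_api.py index 22bb47c3..25ad63c5 100644 --- a/pinecone/core/openapi/db_data/api/vector_operations_api.py +++ b/pinecone/core/openapi/db_data/api/vector_operations_api.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -55,7 +55,7 @@ def __init__(self, api_client=None) -> None: def __delete_vectors(self, delete_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Delete vectors # noqa: E501 - Delete vectors, by id, from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/data/delete-data). # noqa: E501 + Delete vectors by id from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/manage-data/delete-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -195,7 +195,7 @@ def __describe_index_stats( def __fetch_vectors(self, ids, **kwargs: ExtraOpenApiKwargsTypedDict): """Fetch vectors # noqa: E501 - Look up and return vectors, by ID, from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/data/fetch-data). # noqa: E501 + Look up and return vectors by ID from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/manage-data/fetch-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -265,7 +265,7 @@ def __fetch_vectors(self, ids, **kwargs: ExtraOpenApiKwargsTypedDict): def __list_vectors(self, **kwargs: ExtraOpenApiKwargsTypedDict): """List vector IDs # noqa: E501 - List the IDs of vectors in a single namespace of a serverless index.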
An optional prefix can be passed to limit the results to IDs with a common prefix. This returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 + List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. Returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/manage-data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -348,9 +348,9 @@ def __list_vectors(self, **kwargs: ExtraOpenApiKwargsTypedDict): ) def __query_vectors(self, query_request, **kwargs: ExtraOpenApiKwargsTypedDict): - """Query vectors # noqa: E501 + """Search with a vector # noqa: E501 - Search a namespace, using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Query data](https://docs.pinecone.io/guides/data/query-data). # noqa: E501 + Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/semantic-search). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -419,9 +419,9 @@ def __query_vectors(self, query_request, **kwargs: ExtraOpenApiKwargsTypedDict): def __search_records_namespace( self, namespace, search_records_request, **kwargs: ExtraOpenApiKwargsTypedDict ): - """Search a namespace # noqa: E501 + """Search with text # noqa: E501 - This operation converts a query to a vector embedding and then searches a namespace using the embedding. It returns the most similar records in the namespace, along with their similarity scores. # noqa: E501 + Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). Searching with a query vector or record ID is supported for all indexes. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/semantic-search). # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True @@ -495,7 +495,7 @@ def __search_records_namespace( def __update_vector(self, update_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Update a vector # noqa: E501 - Update a vector in a namespace. If a value is included, it will overwrite the previous value. If a `set_metadata` is included, the values of the fields specified in it will be added or overwrite the previous value. For guidance and examples, see [Update data](https://docs.pinecone.io/guides/data/update-data). # noqa: E501 + Update a vector in a namespace. If a value is included, it will overwrite the previous value. If a `set_metadata` is included, the values of the fields specified in it will be added or overwrite the previous value. For guidance and examples, see [Update data](https://docs.pinecone.io/guides/manage-data/update-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -564,9 +564,9 @@ def __update_vector(self, update_request, **kwargs: ExtraOpenApiKwargsTypedDict) def __upsert_records_namespace( self, namespace, upsert_record, **kwargs: ExtraOpenApiKwargsTypedDict ): - """Upsert records into a namespace # noqa: E501 + """Upsert text # noqa: E501 - This operation converts input data to vector embeddings and then upserts the embeddings into a namespace. # noqa: E501 + Upsert text into a namespace. Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data#upsert-text). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -637,7 +637,7 @@ def __upsert_records_namespace( def __upsert_vectors(self, upsert_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Upsert vectors # noqa: E501 - Write vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/data/upsert-data). # noqa: E501 + Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data#upsert-vectors). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -718,7 +718,7 @@ def __init__(self, api_client=None) -> None: async def __delete_vectors(self, delete_request, **kwargs): """Delete vectors # noqa: E501 - Delete vectors, by id, from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/data/delete-data). # noqa: E501 + Delete vectors by id from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/manage-data/delete-data). # noqa: E501 Args: @@ -842,7 +842,7 @@ async def __describe_index_stats(self, describe_index_stats_request, **kwargs): async def __fetch_vectors(self, ids, **kwargs): """Fetch vectors # noqa: E501 - Look up and return vectors, by ID, from a single namespace. The returned vectors include the vector data and/or metadata. 
For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/data/fetch-data). # noqa: E501 + Look up and return vectors by ID from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/manage-data/fetch-data). # noqa: E501 Args: @@ -905,7 +905,7 @@ async def __fetch_vectors(self, ids, **kwargs): async def __list_vectors(self, **kwargs): """List vector IDs # noqa: E501 - List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. This returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 + List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. Returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/manage-data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 @@ -981,9 +981,9 @@ async def __list_vectors(self, **kwargs): ) async def __query_vectors(self, query_request, **kwargs): - """Query vectors # noqa: E501 + """Search with a vector # noqa: E501 - Search a namespace, using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Query data](https://docs.pinecone.io/guides/data/query-data). # noqa: E501 + Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/semantic-search). # noqa: E501 Args: @@ -1043,9 +1043,9 @@ async def __query_vectors(self, query_request, **kwargs): ) async def __search_records_namespace(self, namespace, search_records_request, **kwargs): - """Search a namespace # noqa: E501 + """Search with text # noqa: E501 - This operation converts a query to a vector embedding and then searches a namespace using the embedding. It returns the most similar records in the namespace, along with their similarity scores. # noqa: E501 + Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). 
Searching with a query vector or record ID is supported for all indexes. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/semantic-search). # noqa: E501 Args: @@ -1112,7 +1112,7 @@ async def __search_records_namespace(self, namespace, search_records_request, ** async def __update_vector(self, update_request, **kwargs): """Update a vector # noqa: E501 - Update a vector in a namespace. If a value is included, it will overwrite the previous value. If a `set_metadata` is included, the values of the fields specified in it will be added or overwrite the previous value. For guidance and examples, see [Update data](https://docs.pinecone.io/guides/data/update-data). # noqa: E501 + Update a vector in a namespace. If a value is included, it will overwrite the previous value. If a `set_metadata` is included, the values of the fields specified in it will be added or overwrite the previous value. For guidance and examples, see [Update data](https://docs.pinecone.io/guides/manage-data/update-data). # noqa: E501 Args: @@ -1172,9 +1172,9 @@ async def __update_vector(self, update_request, **kwargs): ) async def __upsert_records_namespace(self, namespace, upsert_record, **kwargs): - """Upsert records into a namespace # noqa: E501 + """Upsert text # noqa: E501 - This operation converts input data to vector embeddings and then upserts the embeddings into a namespace. # noqa: E501 + Upsert text into a namespace. Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data#upsert-text). # noqa: E501 Args: @@ -1238,7 +1238,7 @@ async def __upsert_records_namespace(self, namespace, upsert_record, **kwargs): async def __upsert_vectors(self, upsert_request, **kwargs): """Upsert vectors # noqa: E501 - Write vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/data/upsert-data). # noqa: E501 + Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data#upsert-vectors). # noqa: E501 Args: diff --git a/pinecone/core/openapi/db_data/apis/__init__.py b/pinecone/core/openapi/db_data/apis/__init__.py index 3d7112fd..a5caa981 100644 --- a/pinecone/core/openapi/db_data/apis/__init__.py +++ b/pinecone/core/openapi/db_data/apis/__init__.py @@ -14,4 +14,5 @@ # Import APIs into API package: from pinecone.core.openapi.db_data.api.bulk_operations_api import BulkOperationsApi +from pinecone.core.openapi.db_data.api.namespace_operations_api import NamespaceOperationsApi from pinecone.core.openapi.db_data.api.vector_operations_api import VectorOperationsApi diff --git a/pinecone/core/openapi/db_data/model/delete_request.py b/pinecone/core/openapi/db_data/model/delete_request.py index 79e39905..0d3409a7 100644 --- a/pinecone/core/openapi/db_data/model/delete_request.py +++ b/pinecone/core/openapi/db_data/model/delete_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
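The renamed docstrings above draw a line between the two search styles: "Search with a vector" (query, any index) and "Search with text" (search_records, integrated-embedding indexes only). A contrast sketch via the high-level Index wrapper; index.query is long-standing SDK surface, while the search_records wrapper name and its query dict shape are assumptions here:

```python
# Contrast of the two search styles named in the docstrings above.
from pinecone import Pinecone

index = Pinecone(api_key="YOUR_API_KEY").Index("docs-index")

# "Search with a vector": bring your own query vector; works on any index.
index.query(vector=[0.1] * 1024, top_k=5, namespace="example-namespace")

# "Search with text": Pinecone embeds the text server-side; supported only
# on indexes with integrated embedding.
index.search_records(
    namespace="example-namespace",
    query={"inputs": {"text": "disaster recovery runbook"}, "top_k": 5},
)
```
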
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -144,9 +144,11 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ids ([str]): Vectors to delete. [optional] # noqa: E501 delete_all (bool): This indicates that all vectors in the index namespace should be deleted. [optional] if omitted the server will use the default value of False. # noqa: E501 namespace (str): The namespace to delete vectors from, if applicable. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). Serverless indexes do not support delete by metadata. Instead, you can use the `list` operation to fetch the vector IDs based on their common ID prefix and then delete the records by ID. [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support delete by metadata. Instead, you can use the `list` operation to fetch the vector IDs based on their common ID prefix and then delete the records by ID. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -164,6 +166,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -184,6 +188,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -231,9 +237,11 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ids ([str]): Vectors to delete. [optional] # noqa: E501 delete_all (bool): This indicates that all vectors in the index namespace should be deleted. [optional] if omitted the server will use the default value of False. # noqa: E501 namespace (str): The namespace to delete vectors from, if applicable. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). Serverless indexes do not support delete by metadata. Instead, you can use the `list` operation to fetch the vector IDs based on their common ID prefix and then delete the records by ID. 
[optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support delete by metadata. Instead, you can use the `list` operation to fetch the vector IDs based on their common ID prefix and then delete the records by ID. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -249,6 +257,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/describe_index_stats_request.py b/pinecone/core/openapi/db_data/model/describe_index_stats_request.py index 769f8dcb..6c54d92f 100644 --- a/pinecone/core/openapi/db_data/model/describe_index_stats_request.py +++ b/pinecone/core/openapi/db_data/model/describe_index_stats_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -135,9 +135,11 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. 
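The DeleteRequest docstring above also spells out the serverless workaround for the missing delete-by-metadata: list vector IDs by their common prefix, then delete those IDs. A sketch using the long-standing index.list and index.delete wrappers; the prefix scheme is illustrative:

```python
# Serverless delete-by-prefix, per the DeleteRequest docstring above.
from pinecone import Pinecone

index = Pinecone(api_key="YOUR_API_KEY").Index("docs-index")

# index.list yields batches of IDs sharing the prefix and follows
# pagination_token internally.
for id_batch in index.list(prefix="doc1#", namespace="example-namespace"):
    index.delete(ids=id_batch, namespace="example-namespace")
```
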
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +157,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +179,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -219,9 +225,11 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +245,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/fetch_response.py b/pinecone/core/openapi/db_data/model/fetch_response.py index 6a918dbc..092fad1c 100644 --- a/pinecone/core/openapi/db_data/model/fetch_response.py +++ b/pinecone/core/openapi/db_data/model/fetch_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
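The same caveat applies to the stats call: the filter is honored on pod-based indexes and not supported on serverless. A minimal sketch, assuming a pod-based index named "pod-index":

```python
# Metadata-filtered stats, per the describe_index_stats docstring above;
# pod-based indexes only, serverless indexes do not support the filter.
from pinecone import Pinecone

index = Pinecone(api_key="YOUR_API_KEY").Index("pod-index")

stats = index.describe_index_stats(filter={"genre": {"$eq": "documentary"}})
print(stats.total_vector_count)
```
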
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -154,6 +154,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 usage (Usage): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -171,6 +173,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -191,6 +195,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -240,6 +246,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 usage (Usage): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -255,6 +263,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/hit.py b/pinecone/core/openapi/db_data/model/hit.py index 8af69b37..1a7431d1 100644 --- a/pinecone/core/openapi/db_data/model/hit.py +++ b/pinecone/core/openapi/db_data/model/hit.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
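The describe_index_stats_request docstring above carries the matching caveat: its filter argument narrows the returned statistics, but the filter is not supported on serverless indexes. A short sketch of the pod-based usage, with an invented metadata field:

    from pinecone import Pinecone

    pc = Pinecone(api_key="YOUR_API_KEY")
    index = pc.Index("example-index")  # hypothetical pod-based index

    # Statistics restricted to vectors whose metadata matches the filter;
    # serverless indexes do not support this parameter.
    stats = index.describe_index_stats(filter={"genre": {"$eq": "documentary"}})
    print(stats.total_vector_count)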
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -146,6 +146,8 @@ def _from_openapi_data(cls: Type[T], _id, _score, fields, *args, **kwargs) -> T: _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -163,6 +165,8 @@ def _from_openapi_data(cls: Type[T], _id, _score, fields, *args, **kwargs) -> T: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -186,6 +190,8 @@ def _from_openapi_data(cls: Type[T], _id, _score, fields, *args, **kwargs) -> T: required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -237,6 +243,8 @@ def __init__(self, _id, _score, fields, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -252,6 +260,8 @@ def __init__(self, _id, _score, fields, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/import_error_mode.py b/pinecone/core/openapi/db_data/model/import_error_mode.py index dd8a0e71..2f320d88 100644 --- a/pinecone/core/openapi/db_data/model/import_error_mode.py +++ b/pinecone/core/openapi/db_data/model/import_error_mode.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -140,6 +140,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 on_error (str): Indicates how to respond to errors during the import process. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +159,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -177,6 +181,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -224,6 +230,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 on_error (str): Indicates how to respond to errors during the import process. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -239,6 +247,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/import_model.py b/pinecone/core/openapi/db_data/model/import_model.py index d1d4589f..6bb3c296 100644 --- a/pinecone/core/openapi/db_data/model/import_model.py +++ b/pinecone/core/openapi/db_data/model/import_model.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -170,6 +170,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 error (str): The error message if the import process failed. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -187,6 +189,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -207,6 +211,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -261,6 +267,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 error (str): The error message if the import process failed. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -276,6 +284,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/index_description.py b/pinecone/core/openapi/db_data/model/index_description.py index 1bbc7198..0af2c259 100644 --- a/pinecone/core/openapi/db_data/model/index_description.py +++ b/pinecone/core/openapi/db_data/model/index_description.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -161,6 +161,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 vector_type (str): The type of vectors stored in the index. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -178,6 +180,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -198,6 +202,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -250,6 +256,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 vector_type (str): The type of vectors stored in the index. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -265,6 +273,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/list_imports_response.py b/pinecone/core/openapi/db_data/model/list_imports_response.py index 1b485e8b..d2321fb8 100644 --- a/pinecone/core/openapi/db_data/model/list_imports_response.py +++ b/pinecone/core/openapi/db_data/model/list_imports_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
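The recurring change in these generated models threads two new internal kwargs, _enforce_allowed_values and _enforce_validations, through every constructor, with deliberately asymmetric defaults: __init__ (user-built request objects) pops both as True, while _from_openapi_data (deserialized server responses) pops both as False. Assuming these flags gate the generator's allowed-value and validation checks, which is consistent with the defaults above, the practical effect can be sketched with ImportErrorMode; the "future" enum value is invented for illustration.

    from pinecone.core.openapi.db_data.model.import_error_mode import ImportErrorMode

    # User-constructed models keep strict checking (both flags default to True
    # in __init__), so an out-of-spec enum value would be rejected here.
    strict = ImportErrorMode(on_error="abort")

    # Server responses are deserialized via _from_openapi_data, where both
    # flags default to False, so a value introduced by a newer API version
    # (invented here) deserializes without raising on an older client.
    lenient = ImportErrorMode._from_openapi_data(on_error="some-future-mode")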
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -151,6 +151,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 pagination (Pagination): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +170,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -188,6 +192,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -236,6 +242,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 pagination (Pagination): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -251,6 +259,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/list_item.py b/pinecone/core/openapi/db_data/model/list_item.py index ad58c600..22d2e0fd 100644 --- a/pinecone/core/openapi/db_data/model/list_item.py +++ b/pinecone/core/openapi/db_data/model/list_item.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -138,6 +138,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 id (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +157,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +179,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +228,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 id (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +245,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/list_namespaces_response.py b/pinecone/core/openapi/db_data/model/list_namespaces_response.py new file mode 100644 index 00000000..5bbc61be --- /dev/null +++ b/pinecone/core/openapi/db_data/model/list_namespaces_response.py @@ -0,0 +1,284 @@ +""" +Pinecone Data Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.db_data.model.namespace_description import NamespaceDescription + from pinecone.core.openapi.db_data.model.pagination import Pagination + + globals()["NamespaceDescription"] = NamespaceDescription + globals()["Pagination"] = Pagination + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="ListNamespacesResponse") + + +class ListNamespacesResponse(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. 
+ + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "namespaces": ([NamespaceDescription],), # noqa: E501 + "pagination": (Pagination,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "namespaces": "namespaces", # noqa: E501 + "pagination": "pagination", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """ListNamespacesResponse - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + namespaces ([NamespaceDescription]): The list of namespaces belonging to this index. [optional] # noqa: E501 + pagination (Pagination): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """ListNamespacesResponse - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + namespaces ([NamespaceDescription]): The list of namespaces belonging to this index. [optional] # noqa: E501 + pagination (Pagination): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_data/model/list_response.py b/pinecone/core/openapi/db_data/model/list_response.py index 4a8d0c5e..f5ea54af 100644 --- a/pinecone/core/openapi/db_data/model/list_response.py +++ b/pinecone/core/openapi/db_data/model/list_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
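list_namespaces_response.py is a new file: the model pairs a namespaces list of NamespaceDescription entries (defined later in this diff) with an optional Pagination token. A construction sketch with invented values; the field names follow the attribute_map shown above.

    from pinecone.core.openapi.db_data.model.list_namespaces_response import (
        ListNamespacesResponse,
    )
    from pinecone.core.openapi.db_data.model.namespace_description import (
        NamespaceDescription,
    )
    from pinecone.core.openapi.db_data.model.pagination import Pagination

    resp = ListNamespacesResponse(
        namespaces=[NamespaceDescription(name="example-ns", record_count=1250)],
        pagination=Pagination(next="opaque-token-from-previous-page"),
    )
    for ns in resp.namespaces:
        print(ns.name, ns.record_count)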
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -159,6 +159,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 usage (Usage): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -176,6 +178,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -196,6 +200,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -246,6 +252,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 usage (Usage): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -261,6 +269,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/namespace_description.py b/pinecone/core/openapi/db_data/model/namespace_description.py new file mode 100644 index 00000000..abd3fc50 --- /dev/null +++ b/pinecone/core/openapi/db_data/model/namespace_description.py @@ -0,0 +1,274 @@ +""" +Pinecone Data Plane API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="NamespaceDescription") + + +class NamespaceDescription(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "name": (str,), # noqa: E501 + "record_count": (int,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "name": "name", # noqa: E501 + "record_count": "record_count", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """NamespaceDescription - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): The name of the namespace. [optional] # noqa: E501 + record_count (int): The total amount of records within the namespace. 
[optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """NamespaceDescription - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): The name of the namespace. [optional] # noqa: E501 + record_count (int): The total amount of records within the namespace. 
[optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/db_data/model/namespace_summary.py b/pinecone/core/openapi/db_data/model/namespace_summary.py index a2ba425a..752f95ee 100644 --- a/pinecone/core/openapi/db_data/model/namespace_summary.py +++ b/pinecone/core/openapi/db_data/model/namespace_summary.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -138,6 +138,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 vector_count (int): The number of vectors stored in this namespace. Note that updates to this field may lag behind updates to the underlying index and corresponding query results, etc. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +157,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +179,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +228,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 vector_count (int): The number of vectors stored in this namespace. Note that updates to this field may lag behind updates to the underlying index and corresponding query results, etc. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +245,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/pagination.py b/pinecone/core/openapi/db_data/model/pagination.py index f93898c8..6ddb4973 100644 --- a/pinecone/core/openapi/db_data/model/pagination.py +++ b/pinecone/core/openapi/db_data/model/pagination.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -138,6 +138,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 next (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +157,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +179,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +228,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 next (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +245,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/protobuf_any.py b/pinecone/core/openapi/db_data/model/protobuf_any.py index 6e2c3c54..fe7f54c2 100644 --- a/pinecone/core/openapi/db_data/model/protobuf_any.py +++ b/pinecone/core/openapi/db_data/model/protobuf_any.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
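Pagination.next is an opaque continuation token, so consuming the new namespace listing end-to-end follows the usual token loop. This diff does not show the client method that returns a ListNamespacesResponse, so fetch_page below is a hypothetical stand-in for whatever call produces one page.

    # Token-driven pagination loop; fetch_page is hypothetical (see note above).
    def iter_namespaces(fetch_page):
        token = None
        while True:
            resp = fetch_page(pagination_token=token)
            for ns in getattr(resp, "namespaces", None) or []:
                yield ns
            pagination = getattr(resp, "pagination", None)
            if pagination is None or not getattr(pagination, "next", None):
                return  # no further pages
            token = pagination.next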
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -141,6 +141,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 value (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -158,6 +160,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +182,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -226,6 +232,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 value (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -241,6 +249,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/protobuf_null_value.py b/pinecone/core/openapi/db_data/model/protobuf_null_value.py index 02ac25b8..ecf6b359 100644 --- a/pinecone/core/openapi/db_data/model/protobuf_null_value.py +++ b/pinecone/core/openapi/db_data/model/protobuf_null_value.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -95,6 +95,8 @@ def discriminator(cls): required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -167,12 +169,16 @@ def __init__(self, *args, **kwargs) -> None: if value is None: value = "NULL_VALUE" + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -253,12 +259,16 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: if value is None: value = "NULL_VALUE" + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/query_request.py b/pinecone/core/openapi/db_data/model/query_request.py index 23763e6d..989ad83e 100644 --- a/pinecone/core/openapi/db_data/model/query_request.py +++ b/pinecone/core/openapi/db_data/model/query_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -170,15 +170,17 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) namespace (str): The namespace to query. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 include_values (bool): Indicates whether vector values are included in the response. [optional] if omitted the server will use the default value of False. # noqa: E501 include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. [optional] if omitted the server will use the default value of False. # noqa: E501 - queries ([QueryVector]): DEPRECATED.
The query vectors. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`. [optional] # noqa: E501 + queries ([QueryVector]): DEPRECATED. Use `vector` or `id` instead. [optional] # noqa: E501 vector ([float]): The query vector. This should be the same length as the dimension of the index being queried. Each `query` request can contain only one of the parameters `id` or `vector`. [optional] # noqa: E501 sparse_vector (SparseValues): [optional] # noqa: E501 - id (str): The unique ID of the vector to be used as a query vector. Each `query` request can contain only one of the parameters `queries`, `vector`, or `id`. [optional] # noqa: E501 + id (str): The unique ID of the vector to be used as a query vector. Each request can contain either the `vector` or `id` parameter. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -196,6 +198,8 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -217,6 +221,8 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -265,15 +271,17 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) namespace (str): The namespace to query. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/data/understanding-metadata). [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 include_values (bool): Indicates whether vector values are included in the response. [optional] if omitted the server will use the default value of False. # noqa: E501 include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. [optional] if omitted the server will use the default value of False. # noqa: E501 - queries ([QueryVector]): DEPRECATED. The query vectors. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`. [optional] # noqa: E501 + queries ([QueryVector]): DEPRECATED. Use `vector` or `id` instead. [optional] # noqa: E501 vector ([float]): The query vector. This should be the same length as the dimension of the index being queried. Each `query` request can contain only one of the parameters `id` or `vector`.
[optional] # noqa: E501 sparse_vector (SparseValues): [optional] # noqa: E501 - id (str): The unique ID of the vector to be used as a query vector. Each `query` request can contain only one of the parameters `queries`, `vector`, or `id`. [optional] # noqa: E501 + id (str): The unique ID of the vector to be used as a query vector. Each request can contain either the `vector` or `id` parameter. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -289,6 +297,8 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/query_response.py b/pinecone/core/openapi/db_data/model/query_response.py index fde3a55d..9d693f34 100644 --- a/pinecone/core/openapi/db_data/model/query_response.py +++ b/pinecone/core/openapi/db_data/model/query_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -159,6 +159,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 usage (Usage): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -176,6 +178,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -196,6 +200,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -246,6 +252,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 usage (Usage): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -261,6 +269,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/query_vector.py b/pinecone/core/openapi/db_data/model/query_vector.py index 30f4c8de..3ea0196f 100644 --- a/pinecone/core/openapi/db_data/model/query_vector.py +++ b/pinecone/core/openapi/db_data/model/query_vector.py @@ -5,7 +5,7 @@ 
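The query_request docstring changes above retire the deprecated `queries` parameter and tighten the guidance that a request carries either `vector` or `id`, never both. A sketch of the two mutually exclusive forms, with an illustrative index name and dimension:

    from pinecone import Pinecone

    pc = Pinecone(api_key="YOUR_API_KEY")
    index = pc.Index("example-index")  # hypothetical index name

    # Form 1: query by vector (length must match the index dimension; 8 here).
    by_vector = index.query(vector=[0.1] * 8, top_k=3, include_metadata=True)

    # Form 2: query by the ID of an already-upserted vector.
    by_id = index.query(id="doc1#chunk1", top_k=3, include_metadata=True)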
This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -163,6 +163,8 @@ def _from_openapi_data(cls: Type[T], values, *args, **kwargs) -> T: # noqa: E50 filter ({str: (bool, dict, float, int, list, str, none_type)}): An override for the metadata filter to apply. This replaces the request-level filter. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -180,6 +182,8 @@ def _from_openapi_data(cls: Type[T], values, *args, **kwargs) -> T: # noqa: E50 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -201,6 +205,8 @@ def _from_openapi_data(cls: Type[T], values, *args, **kwargs) -> T: # noqa: E50 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -254,6 +260,8 @@ def __init__(self, values, *args, **kwargs) -> None: # noqa: E501 filter ({str: (bool, dict, float, int, list, str, none_type)}): An override for the metadata filter to apply. This replaces the request-level filter. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -269,6 +277,8 @@ def __init__(self, values, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/rpc_status.py b/pinecone/core/openapi/db_data/model/rpc_status.py index fa824641..ac8da180 100644 --- a/pinecone/core/openapi/db_data/model/rpc_status.py +++ b/pinecone/core/openapi/db_data/model/rpc_status.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -152,6 +152,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 details ([ProtobufAny]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -169,6 +171,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -189,6 +193,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -238,6 +244,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 details ([ProtobufAny]): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -253,6 +261,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/scored_vector.py b/pinecone/core/openapi/db_data/model/scored_vector.py index 87d51d94..61f28530 100644 --- a/pinecone/core/openapi/db_data/model/scored_vector.py +++ b/pinecone/core/openapi/db_data/model/scored_vector.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -162,6 +162,8 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 metadata ({str: (bool, dict, float, int, list, str, none_type)}): This is the metadata, if it is requested. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -179,6 +181,8 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -200,6 +204,8 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -253,6 +259,8 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 metadata ({str: (bool, dict, float, int, list, str, none_type)}): This is the metadata, if it is requested. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -268,6 +276,8 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_request.py b/pinecone/core/openapi/db_data/model/search_records_request.py index 9393c415..19b0ba55 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request.py +++ b/pinecone/core/openapi/db_data/model/search_records_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -156,10 +156,12 @@ def _from_openapi_data(cls: Type[T], query, *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - fields ([str]): The fields to return in the search results. [optional] # noqa: E501 + fields ([str]): The fields to return in the search results. If not specified, the response will include all fields. 
[optional] # noqa: E501 rerank (SearchRecordsRequestRerank): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -177,6 +179,8 @@ def _from_openapi_data(cls: Type[T], query, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -198,6 +202,8 @@ def _from_openapi_data(cls: Type[T], query, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -245,10 +251,12 @@ def __init__(self, query, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - fields ([str]): The fields to return in the search results. [optional] # noqa: E501 + fields ([str]): The fields to return in the search results. If not specified, the response will include all fields. [optional] # noqa: E501 rerank (SearchRecordsRequestRerank): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -264,6 +272,8 @@ def __init__(self, query, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_request_query.py b/pinecone/core/openapi/db_data/model/search_records_request_query.py index 5cedde38..790dbf82 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request_query.py +++ b/pinecone/core/openapi/db_data/model/search_records_request_query.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -121,7 +121,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 """SearchRecordsRequestQuery - a model defined in OpenAPI Args: - top_k (int): The number of results to return for each search. + top_k (int): The number of similar records to return. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -154,12 +154,14 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. 
See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 inputs ({str: (bool, dict, float, int, list, str, none_type)}): [optional] # noqa: E501 vector (SearchRecordsVector): [optional] # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -177,6 +179,8 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -198,6 +202,8 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -212,7 +218,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 """SearchRecordsRequestQuery - a model defined in OpenAPI Args: - top_k (int): The number of results to return for each search. + top_k (int): The number of similar records to return. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -245,12 +251,14 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 inputs ({str: (bool, dict, float, int, list, str, none_type)}): [optional] # noqa: E501 vector (SearchRecordsVector): [optional] # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -266,6 +274,8 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_request_rerank.py b/pinecone/core/openapi/db_data/model/search_records_request_rerank.py index 6b6515bb..b365a7d3 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request_rerank.py +++ b/pinecone/core/openapi/db_data/model/search_records_request_rerank.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
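The reworded docstrings above describe the records-search request surface: `query.top_k` is the number of similar records to return, `filter` uses the standard metadata-filter syntax, and omitting `fields` returns every field in the response. A hedged end-to-end sketch; the namespace, query text, and field names are illustrative:

```python
# `index` is a data-plane handle as in the earlier query sketch.
results = index.search_records(
    namespace="example-namespace",
    query={
        "inputs": {"text": "Disease prevention"},   # text is embedded server-side
        "top_k": 4,                                 # number of similar records
        "filter": {"category": {"$eq": "health"}},  # metadata filter
    },
    fields=["chunk_text", "category"],  # omit to receive all fields
)
```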
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -113,8 +113,8 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: """SearchRecordsRequestRerank - a model defined in OpenAPI Args: - model (str): The name of the [reranking model](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) to use. - rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models). + model (str): The name of the [reranking model](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) to use. + rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/search/rerank-results#reranking-models). Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -148,10 +148,12 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: through its discriminator because we passed in _visited_composed_classes = (Animal,) top_n (int): The number of top results to return after reranking. Defaults to top_k. [optional] # noqa: E501 - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) for available model parameters. [optional] # noqa: E501 + parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 query (str): The query to rerank documents against. If a specific rerank query is specified, it overwrites the query input that was provided at the top level. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -169,6 +171,8 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -191,6 +195,8 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -205,8 +211,8 @@ def __init__(self, model, rank_fields, *args, **kwargs) -> None: # noqa: E501 """SearchRecordsRequestRerank - a model defined in OpenAPI Args: - model (str): The name of the [reranking model](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) to use. - rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. 
The number of fields supported is [model-specific](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models). + model (str): The name of the [reranking model](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) to use. + rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/search/rerank-results#reranking-models). Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -240,10 +246,12 @@ def __init__(self, model, rank_fields, *args, **kwargs) -> None: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) top_n (int): The number of top results to return after reranking. Defaults to top_k. [optional] # noqa: E501 - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) for available model parameters. [optional] # noqa: E501 + parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 query (str): The query to rerank documents against. If a specific rerank query is specified, it overwrites the query input that was provided at the top level. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -259,6 +267,8 @@ def __init__(self, model, rank_fields, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_response.py b/pinecone/core/openapi/db_data/model/search_records_response.py index 440fa71b..229b60dd 100644 --- a/pinecone/core/openapi/db_data/model/search_records_response.py +++ b/pinecone/core/openapi/db_data/model/search_records_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
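SearchRecordsRequestRerank pairs a `model` with `rank_fields` (defaulting to `["text"]`), an optional `top_n`, model-specific `parameters`, and an optional override `query`. A hedged sketch of attaching a rerank stage to a records search; the model name is one of Pinecone's hosted rerankers but is illustrative here:

```python
# `index` is a data-plane handle as in the earlier query sketch.
results = index.search_records(
    namespace="example-namespace",
    query={"inputs": {"text": "Disease prevention"}, "top_k": 10},
    rerank={
        "model": "bge-reranker-v2-m3",  # hosted reranking model (illustrative)
        "rank_fields": ["chunk_text"],  # defaults to ["text"] when omitted
        "top_n": 3,                     # defaults to top_k when omitted
    },
)
```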
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -155,6 +155,8 @@ def _from_openapi_data(cls: Type[T], result, usage, *args, **kwargs) -> T: # no _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -172,6 +174,8 @@ def _from_openapi_data(cls: Type[T], result, usage, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -194,6 +198,8 @@ def _from_openapi_data(cls: Type[T], result, usage, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -244,6 +250,8 @@ def __init__(self, result, usage, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -259,6 +267,8 @@ def __init__(self, result, usage, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_response_result.py b/pinecone/core/openapi/db_data/model/search_records_response_result.py index 87ecf017..ab04277f 100644 --- a/pinecone/core/openapi/db_data/model/search_records_response_result.py +++ b/pinecone/core/openapi/db_data/model/search_records_response_result.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -148,6 +148,8 @@ def _from_openapi_data(cls: Type[T], hits, *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -165,6 +167,8 @@ def _from_openapi_data(cls: Type[T], hits, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -186,6 +190,8 @@ def _from_openapi_data(cls: Type[T], hits, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -235,6 +241,8 @@ def __init__(self, hits, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -250,6 +258,8 @@ def __init__(self, hits, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_records_vector.py b/pinecone/core/openapi/db_data/model/search_records_vector.py index d2466cd7..34afe2cf 100644 --- a/pinecone/core/openapi/db_data/model/search_records_vector.py +++ b/pinecone/core/openapi/db_data/model/search_records_vector.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -152,6 +152,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 sparse_indices ([int]): The sparse embedding indices. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -169,6 +171,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -189,6 +193,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -238,6 +244,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 sparse_indices ([int]): The sparse embedding indices. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -253,6 +261,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_usage.py b/pinecone/core/openapi/db_data/model/search_usage.py index 565efc9f..c4444c8b 100644 --- a/pinecone/core/openapi/db_data/model/search_usage.py +++ b/pinecone/core/openapi/db_data/model/search_usage.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -150,6 +150,8 @@ def _from_openapi_data(cls: Type[T], read_units, *args, **kwargs) -> T: # noqa: rerank_units (int): The number of rerank units consumed by this operation. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -167,6 +169,8 @@ def _from_openapi_data(cls: Type[T], read_units, *args, **kwargs) -> T: # noqa: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -188,6 +192,8 @@ def _from_openapi_data(cls: Type[T], read_units, *args, **kwargs) -> T: # noqa: required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -239,6 +245,8 @@ def __init__(self, read_units, *args, **kwargs) -> None: # noqa: E501 rerank_units (int): The number of rerank units consumed by this operation. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -254,6 +262,8 @@ def __init__(self, read_units, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/search_vector.py b/pinecone/core/openapi/db_data/model/search_vector.py index 417bde50..00be22b5 100644 --- a/pinecone/core/openapi/db_data/model/search_vector.py +++ b/pinecone/core/openapi/db_data/model/search_vector.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -146,6 +146,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 values (VectorValues): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -163,6 +165,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -183,6 +187,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -230,6 +236,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 values (VectorValues): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -245,6 +253,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/single_query_results.py b/pinecone/core/openapi/db_data/model/single_query_results.py index b10e442d..d5636688 100644 --- a/pinecone/core/openapi/db_data/model/single_query_results.py +++ b/pinecone/core/openapi/db_data/model/single_query_results.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -149,6 +149,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 namespace (str): The namespace for the vectors. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -166,6 +168,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -186,6 +190,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -234,6 +240,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 namespace (str): The namespace for the vectors. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -249,6 +257,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/sparse_values.py b/pinecone/core/openapi/db_data/model/sparse_values.py index 0883d17e..8100f664 100644 --- a/pinecone/core/openapi/db_data/model/sparse_values.py +++ b/pinecone/core/openapi/db_data/model/sparse_values.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -146,6 +146,8 @@ def _from_openapi_data(cls: Type[T], indices, values, *args, **kwargs) -> T: # _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -163,6 +165,8 @@ def _from_openapi_data(cls: Type[T], indices, values, *args, **kwargs) -> T: # ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -185,6 +189,8 @@ def _from_openapi_data(cls: Type[T], indices, values, *args, **kwargs) -> T: # required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -235,6 +241,8 @@ def __init__(self, indices, values, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -250,6 +258,8 @@ def __init__(self, indices, values, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/start_import_request.py b/pinecone/core/openapi/db_data/model/start_import_request.py index 891de0b2..20e23275 100644 --- a/pinecone/core/openapi/db_data/model/start_import_request.py +++ b/pinecone/core/openapi/db_data/model/start_import_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -120,7 +120,7 @@ def _from_openapi_data(cls: Type[T], uri, *args, **kwargs) -> T: # noqa: E501 """StartImportRequest - a model defined in OpenAPI Args: - uri (str): The [URI prefix](https://docs.pinecone.io/guides/data/understanding-imports#directory-structure) under which the data to import is available. All data within this prefix will be listed then imported into the target index. Currently only `s3://` URIs are supported. + uri (str): The [URI prefix](https://docs.pinecone.io/guides/index-data/import-data#prepare-your-data) under which the data to import is available. All data within this prefix will be listed then imported into the target index. Currently only `s3://` URIs are supported. 
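StartImportRequest's `uri` docstring now points at the import-data guide: everything under the prefix is listed and imported, and only `s3://` URIs are supported. A hedged sketch of starting and tracking a bulk import; the bucket path and error mode are illustrative:

```python
# `index` is a data-plane handle as in the earlier query sketch.
import_op = index.start_import(
    uri="s3://example-bucket/imports/",  # illustrative URI prefix
    error_mode="CONTINUE",               # skip bad records rather than aborting
)
print(import_op.id)  # StartImportResponse carries the operation's unique ID
```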
Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -157,6 +157,8 @@ def _from_openapi_data(cls: Type[T], uri, *args, **kwargs) -> T: # noqa: E501 error_mode (ImportErrorMode): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -174,6 +176,8 @@ def _from_openapi_data(cls: Type[T], uri, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -195,6 +199,8 @@ def _from_openapi_data(cls: Type[T], uri, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -209,7 +215,7 @@ def __init__(self, uri, *args, **kwargs) -> None: # noqa: E501 """StartImportRequest - a model defined in OpenAPI Args: - uri (str): The [URI prefix](https://docs.pinecone.io/guides/data/understanding-imports#directory-structure) under which the data to import is available. All data within this prefix will be listed then imported into the target index. Currently only `s3://` URIs are supported. + uri (str): The [URI prefix](https://docs.pinecone.io/guides/index-data/import-data#prepare-your-data) under which the data to import is available. All data within this prefix will be listed then imported into the target index. Currently only `s3://` URIs are supported. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -246,6 +252,8 @@ def __init__(self, uri, *args, **kwargs) -> None: # noqa: E501 error_mode (ImportErrorMode): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -261,6 +269,8 @@ def __init__(self, uri, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/start_import_response.py b/pinecone/core/openapi/db_data/model/start_import_response.py index ede99271..d8511fe8 100644 --- a/pinecone/core/openapi/db_data/model/start_import_response.py +++ b/pinecone/core/openapi/db_data/model/start_import_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -140,6 +140,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 id (str): Unique identifier for the import operation. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +159,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -177,6 +181,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -224,6 +230,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 id (str): Unique identifier for the import operation. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -239,6 +247,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/update_request.py b/pinecone/core/openapi/db_data/model/update_request.py index cfa9f344..c45849b1 100644 --- a/pinecone/core/openapi/db_data/model/update_request.py +++ b/pinecone/core/openapi/db_data/model/update_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -163,6 +163,8 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 namespace (str): The namespace containing the vector to update. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -180,6 +182,8 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -201,6 +205,8 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -254,6 +260,8 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 namespace (str): The namespace containing the vector to update. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -269,6 +277,8 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/upsert_record.py b/pinecone/core/openapi/db_data/model/upsert_record.py index b95940eb..31445ab4 100644 --- a/pinecone/core/openapi/db_data/model/upsert_record.py +++ b/pinecone/core/openapi/db_data/model/upsert_record.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -140,6 +140,8 @@ def _from_openapi_data(cls: Type[T], _id, *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +159,8 @@ def _from_openapi_data(cls: Type[T], _id, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -178,6 +182,8 @@ def _from_openapi_data(cls: Type[T], _id, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -227,6 +233,8 @@ def __init__(self, _id, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -242,6 +250,8 @@ def __init__(self, _id, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/upsert_request.py b/pinecone/core/openapi/db_data/model/upsert_request.py index c842647a..fccfb3c8 100644 --- a/pinecone/core/openapi/db_data/model/upsert_request.py +++ b/pinecone/core/openapi/db_data/model/upsert_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -151,6 +151,8 @@ def _from_openapi_data(cls: Type[T], vectors, *args, **kwargs) -> T: # noqa: E5 namespace (str): The namespace where you upsert vectors. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +170,8 @@ def _from_openapi_data(cls: Type[T], vectors, *args, **kwargs) -> T: # noqa: E5 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -189,6 +193,8 @@ def _from_openapi_data(cls: Type[T], vectors, *args, **kwargs) -> T: # noqa: E5 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -239,6 +245,8 @@ def __init__(self, vectors, *args, **kwargs) -> None: # noqa: E501 namespace (str): The namespace where you upsert vectors. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -254,6 +262,8 @@ def __init__(self, vectors, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/upsert_response.py b/pinecone/core/openapi/db_data/model/upsert_response.py index 2ba3056d..57098ed2 100644 --- a/pinecone/core/openapi/db_data/model/upsert_response.py +++ b/pinecone/core/openapi/db_data/model/upsert_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -138,6 +138,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 upserted_count (int): The number of vectors upserted. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +157,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +179,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +228,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 upserted_count (int): The number of vectors upserted. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +245,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/usage.py b/pinecone/core/openapi/db_data/model/usage.py index 7ddc0c28..61f3faa5 100644 --- a/pinecone/core/openapi/db_data/model/usage.py +++ b/pinecone/core/openapi/db_data/model/usage.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -138,6 +138,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 read_units (int): The number of read units consumed by this operation. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +157,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +179,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +228,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 read_units (int): The number of read units consumed by this operation. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +245,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/vector.py b/pinecone/core/openapi/db_data/model/vector.py index 167806f9..a83536a5 100644 --- a/pinecone/core/openapi/db_data/model/vector.py +++ b/pinecone/core/openapi/db_data/model/vector.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -160,6 +160,8 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 metadata ({str: (bool, dict, float, int, list, str, none_type)}): This is the metadata included in the request. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -177,6 +179,8 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -198,6 +202,8 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -250,6 +256,8 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 metadata ({str: (bool, dict, float, int, list, str, none_type)}): This is the metadata included in the request. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -265,6 +273,8 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/model/vector_values.py b/pinecone/core/openapi/db_data/model/vector_values.py index 7777cdc5..b18494cd 100644 --- a/pinecone/core/openapi/db_data/model/vector_values.py +++ b/pinecone/core/openapi/db_data/model/vector_values.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -93,6 +93,8 @@ def discriminator(cls): required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -169,12 +171,16 @@ def __init__(self, *args, **kwargs) -> None: valid_classes=(self.__class__,), ) + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -259,12 +265,16 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: valid_classes=(self.__class__,), ) + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/db_data/models/__init__.py b/pinecone/core/openapi/db_data/models/__init__.py index b5fa9fa0..34e9a6d8 100644 --- a/pinecone/core/openapi/db_data/models/__init__.py +++ b/pinecone/core/openapi/db_data/models/__init__.py @@ -20,7 +20,9 @@ from pinecone.core.openapi.db_data.model.index_description import IndexDescription from pinecone.core.openapi.db_data.model.list_imports_response import ListImportsResponse from pinecone.core.openapi.db_data.model.list_item import ListItem +from pinecone.core.openapi.db_data.model.list_namespaces_response import ListNamespacesResponse from pinecone.core.openapi.db_data.model.list_response import ListResponse +from pinecone.core.openapi.db_data.model.namespace_description import NamespaceDescription from pinecone.core.openapi.db_data.model.namespace_summary import NamespaceSummary from pinecone.core.openapi.db_data.model.pagination import Pagination from pinecone.core.openapi.db_data.model.protobuf_any import ProtobufAny diff --git a/pinecone/core/openapi/inference/__init__.py b/pinecone/core/openapi/inference/__init__.py index 3e258e4f..9bf0fcdb 100644 --- a/pinecone/core/openapi/inference/__init__.py +++ b/pinecone/core/openapi/inference/__init__.py @@ -7,7 +7,7 @@ This file is @generated using OpenAPI. 
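Beyond the version bump, the models package now exports two new types, `ListNamespacesResponse` and `NamespaceDescription`. A hedged sketch of the namespace operations they presumably back; the helper names `list_namespaces` and `describe_namespace` are assumptions inferred from the exported models, not confirmed by this diff:

```python
# `index` is a data-plane handle as in the earlier query sketch.
# Assumed helpers backed by ListNamespacesResponse / NamespaceDescription:
for ns in index.list_namespaces():  # assumed iterator over namespaces
    print(ns.name)                  # attribute name assumed

detail = index.describe_namespace(namespace="example-namespace")  # assumed
print(detail)
```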
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -17,7 +17,7 @@ from pinecone.openapi_support.api_client import ApiClient # import Configuration -from pinecone.openapi_support.configuration import Configuration +from pinecone.config.openapi_configuration import Configuration # import exceptions from pinecone.openapi_support.exceptions import PineconeException @@ -27,4 +27,4 @@ from pinecone.openapi_support.exceptions import PineconeApiKeyError from pinecone.openapi_support.exceptions import PineconeApiException -API_VERSION = "2025-01" +API_VERSION = "2025-04" diff --git a/pinecone/core/openapi/inference/api/inference_api.py b/pinecone/core/openapi/inference/api/inference_api.py index 1dc0e266..5c9d2efe 100644 --- a/pinecone/core/openapi/inference/api/inference_api.py +++ b/pinecone/core/openapi/inference/api/inference_api.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -26,6 +26,8 @@ from pinecone.core.openapi.inference.model.embed_request import EmbedRequest from pinecone.core.openapi.inference.model.embeddings_list import EmbeddingsList from pinecone.core.openapi.inference.model.error_response import ErrorResponse +from pinecone.core.openapi.inference.model.model_info import ModelInfo +from pinecone.core.openapi.inference.model.model_info_list import ModelInfoList from pinecone.core.openapi.inference.model.rerank_request import RerankRequest from pinecone.core.openapi.inference.model.rerank_result import RerankResult @@ -42,9 +44,9 @@ def __init__(self, api_client=None) -> None: self.api_client = api_client def __embed(self, **kwargs: ExtraOpenApiKwargsTypedDict): - """Embed data # noqa: E501 + """Generate vectors # noqa: E501 - Generate embeddings for input data. For guidance and examples, see [Generate embeddings](https://docs.pinecone.io/guides/inference/generate-embeddings). # noqa: E501 + Generate vector embeddings for input data. This endpoint uses [Pinecone Inference](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -108,10 +110,147 @@ def __embed(self, **kwargs: ExtraOpenApiKwargsTypedDict): callable=__embed, ) + def __get_model(self, model_name, **kwargs: ExtraOpenApiKwargsTypedDict): + """Get available model details. # noqa: E501 + + Get model details. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_model(model_name, async_req=True) + >>> result = thread.get() + + Args: + model_name (str): The name of the model to look up. + + Keyword Args: + _return_http_data_only (bool): response data without the HTTP status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done on the data sent to the server. + Default is True.
+ _check_return_type (bool): specifies if type checking + should be done on the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + ModelInfo + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["model_name"] = model_name + return self.call_with_http_info(**kwargs) + + self.get_model = _Endpoint( + settings={ + "response_type": (ModelInfo,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/models/{model_name}", + "operation_id": "get_model", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["model_name"], + "required": ["model_name"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"model_name": (str,)}, + "attribute_map": {"model_name": "model_name"}, + "location_map": {"model_name": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__get_model, + ) + + def __list_models(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """Get available models. # noqa: E501 + + Get available models. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_models(async_req=True) + >>> result = thread.get() + + + Keyword Args: + type (str): Filter models by type ('embed' or 'rerank'). [optional] + vector_type (str): Filter embedding models by vector type ('dense' or 'sparse'). Only relevant when `type=embed`. [optional] + _return_http_data_only (bool): response data without the HTTP status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done on the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done on the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + ModelInfoList + If the method is called asynchronously, returns the request + thread.
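The sync `get_model` endpoint above and the `list_models` endpoint defined next give the low-level generated client read access to the model catalog. A hypothetical usage sketch; the `ApiClient` wiring (credentials, configuration) and the model name are assumptions, not shown in this diff:

from pinecone.openapi_support.api_client import ApiClient
from pinecone.core.openapi.inference.api.inference_api import InferenceApi

api = InferenceApi(api_client=ApiClient())  # auth/config on ApiClient assumed
catalog = api.list_models(type="embed", vector_type="dense")  # returns ModelInfoList
details = api.get_model("example-embed-model")  # returns ModelInfo; name is illustrative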
+ """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_models = _Endpoint( + settings={ + "response_type": (ModelInfoList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/models", + "operation_id": "list_models", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["type", "vector_type"], + "required": [], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"type": (str,), "vector_type": (str,)}, + "attribute_map": {"type": "type", "vector_type": "vector_type"}, + "location_map": {"type": "query", "vector_type": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_models, + ) + def __rerank(self, **kwargs: ExtraOpenApiKwargsTypedDict): """Rerank documents # noqa: E501 - Rerank documents according to their relevance to a query. For guidance and examples, see [Rerank documents](https://docs.pinecone.io/guides/inference/rerank). # noqa: E501 + Rerank documents according to their relevance to a query. For guidance and examples, see [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -188,9 +327,9 @@ def __init__(self, api_client=None) -> None: self.api_client = api_client async def __embed(self, **kwargs): - """Embed data # noqa: E501 + """Generate vectors # noqa: E501 - Generate embeddings for input data. For guidance and examples, see [Generate embeddings](https://docs.pinecone.io/guides/inference/generate-embeddings). # noqa: E501 + Generate vector embeddings for input data. This endpoint uses [Pinecone Inference](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding). # noqa: E501 @@ -247,10 +386,133 @@ async def __embed(self, **kwargs): callable=__embed, ) + async def __get_model(self, model_name, **kwargs): + """Get available model details. # noqa: E501 + + Get model details. # noqa: E501 + + + Args: + model_name (str): The name of the model to look up. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
+ + Returns: + ModelInfo + """ + self._process_openapi_kwargs(kwargs) + kwargs["model_name"] = model_name + return await self.call_with_http_info(**kwargs) + + self.get_model = _AsyncioEndpoint( + settings={ + "response_type": (ModelInfo,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/models/{model_name}", + "operation_id": "get_model", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["model_name"], + "required": ["model_name"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"model_name": (str,)}, + "attribute_map": {"model_name": "model_name"}, + "location_map": {"model_name": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__get_model, + ) + + async def __list_models(self, **kwargs): + """Get available models. # noqa: E501 + + Get available models. # noqa: E501 + + + + Keyword Args: + type (str): Filter models by type ('embed' or 'rerank'). [optional] + vector_type (str): Filter embedding models by vector type ('dense' or 'sparse'). Only relevant when `type=embed`. [optional] + _return_http_data_only (bool): response data without the HTTP status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done on the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done on the data received from the server. + Default is True. + + Returns: + ModelInfoList + """ + self._process_openapi_kwargs(kwargs) + return await self.call_with_http_info(**kwargs) + + self.list_models = _AsyncioEndpoint( + settings={ + "response_type": (ModelInfoList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/models", + "operation_id": "list_models", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["type", "vector_type"], + "required": [], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"type": (str,), "vector_type": (str,)}, + "attribute_map": {"type": "type", "vector_type": "vector_type"}, + "location_map": {"type": "query", "vector_type": "query"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_models, + ) + async def __rerank(self, **kwargs): """Rerank documents # noqa: E501 - Rerank documents according to their relevance to a query. For guidance and examples, see [Rerank documents](https://docs.pinecone.io/guides/inference/rerank). # noqa: E501 + Rerank documents according to their relevance to a query. For guidance and examples, see [Rerank results](https://docs.pinecone.io/guides/search/rerank-results).
# noqa: E501 diff --git a/pinecone/core/openapi/inference/model/dense_embedding.py b/pinecone/core/openapi/inference/model/dense_embedding.py index fe841f99..50b6a725 100644 --- a/pinecone/core/openapi/inference/model/dense_embedding.py +++ b/pinecone/core/openapi/inference/model/dense_embedding.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -27,12 +27,6 @@ from pinecone.openapi_support.exceptions import PineconeApiAttributeError -def lazy_import(): - from pinecone.core.openapi.inference.model.vector_type import VectorType - - globals()["VectorType"] = VectorType - - from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property @@ -75,7 +69,6 @@ def additional_properties_type(cls): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - lazy_import() return (bool, dict, float, int, list, str, none_type) # noqa: E501 _nullable = False @@ -90,10 +83,9 @@ def openapi_types(cls): openapi_types (dict): The key is attribute name and the value is attribute type. """ - lazy_import() return { "values": ([float],), # noqa: E501 - "vector_type": (VectorType,), # noqa: E501 + "vector_type": (str,), # noqa: E501 } @cached_class_property @@ -116,7 +108,7 @@ def _from_openapi_data(cls: Type[T], values, vector_type, *args, **kwargs) -> T: Args: values ([float]): The dense embedding values. - vector_type (VectorType): + vector_type (str): Indicates whether this is a 'dense' or 'sparse' embedding. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -151,6 +143,8 @@ def _from_openapi_data(cls: Type[T], values, vector_type, *args, **kwargs) -> T: _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +162,8 @@ def _from_openapi_data(cls: Type[T], values, vector_type, *args, **kwargs) -> T: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -190,6 +186,8 @@ def _from_openapi_data(cls: Type[T], values, vector_type, *args, **kwargs) -> T: required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -205,7 +203,7 @@ def __init__(self, values, vector_type, *args, **kwargs) -> None: # noqa: E501 Args: values ([float]): The dense embedding values. - vector_type (VectorType): + vector_type (str): Indicates whether this is a 'dense' or 'sparse' embedding. 
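With `vector_type` now typed as a plain `str` instead of the removed `VectorType` model, construction takes a bare string. A small sketch with illustrative values:

from pinecone.core.openapi.inference.model.dense_embedding import DenseEmbedding

# vector_type is now passed as a bare "dense"/"sparse" string.
emb = DenseEmbedding(values=[0.1, 0.2, 0.3], vector_type="dense")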
Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -240,6 +238,8 @@ def __init__(self, values, vector_type, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -255,6 +255,8 @@ def __init__(self, values, vector_type, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/document.py b/pinecone/core/openapi/inference/model/document.py index 159969aa..79ebb5d0 100644 --- a/pinecone/core/openapi/inference/model/document.py +++ b/pinecone/core/openapi/inference/model/document.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -133,6 +133,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -150,6 +152,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -170,6 +174,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -216,6 +222,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -231,6 +239,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embed_request.py b/pinecone/core/openapi/inference/model/embed_request.py index 6f91a05a..0141f9db 100644 --- a/pinecone/core/openapi/inference/model/embed_request.py +++ b/pinecone/core/openapi/inference/model/embed_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -117,7 +117,7 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no """EmbedRequest - a model defined in OpenAPI Args: - model (str): The [model](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) to use for embedding generation. + model (str): The [model](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) to use for embedding generation. inputs ([EmbedRequestInputs]): List of inputs to generate embeddings for. Keyword Args: @@ -151,9 +151,11 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) for available model parameters. [optional] # noqa: E501 + parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) for available model parameters. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -171,6 +173,8 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -193,6 +197,8 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -207,7 +213,7 @@ def __init__(self, model, inputs, *args, **kwargs) -> None: # noqa: E501 """EmbedRequest - a model defined in OpenAPI Args: - model (str): The [model](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) to use for embedding generation. + model (str): The [model](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) to use for embedding generation. inputs ([EmbedRequestInputs]): List of inputs to generate embeddings for. Keyword Args: @@ -241,9 +247,11 @@ def __init__(self, model, inputs, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) for available model parameters. [optional] # noqa: E501 + parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. 
Refer to the [model guide](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) for available model parameters. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -259,6 +267,8 @@ def __init__(self, model, inputs, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embed_request_inputs.py b/pinecone/core/openapi/inference/model/embed_request_inputs.py index 44832a2e..55fa9f69 100644 --- a/pinecone/core/openapi/inference/model/embed_request_inputs.py +++ b/pinecone/core/openapi/inference/model/embed_request_inputs.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -138,6 +138,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 text (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -155,6 +157,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -175,6 +179,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -222,6 +228,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 text (str): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -237,6 +245,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embedding.py b/pinecone/core/openapi/inference/model/embedding.py index a7150a27..8b0bf05b 100644 --- a/pinecone/core/openapi/inference/model/embedding.py +++ b/pinecone/core/openapi/inference/model/embedding.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -30,11 +30,9 @@ def lazy_import(): from pinecone.core.openapi.inference.model.dense_embedding import DenseEmbedding from pinecone.core.openapi.inference.model.sparse_embedding import SparseEmbedding - from pinecone.core.openapi.inference.model.vector_type import VectorType globals()["DenseEmbedding"] = DenseEmbedding globals()["SparseEmbedding"] = SparseEmbedding - globals()["VectorType"] = VectorType from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar @@ -96,7 +94,7 @@ def openapi_types(cls): """ lazy_import() return { - "vector_type": (VectorType,), # noqa: E501 + "vector_type": (str,), # noqa: E501 "sparse_tokens": ([str],), # noqa: E501 "values": ([float],), # noqa: E501 "sparse_values": ([float],), # noqa: E501 @@ -132,7 +130,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 """Embedding - a model defined in OpenAPI Keyword Args: - vector_type (VectorType): + vector_type (str): Indicates whether this is a 'dense' or 'sparse' embedding. _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. @@ -220,6 +218,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -237,7 +237,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 """Embedding - a model defined in OpenAPI Keyword Args: - vector_type (VectorType): + vector_type (str): Indicates whether this is a 'dense' or 'sparse' embedding. _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. @@ -274,6 +274,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 sparse_indices ([int]): The sparse embedding indices. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -289,6 +291,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embeddings_list.py b/pinecone/core/openapi/inference/model/embeddings_list.py index 0b7de1ef..87df31f8 100644 --- a/pinecone/core/openapi/inference/model/embeddings_list.py +++ b/pinecone/core/openapi/inference/model/embeddings_list.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -159,6 +159,8 @@ def _from_openapi_data(cls: Type[T], model, vector_type, data, usage, *args, **k _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -176,6 +178,8 @@ def _from_openapi_data(cls: Type[T], model, vector_type, data, usage, *args, **k ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -200,6 +204,8 @@ def _from_openapi_data(cls: Type[T], model, vector_type, data, usage, *args, **k required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -252,6 +258,8 @@ def __init__(self, model, vector_type, data, usage, *args, **kwargs) -> None: # _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -267,6 +275,8 @@ def __init__(self, model, vector_type, data, usage, *args, **kwargs) -> None: # ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/embeddings_list_usage.py b/pinecone/core/openapi/inference/model/embeddings_list_usage.py index 95e3263e..6cdea666 100644 --- a/pinecone/core/openapi/inference/model/embeddings_list_usage.py +++ b/pinecone/core/openapi/inference/model/embeddings_list_usage.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -140,6 +140,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 total_tokens (int): Total number of tokens consumed across all inputs. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +159,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -177,6 +181,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -224,6 +230,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 total_tokens (int): Total number of tokens consumed across all inputs. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -239,6 +247,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/error_response.py b/pinecone/core/openapi/inference/model/error_response.py index ce18d362..b526e6a2 100644 --- a/pinecone/core/openapi/inference/model/error_response.py +++ b/pinecone/core/openapi/inference/model/error_response.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -151,6 +151,8 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -168,6 +170,8 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -190,6 +194,8 @@ def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -240,6 +246,8 @@ def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -255,6 +263,8 @@ def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/error_response_error.py b/pinecone/core/openapi/inference/model/error_response_error.py index e6884825..595a5f1f 100644 --- a/pinecone/core/openapi/inference/model/error_response_error.py +++ b/pinecone/core/openapi/inference/model/error_response_error.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -167,6 +167,8 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -184,6 +186,8 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -206,6 +210,8 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -257,6 +263,8 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -272,6 +280,8 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/model_info.py b/pinecone/core/openapi/inference/model/model_info.py new file mode 100644 index 00000000..2d983cd9 --- /dev/null +++ b/pinecone/core/openapi/inference/model/model_info.py @@ -0,0 +1,348 @@ +""" +Pinecone Inference API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.inference.model.model_info_supported_metrics import ( + ModelInfoSupportedMetrics, + ) + from pinecone.core.openapi.inference.model.model_info_supported_parameter import ( + ModelInfoSupportedParameter, + ) + + globals()["ModelInfoSupportedMetrics"] = ModelInfoSupportedMetrics + globals()["ModelInfoSupportedParameter"] = ModelInfoSupportedParameter + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="ModelInfo") + + +class ModelInfo(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. 
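The new `ModelInfo` model carries the catalog metadata returned by `get_model` and `list_models`, including the range validations on `default_dimension`, `max_sequence_length`, and `max_batch_size` defined just below. A hypothetical construction, with every field value illustrative:

from pinecone.core.openapi.inference.model.model_info import ModelInfo

info = ModelInfo(
    model="example-embed-model",  # illustrative name
    short_description="An example dense embedding model.",
    type="embed",
    supported_parameters=[],  # list of ModelInfoSupportedParameter
    default_dimension=1024,  # must satisfy the [1, 20000] bounds defined below
)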
+ + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("default_dimension",): {"inclusive_maximum": 20000, "inclusive_minimum": 1}, + ("max_sequence_length",): {"inclusive_minimum": 1}, + ("max_batch_size",): {"inclusive_minimum": 1}, + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "model": (str,), # noqa: E501 + "short_description": (str,), # noqa: E501 + "type": (str,), # noqa: E501 + "supported_parameters": ([ModelInfoSupportedParameter],), # noqa: E501 + "vector_type": (str,), # noqa: E501 + "default_dimension": (int,), # noqa: E501 + "modality": (str,), # noqa: E501 + "max_sequence_length": (int,), # noqa: E501 + "max_batch_size": (int,), # noqa: E501 + "provider_name": (str,), # noqa: E501 + "supported_dimensions": ([int],), # noqa: E501 + "supported_metrics": (ModelInfoSupportedMetrics,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "model": "model", # noqa: E501 + "short_description": "short_description", # noqa: E501 + "type": "type", # noqa: E501 + "supported_parameters": "supported_parameters", # noqa: E501 + "vector_type": "vector_type", # noqa: E501 + "default_dimension": "default_dimension", # noqa: E501 + "modality": "modality", # noqa: E501 + "max_sequence_length": "max_sequence_length", # noqa: E501 + "max_batch_size": "max_batch_size", # noqa: E501 + "provider_name": "provider_name", # noqa: E501 + "supported_dimensions": "supported_dimensions", # noqa: E501 + "supported_metrics": "supported_metrics", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data( + cls: Type[T], model, short_description, type, supported_parameters, *args, **kwargs + ) -> T: # noqa: E501 + """ModelInfo - a model defined in OpenAPI + + Args: + model (str): The name of the model.
+ short_description (str): A summary of the model. + type (str): The type of model (e.g. 'embed' or 'rerank'). + supported_parameters ([ModelInfoSupportedParameter]): + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + vector_type (str): Whether the embedding model produces 'dense' or 'sparse' embeddings. [optional] # noqa: E501 + default_dimension (int): The default embedding model dimension (applies to dense embedding models only). [optional] # noqa: E501 + modality (str): The modality of the model (e.g. 'text'). [optional] # noqa: E501 + max_sequence_length (int): The maximum tokens per sequence supported by the model. [optional] # noqa: E501 + max_batch_size (int): The maximum batch size (number of sequences) supported by the model. [optional] # noqa: E501 + provider_name (str): The name of the provider of the model. [optional] # noqa: E501 + supported_dimensions ([int]): The list of supported dimensions for the model (applies to dense embedding models only). [optional] # noqa: E501 + supported_metrics (ModelInfoSupportedMetrics): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
% (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.model = model + self.short_description = short_description + self.type = type + self.supported_parameters = supported_parameters + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__( + self, model, short_description, type, supported_parameters, *args, **kwargs + ) -> None: # noqa: E501 + """ModelInfo - a model defined in OpenAPI + + Args: + model (str): The name of the model. + short_description (str): A summary of the model. + type (str): The type of model (e.g. 'embed' or 'rerank'). + supported_parameters ([ModelInfoSupportedParameter]): + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + vector_type (str): Whether the embedding model produces 'dense' or 'sparse' embeddings. [optional] # noqa: E501 + default_dimension (int): The default embedding model dimension (applies to dense embedding models only). [optional] # noqa: E501 + modality (str): The modality of the model (e.g. 'text'). [optional] # noqa: E501 + max_sequence_length (int): The maximum tokens per sequence supported by the model. [optional] # noqa: E501 + max_batch_size (int): The maximum batch size (number of sequences) supported by the model.
[optional] # noqa: E501 + provider_name (str): The name of the provider of the model. [optional] # noqa: E501 + supported_dimensions ([int]): The list of supported dimensions for the model (applies to dense embedding models only). [optional] # noqa: E501 + supported_metrics (ModelInfoSupportedMetrics): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.model = model + self.short_description = short_description + self.type = type + self.supported_parameters = supported_parameters + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/inference/model/model_info_list.py b/pinecone/core/openapi/inference/model/model_info_list.py new file mode 100644 index 00000000..a47cb910 --- /dev/null +++ b/pinecone/core/openapi/inference/model/model_info_list.py @@ -0,0 +1,278 @@ +""" +Pinecone Inference API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.inference.model.model_info import ModelInfo + + globals()["ModelInfo"] = ModelInfo + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="ModelInfoList") + + +class ModelInfoList(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and for var_name this is (var_name,).
The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "models": ([ModelInfo],) # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "models": "models" # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """ModelInfoList - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog.
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + models ([ModelInfo]): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """ModelInfoList - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog.
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + models ([ModelInfo]): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/inference/model/vector_type.py b/pinecone/core/openapi/inference/model/model_info_metric.py similarity index 87% rename from pinecone/core/openapi/inference/model/vector_type.py rename to pinecone/core/openapi/inference/model/model_info_metric.py index ffeb8706..0dbcbf1f 100644 --- a/pinecone/core/openapi/inference/model/vector_type.py +++ b/pinecone/core/openapi/inference/model/model_info_metric.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -30,10 +30,10 @@ from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property -T = TypeVar("T", bound="VectorType") +T = TypeVar("T", bound="ModelInfoMetric") -class VectorType(ModelSimple): +class ModelInfoMetric(ModelSimple): """NOTE: This class is @generated using OpenAPI. Do not edit the class manually. 
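Worth noting before the hunks below: this is a rename of an existing generated module, not an addition, so the old import path disappears. Any caller importing `VectorType` from the generated package would need an update along these lines (hypothetical caller code, not part of this diff):

```python
# Hypothetical caller code, not part of this diff.

# Before (2025-01 spec):
# from pinecone.core.openapi.inference.model.vector_type import VectorType
# vt = VectorType("dense")

# After (2025-04 spec): the module now holds the metric enum instead, and
# vector_type fields on generated models become plain `str` (see the
# sparse_embedding.py hunks later in this diff).
from pinecone.core.openapi.inference.model.model_info_metric import ModelInfoMetric

metric = ModelInfoMetric("cosine")  # allowed values shown in the next hunk
```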
@@ -56,7 +56,7 @@ class VectorType(ModelSimple): _check_type: bool allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = { - ("value",): {"DENSE": "dense", "SPARSE": "sparse"} + ("value",): {"COSINE": "cosine", "EUCLIDEAN": "euclidean", "DOTPRODUCT": "dotproduct"} } validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} @@ -95,6 +95,8 @@ def discriminator(cls): required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -106,15 +108,15 @@ def discriminator(cls): @convert_js_args_to_python_args def __init__(self, *args, **kwargs) -> None: - """VectorType - a model defined in OpenAPI + """ModelInfoMetric - a model defined in OpenAPI Note that value can be passed either in args or in kwargs, but not in both. Args: - args[0] (str): Indicates whether this is a 'dense' or 'sparse' embedding.., must be one of ["dense", "sparse", ] # noqa: E501 + args[0] (str): A distance metric that the embedding model supports for similarity searches, must be one of ["cosine", "euclidean", "dotproduct", ] # noqa: E501 Keyword Args: - value (str): Indicates whether this is a 'dense' or 'sparse' embedding.., must be one of ["dense", "sparse", ] # noqa: E501 + value (str): A distance metric that the embedding model supports for similarity searches, must be one of ["cosine", "euclidean", "dotproduct", ] # noqa: E501 _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. @@ -171,12 +173,16 @@ def __init__(self, *args, **kwargs) -> None: valid_classes=(self.__class__,), ) + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -194,15 +200,15 @@ def __init__(self, *args, **kwargs) -> None: @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: - """VectorType - a model defined in OpenAPI + """ModelInfoMetric - a model defined in OpenAPI Note that value can be passed either in args or in kwargs, but not in both. Args: - args[0] (str): Indicates whether this is a 'dense' or 'sparse' embedding., must be one of ["dense", "sparse", ] # noqa: E501 + args[0] (str): A distance metric that the embedding model supports for similarity searches, must be one of ["cosine", "euclidean", "dotproduct", ] # noqa: E501 Keyword Args: - value (str): Indicates whether this is a 'dense' or 'sparse' embedding., must be one of ["dense", "sparse", ] # noqa: E501 + value (str): A distance metric that the embedding model supports for similarity searches, must be one of ["cosine", "euclidean", "dotproduct", ] # noqa: E501 _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input.
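The hunk above and the one that follows encode a deliberate asymmetry: `__init__` pops `_enforce_allowed_values` and `_enforce_validations` with a default of `True`, while `_from_openapi_data` defaults both to `False`. In other words, values constructed by the caller are checked against `allowed_values`, but values deserialized from API responses are accepted leniently, so an SDK build generated against this spec can still parse enum members the server adds later. A minimal sketch, assuming the flags gate validation as their names and defaults suggest:

```python
# Sketch only; assumes _enforce_allowed_values gates the allowed_values check.
from pinecone.core.openapi.inference.model.model_info_metric import ModelInfoMetric

m = ModelInfoMetric("cosine")  # __init__ path: validated against allowed_values
print(m.value)                 # -> "cosine"

# Deserialization path: _enforce_allowed_values defaults to False (next hunk),
# so a metric introduced server-side after this SDK build still parses:
m2 = ModelInfoMetric._from_openapi_data("hypothetical-future-metric")
print(m2.value)                # -> "hypothetical-future-metric"
```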
@@ -261,12 +267,16 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: valid_classes=(self.__class__,), ) + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _configuration = kwargs.pop("_configuration", None) _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/model_info_supported_metrics.py b/pinecone/core/openapi/inference/model/model_info_supported_metrics.py new file mode 100644 index 00000000..a13fec67 --- /dev/null +++ b/pinecone/core/openapi/inference/model/model_info_supported_metrics.py @@ -0,0 +1,300 @@ +""" +Pinecone Inference API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.inference.model.model_info_metric import ModelInfoMetric + + globals()["ModelInfoMetric"] = ModelInfoMetric + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="ModelInfoSupportedMetrics") + + +class ModelInfoSupportedMetrics(ModelSimple): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return {"value": ([ModelInfoMetric],)} + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = {} + + read_only_vars: Set[str] = set() + + _composed_schemas = None + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: + """ModelInfoSupportedMetrics - a model defined in OpenAPI + + Note that value can be passed either in args or in kwargs, but not in both. + + Args: + args[0] ([ModelInfoMetric]): The distance metrics supported by the model for similarity search.. # noqa: E501 + + Keyword Args: + value ([ModelInfoMetric]): The distance metrics supported by the model for similarity search.. # noqa: E501 + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + # required up here when default value is not given + _path_to_item = kwargs.pop("_path_to_item", ()) + + value = None + if "value" in kwargs: + value = kwargs.pop("value") + + if value is None and args: + if len(args) == 1: + value = args[0] + elif len(args) > 1: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + if value is None: + raise PineconeApiTypeError( + "value is required, but not passed in args or kwargs and doesn't have default", + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.value = value + if kwargs: + raise PineconeApiTypeError( + "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." + % (kwargs, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: + """ModelInfoSupportedMetrics - a model defined in OpenAPI + + Note that value can be passed either in args or in kwargs, but not in both. + + Args: + args[0] ([ModelInfoMetric]): The distance metrics supported by the model for similarity search. # noqa: E501 + + Keyword Args: + value ([ModelInfoMetric]): The distance metrics supported by the model for similarity search. # noqa: E501 + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + # required up here when default value is not given + _path_to_item = kwargs.pop("_path_to_item", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + value = None + if "value" in kwargs: + value = kwargs.pop("value") + + if value is None and args: + if len(args) == 1: + value = args[0] + elif len(args) > 1: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + if value is None: + raise PineconeApiTypeError( + "value is required, but not passed in args or kwargs and doesn't have default", + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.value = value + if kwargs: + raise PineconeApiTypeError( + "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." + % (kwargs, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + return self diff --git a/pinecone/core/openapi/inference/model/model_info_supported_parameter.py b/pinecone/core/openapi/inference/model/model_info_supported_parameter.py new file mode 100644 index 00000000..ec84f8ea --- /dev/null +++ b/pinecone/core/openapi/inference/model/model_info_supported_parameter.py @@ -0,0 +1,312 @@ +""" +Pinecone Inference API + +Pinecone is a vector database that makes it easy to search and retrieve billions of high-dimensional vectors. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="ModelInfoSupportedParameter") + + +class ModelInfoSupportedParameter(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. 
These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "parameter": (str,), # noqa: E501 + "type": (str,), # noqa: E501 + "value_type": (str,), # noqa: E501 + "required": (bool,), # noqa: E501 + "allowed_values": ([dict],), # noqa: E501 + "min": (float,), # noqa: E501 + "max": (float,), # noqa: E501 + "default": (dict,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "parameter": "parameter", # noqa: E501 + "type": "type", # noqa: E501 + "value_type": "value_type", # noqa: E501 + "required": "required", # noqa: E501 + "allowed_values": "allowed_values", # noqa: E501 + "min": "min", # noqa: E501 + "max": "max", # noqa: E501 + "default": "default", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data( + cls: Type[T], parameter, type, value_type, required, *args, **kwargs + ) -> T: # noqa: E501 + """ModelInfoSupportedParameter - a model defined in OpenAPI + + Args: + parameter (str): The name of the parameter. + type (str): The parameter type, e.g. 'one_of', 'numeric_range', or 'any'. If the type is 'one_of', then 'allowed_values' will be set, and the value specified must be one of the allowed values. 'one_of' is only compatible with value_type 'string' or 'integer'. If 'numeric_range', then 'min' and 'max' will be set, and the value specified must adhere to the value_type and must fall within the `[min, max]` range (inclusive). If 'any', then any value is allowed, as long as it adheres to the value_type. + value_type (str): The type of value the parameter accepts, e.g. 'string', 'integer', 'float', or 'boolean'. + required (bool): Whether the parameter is required (true) or optional (false). + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input.
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + allowed_values ([dict]): The allowed parameter values when the type is 'one_of'. [optional] # noqa: E501 + min (float): The minimum allowed value (inclusive) when the type is 'numeric_range'. [optional] # noqa: E501 + max (float): The maximum allowed value (inclusive) when the type is 'numeric_range'. [optional] # noqa: E501 + default (dict): The default value for the parameter when a parameter is optional. [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.parameter = parameter + self.type = type + self.value_type = value_type + self.required = required + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, parameter, type, value_type, required, *args, **kwargs) -> None: # noqa: E501 + """ModelInfoSupportedParameter - a model defined in OpenAPI + + Args: + parameter (str): The name of the parameter. + type (str): The parameter type, e.g. 'one_of', 'numeric_range', or 'any'. If the type is 'one_of', then 'allowed_values' will be set, and the value specified must be one of the allowed values. 'one_of' is only compatible with value_type 'string' or 'integer'. If 'numeric_range', then 'min' and 'max' will be set, and the value specified must adhere to the value_type and must fall within the `[min, max]` range (inclusive). If 'any', then any value is allowed, as long as it adheres to the value_type. + value_type (str): The type of value the parameter accepts, e.g. 'string', 'integer', 'float', or 'boolean'. + required (bool): Whether the parameter is required (true) or optional (false). + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + allowed_values ([dict]): The allowed parameter values when the type is 'one_of'. [optional] # noqa: E501 + min (float): The minimum allowed value (inclusive) when the type is 'numeric_range'. [optional] # noqa: E501 + max (float): The maximum allowed value (inclusive) when the type is 'numeric_range'. [optional] # noqa: E501 + default (dict): The default value for the parameter when a parameter is optional.
[optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.parameter = parameter + self.type = type + self.value_type = value_type + self.required = required + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + )
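The `type`, `value_type`, `allowed_values`, `min`, and `max` fields documented above amount to a small validation protocol for model parameters. A hypothetical helper (illustrative only, not part of the SDK) showing how a caller might check a candidate value against one of these specs:

```python
# Hypothetical helper, not part of this diff or the SDK. Assumes `spec` is
# shaped like ModelInfoSupportedParameter: type, value_type, allowed_values,
# min, and max attributes with the semantics described in the docstring above.
def is_valid_parameter_value(spec, value) -> bool:
    py_types = {"string": str, "integer": int, "float": (int, float), "boolean": bool}
    if not isinstance(value, py_types[spec.value_type]):
        return False  # the value must match value_type regardless of spec.type
    if spec.type == "one_of":
        return value in spec.allowed_values  # enumerated values only
    if spec.type == "numeric_range":
        return spec.min <= value <= spec.max  # inclusive bounds per the docstring
    return True  # "any": only the value_type is constrained
```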
diff --git a/pinecone/core/openapi/inference/model/ranked_document.py b/pinecone/core/openapi/inference/model/ranked_document.py index 6223c752..e222d005 100644 --- a/pinecone/core/openapi/inference/model/ranked_document.py +++ b/pinecone/core/openapi/inference/model/ranked_document.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI.
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -154,6 +154,8 @@ def _from_openapi_data(cls: Type[T], index, score, *args, **kwargs) -> T: # noq document (Document): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -171,6 +173,8 @@ def _from_openapi_data(cls: Type[T], index, score, *args, **kwargs) -> T: # noq ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -193,6 +197,8 @@ def _from_openapi_data(cls: Type[T], index, score, *args, **kwargs) -> T: # noq required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -244,6 +250,8 @@ def __init__(self, index, score, *args, **kwargs) -> None: # noqa: E501 document (Document): [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -259,6 +267,8 @@ def __init__(self, index, score, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/rerank_request.py b/pinecone/core/openapi/inference/model/rerank_request.py index dea9ee3c..f9539da4 100644 --- a/pinecone/core/openapi/inference/model/rerank_request.py +++ b/pinecone/core/openapi/inference/model/rerank_request.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -125,7 +125,7 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - """RerankRequest - a model defined in OpenAPI Args: - model (str): The [model](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) to use for reranking. + model (str): The [model](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) to use for reranking. query (str): The query to rerank documents against. documents ([Document]): The documents to rerank. @@ -162,10 +162,12 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - _visited_composed_classes = (Animal,) top_n (int): The number of results to return sorted by relevance. Defaults to the number of inputs. [optional] # noqa: E501 return_documents (bool): Whether to return the documents in the response. [optional] if omitted the server will use the default value of True. # noqa: E501 - rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. 
The number of fields supported is [model-specific](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models). [optional] if omitted the server will use the default value of ["text"]. # noqa: E501 - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) for available model parameters. [optional] # noqa: E501 + rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/search/rerank-results#reranking-models). [optional] if omitted the server will use the default value of ["text"]. # noqa: E501 + parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -183,6 +185,8 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -206,6 +210,8 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -220,7 +226,7 @@ def __init__(self, model, query, documents, *args, **kwargs) -> None: # noqa: E """RerankRequest - a model defined in OpenAPI Args: - model (str): The [model](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) to use for reranking. + model (str): The [model](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) to use for reranking. query (str): The query to rerank documents against. documents ([Document]): The documents to rerank. @@ -257,10 +263,12 @@ def __init__(self, model, query, documents, *args, **kwargs) -> None: # noqa: E _visited_composed_classes = (Animal,) top_n (int): The number of results to return sorted by relevance. Defaults to the number of inputs. [optional] # noqa: E501 return_documents (bool): Whether to return the documents in the response. [optional] if omitted the server will use the default value of True. # noqa: E501 - rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models). [optional] if omitted the server will use the default value of ["text"]. # noqa: E501 - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/inference/understanding-inference#reranking-models) for available model parameters. 
[optional] # noqa: E501 + rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/search/rerank-results#reranking-models). [optional] if omitted the server will use the default value of ["text"]. # noqa: E501 + parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -276,6 +284,8 @@ def __init__(self, model, query, documents, *args, **kwargs) -> None: # noqa: E ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/rerank_result.py b/pinecone/core/openapi/inference/model/rerank_result.py index 2abeb482..cc7e2b7c 100644 --- a/pinecone/core/openapi/inference/model/rerank_result.py +++ b/pinecone/core/openapi/inference/model/rerank_result.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -156,6 +156,8 @@ def _from_openapi_data(cls: Type[T], model, data, usage, *args, **kwargs) -> T: _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -173,6 +175,8 @@ def _from_openapi_data(cls: Type[T], model, data, usage, *args, **kwargs) -> T: ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -196,6 +200,8 @@ def _from_openapi_data(cls: Type[T], model, data, usage, *args, **kwargs) -> T: required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -247,6 +253,8 @@ def __init__(self, model, data, usage, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -262,6 +270,8 @@ def __init__(self, model, data, usage, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git 
a/pinecone/core/openapi/inference/model/rerank_result_usage.py b/pinecone/core/openapi/inference/model/rerank_result_usage.py index 44f45c33..02ae6320 100644 --- a/pinecone/core/openapi/inference/model/rerank_result_usage.py +++ b/pinecone/core/openapi/inference/model/rerank_result_usage.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. -The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -140,6 +140,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 rerank_units (int): The number of rerank units consumed by this operation. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -157,6 +159,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -177,6 +181,8 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -224,6 +230,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 rerank_units (int): The number of rerank units consumed by this operation. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -239,6 +247,8 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/model/sparse_embedding.py b/pinecone/core/openapi/inference/model/sparse_embedding.py index f4f97415..a86574f9 100644 --- a/pinecone/core/openapi/inference/model/sparse_embedding.py +++ b/pinecone/core/openapi/inference/model/sparse_embedding.py @@ -5,7 +5,7 @@ This file is @generated using OpenAPI. 
-The version of the OpenAPI document: 2025-01 +The version of the OpenAPI document: 2025-04 Contact: support@pinecone.io """ @@ -27,12 +27,6 @@ from pinecone.openapi_support.exceptions import PineconeApiAttributeError -def lazy_import(): - from pinecone.core.openapi.inference.model.vector_type import VectorType - - globals()["VectorType"] = VectorType - - from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property @@ -75,7 +69,6 @@ def additional_properties_type(cls): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - lazy_import() return (bool, dict, float, int, list, str, none_type) # noqa: E501 _nullable = False @@ -90,11 +83,10 @@ def openapi_types(cls): openapi_types (dict): The key is attribute name and the value is attribute type. """ - lazy_import() return { "sparse_values": ([float],), # noqa: E501 "sparse_indices": ([int],), # noqa: E501 - "vector_type": (VectorType,), # noqa: E501 + "vector_type": (str,), # noqa: E501 "sparse_tokens": ([str],), # noqa: E501 } @@ -123,7 +115,7 @@ def _from_openapi_data( Args: sparse_values ([float]): The sparse embedding values. sparse_indices ([int]): The sparse embedding indices. - vector_type (VectorType): + vector_type (str): Indicates whether this is a 'dense' or 'sparse' embedding. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -159,6 +151,8 @@ def _from_openapi_data( sparse_tokens ([str]): The normalized tokens used to create the sparse embedding. [optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -176,6 +170,8 @@ def _from_openapi_data( ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item @@ -199,6 +195,8 @@ def _from_openapi_data( required_properties = set( [ + "_enforce_allowed_values", + "_enforce_validations", "_data_store", "_check_type", "_spec_property_naming", @@ -215,7 +213,7 @@ def __init__(self, sparse_values, sparse_indices, vector_type, *args, **kwargs) Args: sparse_values ([float]): The sparse embedding values. sparse_indices ([int]): The sparse embedding indices. - vector_type (VectorType): + vector_type (str): Indicates whether this is a 'dense' or 'sparse' embedding. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -251,6 +249,8 @@ def __init__(self, sparse_values, sparse_indices, vector_type, *args, **kwargs) sparse_tokens ([str]): The normalized tokens used to create the sparse embedding. 
[optional] # noqa: E501 """ + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -266,6 +266,8 @@ def __init__(self, sparse_values, sparse_indices, vector_type, *args, **kwargs) ) self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item diff --git a/pinecone/core/openapi/inference/models/__init__.py b/pinecone/core/openapi/inference/models/__init__.py index bbb2cd34..d68ec1ff 100644 --- a/pinecone/core/openapi/inference/models/__init__.py +++ b/pinecone/core/openapi/inference/models/__init__.py @@ -18,9 +18,17 @@ from pinecone.core.openapi.inference.model.embeddings_list_usage import EmbeddingsListUsage from pinecone.core.openapi.inference.model.error_response import ErrorResponse from pinecone.core.openapi.inference.model.error_response_error import ErrorResponseError +from pinecone.core.openapi.inference.model.model_info import ModelInfo +from pinecone.core.openapi.inference.model.model_info_list import ModelInfoList +from pinecone.core.openapi.inference.model.model_info_metric import ModelInfoMetric +from pinecone.core.openapi.inference.model.model_info_supported_metrics import ( + ModelInfoSupportedMetrics, +) +from pinecone.core.openapi.inference.model.model_info_supported_parameter import ( + ModelInfoSupportedParameter, +) from pinecone.core.openapi.inference.model.ranked_document import RankedDocument from pinecone.core.openapi.inference.model.rerank_request import RerankRequest from pinecone.core.openapi.inference.model.rerank_result import RerankResult from pinecone.core.openapi.inference.model.rerank_result_usage import RerankResultUsage from pinecone.core.openapi.inference.model.sparse_embedding import SparseEmbedding -from pinecone.core.openapi.inference.model.vector_type import VectorType diff --git a/pinecone/data/__init__.py b/pinecone/data/__init__.py index 8e040056..3ea4cd41 100644 --- a/pinecone/data/__init__.py +++ b/pinecone/data/__init__.py @@ -1,34 +1,10 @@ -from .index import ( - Index as _Index, - FetchResponse, - QueryResponse, - DescribeIndexStatsResponse, - UpsertResponse, - SparseValues, - Vector, -) -from .dataclasses import * -from .import_error import ( - Index, - IndexClientInstantiationError, - Inference, - InferenceInstantiationError, -) -from .index_asyncio import * -from .errors import ( - VectorDictionaryMissingKeysError, - VectorDictionaryExcessKeysError, - VectorTupleLengthError, - SparseValuesTypeError, - SparseValuesMissingKeysError, - SparseValuesDictionaryExpectedError, - MetadataDictionaryExpectedError, -) +import warnings + +from pinecone.db_data import * -from .features.bulk_import import ImportErrorMode -from .features.inference import ( - Inference as _Inference, - AsyncioInference as _AsyncioInference, - RerankModel, - EmbedModel, +warnings.warn( + "The module at `pinecone.data` has moved to `pinecone.db_data`. " + "Please update your imports. 
" + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, ) diff --git a/pinecone/data/features/__init__.py b/pinecone/data/features/__init__.py index e69de29b..e4ff12ee 100644 --- a/pinecone/data/features/__init__.py +++ b/pinecone/data/features/__init__.py @@ -0,0 +1,10 @@ +import warnings + +from pinecone.db_data.features import * + +warnings.warn( + "The module at `pinecone.data.features` has moved to `pinecone.db_data.features`. " + "Please update your imports. " + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, +) diff --git a/pinecone/data/features/bulk_imports/__init__.py b/pinecone/data/features/bulk_imports/__init__.py new file mode 100644 index 00000000..3af0d1f5 --- /dev/null +++ b/pinecone/data/features/bulk_imports/__init__.py @@ -0,0 +1,10 @@ +import warnings + +from pinecone.db_data.features.bulk_import import * + +warnings.warn( + "The module at `pinecone.data.features.bulk_import` has moved to `pinecone.db_data.features.bulk_import`. " + "Please update your imports. " + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, +) diff --git a/pinecone/data/features/inference/__init__.py b/pinecone/data/features/inference/__init__.py index 30e93330..0280f382 100644 --- a/pinecone/data/features/inference/__init__.py +++ b/pinecone/data/features/inference/__init__.py @@ -1,6 +1,10 @@ -from .repl_overrides import install_repl_overrides -from .inference import Inference -from .inference_asyncio import AsyncioInference -from .inference_request_builder import RerankModel, EmbedModel +import warnings -install_repl_overrides() +from pinecone.inference import * + +warnings.warn( + "The module at `pinecone.data.features.inference` has moved to `pinecone.inference`. " + "Please update your imports. 
" + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, +) diff --git a/pinecone/data/features/inference/models/__init__.py b/pinecone/data/features/inference/models/__init__.py deleted file mode 100644 index b9a18aeb..00000000 --- a/pinecone/data/features/inference/models/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .embedding_list import EmbeddingsList -from .rerank_result import RerankResult diff --git a/pinecone/db_control/__init__.py b/pinecone/db_control/__init__.py new file mode 100644 index 00000000..73d82468 --- /dev/null +++ b/pinecone/db_control/__init__.py @@ -0,0 +1,7 @@ +from .enums import * +from .models import * +from .db_control import DBControl +from .db_control_asyncio import DBControlAsyncio +from .repr_overrides import install_repr_overrides + +install_repr_overrides() diff --git a/pinecone/db_control/db_control.py b/pinecone/db_control/db_control.py new file mode 100644 index 00000000..ec6a412b --- /dev/null +++ b/pinecone/db_control/db_control.py @@ -0,0 +1,109 @@ +import logging +from typing import Optional, TYPE_CHECKING + +from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi +from pinecone.openapi_support.api_client import ApiClient + +from pinecone.utils import setup_openapi_client, PluginAware +from pinecone.core.openapi.db_control import API_VERSION + + +logger = logging.getLogger(__name__) +""" @private """ + +if TYPE_CHECKING: + from .resources.sync.index import IndexResource + from .resources.sync.collection import CollectionResource + from .resources.sync.restore_job import RestoreJobResource + from .resources.sync.backup import BackupResource + from pinecone.config import Config, OpenApiConfiguration + + +class DBControl(PluginAware): + def __init__( + self, config: "Config", openapi_config: "OpenApiConfiguration", pool_threads: int + ) -> None: + self.config = config + """ @private """ + + self._openapi_config = openapi_config + """ @private """ + + self._pool_threads = pool_threads + """ @private """ + + self._index_api = setup_openapi_client( + api_client_klass=ApiClient, + api_klass=ManageIndexesApi, + config=self.config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + api_version=API_VERSION, + ) + """ @private """ + + self._index_resource: Optional["IndexResource"] = None + """ @private """ + + self._collection_resource: Optional["CollectionResource"] = None + """ @private """ + + self._restore_job_resource: Optional["RestoreJobResource"] = None + """ @private """ + + self._backup_resource: Optional["BackupResource"] = None + """ @private """ + + super().__init__() # Initialize PluginAware + + @property + def index(self) -> "IndexResource": + if self._index_resource is None: + from .resources.sync.index import IndexResource + + self._index_resource = IndexResource( + index_api=self._index_api, + config=self.config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) + return self._index_resource + + @property + def collection(self) -> "CollectionResource": + if self._collection_resource is None: + from .resources.sync.collection import CollectionResource + + self._collection_resource = CollectionResource( + index_api=self._index_api, + config=self.config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) + return self._collection_resource + + @property + def restore_job(self) -> "RestoreJobResource": + if self._restore_job_resource is None: + from .resources.sync.restore_job import 
RestoreJobResource + + self._restore_job_resource = RestoreJobResource( + index_api=self._index_api, + config=self.config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) + return self._restore_job_resource + + @property + def backup(self) -> "BackupResource": + if self._backup_resource is None: + from .resources.sync.backup import BackupResource + + self._backup_resource = BackupResource( + index_api=self._index_api, + config=self.config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) + return self._backup_resource diff --git a/pinecone/db_control/db_control_asyncio.py b/pinecone/db_control/db_control_asyncio.py new file mode 100644 index 00000000..7ae2196a --- /dev/null +++ b/pinecone/db_control/db_control_asyncio.py @@ -0,0 +1,83 @@ +import logging +from typing import Optional, TYPE_CHECKING + +from pinecone.core.openapi.db_control.api.manage_indexes_api import AsyncioManageIndexesApi +from pinecone.openapi_support import AsyncioApiClient + +from pinecone.utils import setup_async_openapi_client +from pinecone.core.openapi.db_control import API_VERSION + +logger = logging.getLogger(__name__) +""" @private """ + + +if TYPE_CHECKING: + from .resources.asyncio.index import IndexResourceAsyncio + from .resources.asyncio.collection import CollectionResourceAsyncio + from .resources.asyncio.restore_job import RestoreJobResourceAsyncio + from .resources.asyncio.backup import BackupResourceAsyncio + from pinecone.config import Config, OpenApiConfiguration + + +class DBControlAsyncio: + def __init__(self, config: "Config", openapi_config: "OpenApiConfiguration") -> None: + self._config = config + """ @private """ + + self._openapi_config = openapi_config + """ @private """ + + self._index_api = setup_async_openapi_client( + api_client_klass=AsyncioApiClient, + api_klass=AsyncioManageIndexesApi, + config=self._config, + openapi_config=self._openapi_config, + api_version=API_VERSION, + ) + """ @private """ + + self._index_resource: Optional["IndexResourceAsyncio"] = None + """ @private """ + + self._collection_resource: Optional["CollectionResourceAsyncio"] = None + """ @private """ + + self._restore_job_resource: Optional["RestoreJobResourceAsyncio"] = None + """ @private """ + + self._backup_resource: Optional["BackupResourceAsyncio"] = None + """ @private """ + + @property + def index(self) -> "IndexResourceAsyncio": + if self._index_resource is None: + from .resources.asyncio.index import IndexResourceAsyncio + + self._index_resource = IndexResourceAsyncio( + index_api=self._index_api, config=self._config + ) + return self._index_resource + + @property + def collection(self) -> "CollectionResourceAsyncio": + if self._collection_resource is None: + from .resources.asyncio.collection import CollectionResourceAsyncio + + self._collection_resource = CollectionResourceAsyncio(self._index_api) + return self._collection_resource + + @property + def restore_job(self) -> "RestoreJobResourceAsyncio": + if self._restore_job_resource is None: + from .resources.asyncio.restore_job import RestoreJobResourceAsyncio + + self._restore_job_resource = RestoreJobResourceAsyncio(self._index_api) + return self._restore_job_resource + + @property + def backup(self) -> "BackupResourceAsyncio": + if self._backup_resource is None: + from .resources.asyncio.backup import BackupResourceAsyncio + + self._backup_resource = BackupResourceAsyncio(self._index_api) + return self._backup_resource diff --git a/pinecone/enums/__init__.py 
b/pinecone/db_control/enums/__init__.py similarity index 100% rename from pinecone/enums/__init__.py rename to pinecone/db_control/enums/__init__.py diff --git a/pinecone/enums/clouds.py b/pinecone/db_control/enums/clouds.py similarity index 92% rename from pinecone/enums/clouds.py rename to pinecone/db_control/enums/clouds.py index 192b3da5..8903f40a 100644 --- a/pinecone/enums/clouds.py +++ b/pinecone/db_control/enums/clouds.py @@ -3,10 +3,10 @@ class CloudProvider(Enum): """Cloud providers available for use with Pinecone serverless indexes - + This list could expand or change over time as more cloud providers are supported. - Check the Pinecone documentation for the most up-to-date list of supported cloud - providers. If you want to use a cloud provider that is not listed here, you can + Check the Pinecone documentation for the most up-to-date list of supported cloud + providers. If you want to use a cloud provider that is not listed here, you can pass a string value directly without using this enum. """ @@ -17,10 +17,10 @@ class CloudProvider(Enum): class AwsRegion(Enum): """AWS (Amazon Web Services) regions available for use with Pinecone serverless indexes - + This list could expand or change over time as more regions are supported. - Check the Pinecone documentation for the most up-to-date list of supported - regions. If you want to use a region that is not listed here, you can + Check the Pinecone documentation for the most up-to-date list of supported + regions. If you want to use a region that is not listed here, you can pass a string value directly without using this enum. """ @@ -31,10 +31,10 @@ class AwsRegion(Enum): class GcpRegion(Enum): """GCP (Google Cloud Platform) regions available for use with Pinecone serverless indexes - + This list could expand or change over time as more regions are supported. - Check the Pinecone documentation for the most up-to-date list of supported - regions. If you want to use a region that is not listed here, you can + Check the Pinecone documentation for the most up-to-date list of supported + regions. If you want to use a region that is not listed here, you can pass a string value directly without using this enum. """ @@ -44,10 +44,10 @@ class GcpRegion(Enum): class AzureRegion(Enum): """Azure regions available for use with Pinecone serverless indexes - + This list could expand or change over time as more regions are supported. - Check the Pinecone documentation for the most up-to-date list of supported - regions. If you want to use a region that is not listed here, you can + Check the Pinecone documentation for the most up-to-date list of supported + regions. If you want to use a region that is not listed here, you can pass a string value directly without using this enum. 
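+ + Example (illustrative; check the Pinecone docs for the current member list): + AzureRegion.EASTUS2 and the raw string "eastus2" are interchangeable here.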
""" diff --git a/pinecone/enums/deletion_protection.py b/pinecone/db_control/enums/deletion_protection.py similarity index 100% rename from pinecone/enums/deletion_protection.py rename to pinecone/db_control/enums/deletion_protection.py diff --git a/pinecone/enums/metric.py b/pinecone/db_control/enums/metric.py similarity index 100% rename from pinecone/enums/metric.py rename to pinecone/db_control/enums/metric.py diff --git a/pinecone/enums/pod_index_environment.py b/pinecone/db_control/enums/pod_index_environment.py similarity index 100% rename from pinecone/enums/pod_index_environment.py rename to pinecone/db_control/enums/pod_index_environment.py diff --git a/pinecone/enums/pod_type.py b/pinecone/db_control/enums/pod_type.py similarity index 100% rename from pinecone/enums/pod_type.py rename to pinecone/db_control/enums/pod_type.py diff --git a/pinecone/enums/vector_type.py b/pinecone/db_control/enums/vector_type.py similarity index 100% rename from pinecone/enums/vector_type.py rename to pinecone/db_control/enums/vector_type.py diff --git a/pinecone/control/index_host_store.py b/pinecone/db_control/index_host_store.py similarity index 100% rename from pinecone/control/index_host_store.py rename to pinecone/db_control/index_host_store.py diff --git a/pinecone/db_control/models/__init__.py b/pinecone/db_control/models/__init__.py new file mode 100644 index 00000000..cf866f11 --- /dev/null +++ b/pinecone/db_control/models/__init__.py @@ -0,0 +1,31 @@ +from .index_description import ServerlessSpecDefinition, PodSpecDefinition +from .collection_description import CollectionDescription +from .serverless_spec import ServerlessSpec +from .pod_spec import PodSpec +from .byoc_spec import ByocSpec +from .index_list import IndexList +from .collection_list import CollectionList +from .index_model import IndexModel +from ...inference.models.index_embed import IndexEmbed +from .backup_model import BackupModel +from .backup_list import BackupList +from .restore_job_model import RestoreJobModel +from .restore_job_list import RestoreJobList + + +__all__ = [ + "CollectionDescription", + "PodSpec", + "PodSpecDefinition", + "ServerlessSpec", + "ServerlessSpecDefinition", + "ByocSpec", + "IndexList", + "CollectionList", + "IndexModel", + "IndexEmbed", + "BackupModel", + "BackupList", + "RestoreJobModel", + "RestoreJobList", +] diff --git a/pinecone/db_control/models/backup_list.py b/pinecone/db_control/models/backup_list.py new file mode 100644 index 00000000..fe21c077 --- /dev/null +++ b/pinecone/db_control/models/backup_list.py @@ -0,0 +1,49 @@ +import json +from pinecone.core.openapi.db_control.model.backup_list import BackupList as OpenAPIBackupList +from .backup_model import BackupModel +from typing import List + + +class BackupList: + def __init__(self, backup_list: OpenAPIBackupList): + self._backup_list = backup_list + self._backups = [BackupModel(b) for b in self._backup_list.data] + + def names(self) -> List[str]: + return [i.name for i in self._backups] + + def __getitem__(self, key): + if isinstance(key, int): + return self._backups[key] + elif key == "data": + return self._backups + else: + # pagination and any other keys added in the future + return self._backup_list[key] + + def __getattr__(self, attr): + if attr == "data": + return self._backups + else: + # pagination and any other keys added in the future + return getattr(self._backup_list, attr) + + def __len__(self): + return len(self._backups) + + def __iter__(self): + return iter(self._backups) + + def __str__(self): + return 
str(self._backups) + + def __repr__(self): + raw_dict = self._backup_list.to_dict() + raw_dict["data"] = [i.to_dict() for i in self._backups] + + # Remove keys with value None + for key, value in list(raw_dict.items()): + if value is None: + del raw_dict[key] + + return json.dumps(raw_dict, indent=4) diff --git a/pinecone/db_control/models/backup_model.py b/pinecone/db_control/models/backup_model.py new file mode 100644 index 00000000..59dec7ba --- /dev/null +++ b/pinecone/db_control/models/backup_model.py @@ -0,0 +1,23 @@ +import json +from pinecone.core.openapi.db_control.model.backup_model import BackupModel as OpenAPIBackupModel +from pinecone.utils.repr_overrides import custom_serializer + + +class BackupModel: + def __init__(self, backup: OpenAPIBackupModel): + self._backup = backup + + def __getattr__(self, attr): + return getattr(self._backup, attr) + + def __getitem__(self, key): + return self.__getattr__(key) + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return json.dumps(self.to_dict(), indent=4, default=custom_serializer) + + def to_dict(self): + return self._backup.to_dict() diff --git a/pinecone/db_control/models/byoc_spec.py b/pinecone/db_control/models/byoc_spec.py new file mode 100644 index 00000000..ccbdff4a --- /dev/null +++ b/pinecone/db_control/models/byoc_spec.py @@ -0,0 +1,12 @@ +from dataclasses import dataclass + + +@dataclass(frozen=True) +class ByocSpec: + """ + ByocSpec represents the configuration used to deploy a BYOC (Bring Your Own Cloud) index. + + To learn more about the options for each configuration, please see [Understanding Indexes](https://docs.pinecone.io/docs/indexes) + """ + + environment: str diff --git a/pinecone/models/collection_description.py b/pinecone/db_control/models/collection_description.py similarity index 100% rename from pinecone/models/collection_description.py rename to pinecone/db_control/models/collection_description.py diff --git a/pinecone/models/collection_list.py b/pinecone/db_control/models/collection_list.py similarity index 87% rename from pinecone/models/collection_list.py rename to pinecone/db_control/models/collection_list.py index 508ec685..f36a9708 100644 --- a/pinecone/models/collection_list.py +++ b/pinecone/db_control/models/collection_list.py @@ -1,5 +1,7 @@ import json -from pinecone.core.openapi.db_control.models import CollectionList as OpenAPICollectionList +from pinecone.core.openapi.db_control.model.collection_list import ( + CollectionList as OpenAPICollectionList, +) class CollectionList: diff --git a/pinecone/models/index_description.py b/pinecone/db_control/models/index_description.py similarity index 100% rename from pinecone/models/index_description.py rename to pinecone/db_control/models/index_description.py diff --git a/pinecone/models/index_list.py b/pinecone/db_control/models/index_list.py similarity index 89% rename from pinecone/models/index_list.py rename to pinecone/db_control/models/index_list.py index 71242e24..e918b4f5 100644 --- a/pinecone/models/index_list.py +++ b/pinecone/db_control/models/index_list.py @@ -1,5 +1,5 @@ import json -from pinecone.core.openapi.db_control.models import IndexList as OpenAPIIndexList +from pinecone.core.openapi.db_control.model.index_list import IndexList as OpenAPIIndexList from .index_model import IndexModel from typing import List diff --git a/pinecone/models/index_model.py b/pinecone/db_control/models/index_model.py similarity index 61% rename from pinecone/models/index_model.py rename to 
pinecone/db_control/models/index_model.py index 7deb2d7d..a268df57 100644 --- a/pinecone/models/index_model.py +++ b/pinecone/db_control/models/index_model.py @@ -1,4 +1,6 @@ -from pinecone.core.openapi.db_control.models import IndexModel as OpenAPIIndexModel +from pinecone.core.openapi.db_control.model.index_model import IndexModel as OpenAPIIndexModel +import json +from pinecone.utils.repr_overrides import custom_serializer class IndexModel: @@ -15,5 +17,8 @@ def __getattr__(self, attr): def __getitem__(self, key): return self.__getattr__(key) + def __repr__(self): + return json.dumps(self.to_dict(), indent=4, default=custom_serializer) + def to_dict(self): return self.index.to_dict() diff --git a/pinecone/models/list_response.py b/pinecone/db_control/models/list_response.py similarity index 100% rename from pinecone/models/list_response.py rename to pinecone/db_control/models/list_response.py diff --git a/pinecone/models/pod_spec.py b/pinecone/db_control/models/pod_spec.py similarity index 100% rename from pinecone/models/pod_spec.py rename to pinecone/db_control/models/pod_spec.py diff --git a/pinecone/db_control/models/restore_job_list.py b/pinecone/db_control/models/restore_job_list.py new file mode 100644 index 00000000..7c80aa96 --- /dev/null +++ b/pinecone/db_control/models/restore_job_list.py @@ -0,0 +1,50 @@ +import json +from pinecone.core.openapi.db_control.model.restore_job_list import ( + RestoreJobList as OpenAPIRestoreJobList, +) +from .restore_job_model import RestoreJobModel + +from datetime import datetime + + +def custom_serializer(obj): + if isinstance(obj, datetime): + return obj.isoformat() + else: + return str(obj) + + +class RestoreJobList: + def __init__(self, restore_job_list: OpenAPIRestoreJobList): + self._restore_job_list = restore_job_list + self._restore_jobs = [RestoreJobModel(r) for r in self._restore_job_list.data] + + def __getitem__(self, key): + if isinstance(key, int): + return self._restore_jobs[key] + elif key == "data": + return self._restore_jobs + else: + # pagination and any other keys added in the future + return self._restore_job_list[key] + + def __getattr__(self, attr): + if attr == "data": + return self._restore_jobs + else: + # pagination and any other keys added in the future + return getattr(self._restore_job_list, attr) + + def __len__(self): + return len(self._restore_jobs) + + def __iter__(self): + return iter(self._restore_jobs) + + def __str__(self): + return str(self._restore_jobs) + + def __repr__(self): + return json.dumps( + [i.to_dict() for i in self._restore_jobs], indent=4, default=custom_serializer + ) diff --git a/pinecone/db_control/models/restore_job_model.py b/pinecone/db_control/models/restore_job_model.py new file mode 100644 index 00000000..1dc6902d --- /dev/null +++ b/pinecone/db_control/models/restore_job_model.py @@ -0,0 +1,25 @@ +import json +from pinecone.core.openapi.db_control.model.restore_job_model import ( + RestoreJobModel as OpenAPIRestoreJobModel, +) +from pinecone.utils.repr_overrides import custom_serializer + + +class RestoreJobModel: + def __init__(self, restore_job: OpenAPIRestoreJobModel): + self.restore_job = restore_job + + def __str__(self): + return str(self.restore_job) + + def __getattr__(self, attr): + return getattr(self.restore_job, attr) + + def __getitem__(self, key): + return self.__getattr__(key) + + def __repr__(self): + return json.dumps(self.to_dict(), indent=4, default=custom_serializer) + + def to_dict(self): + return self.restore_job.to_dict() diff --git 
a/pinecone/models/serverless_spec.py b/pinecone/db_control/models/serverless_spec.py similarity index 100% rename from pinecone/models/serverless_spec.py rename to pinecone/db_control/models/serverless_spec.py diff --git a/pinecone/control/repr_overrides.py b/pinecone/db_control/repr_overrides.py similarity index 67% rename from pinecone/control/repr_overrides.py rename to pinecone/db_control/repr_overrides.py index 98e4b4d4..ce6e9611 100644 --- a/pinecone/control/repr_overrides.py +++ b/pinecone/db_control/repr_overrides.py @@ -1,6 +1,5 @@ -from pinecone.utils import install_json_repr_override -from pinecone.models.index_model import IndexModel -from pinecone.core.openapi.db_control.models import CollectionModel +from pinecone.utils.repr_overrides import install_json_repr_override +from pinecone.core.openapi.db_control.model.collection_model import CollectionModel def install_repr_overrides(): @@ -12,5 +11,5 @@ def install_repr_overrides(): from pprint.pformat seems better for data plane objects such as lists of query results. """ - for model in [IndexModel, CollectionModel]: + for model in [CollectionModel]: install_json_repr_override(model) diff --git a/pinecone/control/request_factory.py b/pinecone/db_control/request_factory.py similarity index 77% rename from pinecone/control/request_factory.py rename to pinecone/db_control/request_factory.py index d4d0ce63..76fbd6a0 100644 --- a/pinecone/control/request_factory.py +++ b/pinecone/db_control/request_factory.py @@ -2,27 +2,40 @@ from typing import Optional, Dict, Any, Union from enum import Enum +from pinecone.utils import parse_non_empty_args, convert_enum_to_string -from pinecone.utils import convert_enum_to_string -from pinecone.core.openapi.db_control.models import ( - CreateCollectionRequest, +from pinecone.core.openapi.db_control.model.create_collection_request import CreateCollectionRequest +from pinecone.core.openapi.db_control.model.create_index_for_model_request import ( CreateIndexForModelRequest, +) +from pinecone.core.openapi.db_control.model.create_index_for_model_request_embed import ( CreateIndexForModelRequestEmbed, - CreateIndexRequest, - ConfigureIndexRequest, +) +from pinecone.core.openapi.db_control.model.create_index_request import CreateIndexRequest +from pinecone.core.openapi.db_control.model.configure_index_request import ConfigureIndexRequest +from pinecone.core.openapi.db_control.model.configure_index_request_spec import ( ConfigureIndexRequestSpec, +) +from pinecone.core.openapi.db_control.model.configure_index_request_spec_pod import ( ConfigureIndexRequestSpecPod, +) +from pinecone.core.openapi.db_control.model.deletion_protection import ( DeletionProtection as DeletionProtectionModel, - IndexSpec, - IndexTags, +) +from pinecone.core.openapi.db_control.model.index_spec import IndexSpec +from pinecone.core.openapi.db_control.model.index_tags import IndexTags +from pinecone.core.openapi.db_control.model.serverless_spec import ( ServerlessSpec as ServerlessSpecModel, - PodSpec as PodSpecModel, - PodSpecMetadataConfig, ) -from pinecone.models import ServerlessSpec, PodSpec, IndexModel, IndexEmbed -from pinecone.utils import parse_non_empty_args +from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec as ByocSpecModel +from pinecone.core.openapi.db_control.model.pod_spec import PodSpec as PodSpecModel +from pinecone.core.openapi.db_control.model.pod_spec_metadata_config import PodSpecMetadataConfig +from pinecone.core.openapi.db_control.model.create_index_from_backup_request import ( + 
CreateIndexFromBackupRequest, +) +from pinecone.db_control.models import ServerlessSpec, PodSpec, ByocSpec, IndexModel, IndexEmbed -from pinecone.enums import ( +from pinecone.db_control.enums import ( Metric, VectorType, DeletionProtection, @@ -64,7 +77,7 @@ def __parse_deletion_protection( raise ValueError("deletion_protection must be either 'enabled' or 'disabled'") @staticmethod - def __parse_index_spec(spec: Union[Dict, ServerlessSpec, PodSpec]) -> IndexSpec: + def __parse_index_spec(spec: Union[Dict, ServerlessSpec, PodSpec, ByocSpec]) -> IndexSpec: if isinstance(spec, dict): if "serverless" in spec: spec["serverless"]["cloud"] = convert_enum_to_string(spec["serverless"]["cloud"]) @@ -88,8 +101,10 @@ def __parse_index_spec(spec: Union[Dict, ServerlessSpec, PodSpec]) -> IndexSpec: indexed=args_dict["metadata_config"].get("indexed", None) ) index_spec = IndexSpec(pod=PodSpecModel(**args_dict)) + elif "byoc" in spec: + index_spec = IndexSpec(byoc=ByocSpecModel(**spec["byoc"])) else: - raise ValueError("spec must contain either 'serverless' or 'pod' key") + raise ValueError("spec must contain either 'serverless', 'pod', or 'byoc' key") elif isinstance(spec, ServerlessSpec): index_spec = IndexSpec( serverless=ServerlessSpecModel(cloud=spec.cloud, region=spec.region) @@ -111,15 +126,18 @@ def __parse_index_spec(spec: Union[Dict, ServerlessSpec, PodSpec]) -> IndexSpec: index_spec = IndexSpec( pod=PodSpecModel(environment=spec.environment, pod_type=spec.pod_type, **args_dict) ) + elif isinstance(spec, ByocSpec): + args_dict = parse_non_empty_args([("environment", spec.environment)]) + index_spec = IndexSpec(byoc=ByocSpecModel(**args_dict)) else: - raise TypeError("spec must be of type dict, ServerlessSpec, or PodSpec") + raise TypeError("spec must be of type dict, ServerlessSpec, PodSpec, or ByocSpec") return index_spec @staticmethod def create_index_request( name: str, - spec: Union[Dict, ServerlessSpec, PodSpec], + spec: Union[Dict, ServerlessSpec, PodSpec, ByocSpec], dimension: Optional[int] = None, metric: Optional[Union[Metric, str]] = Metric.COSINE, deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, @@ -201,6 +219,21 @@ def create_index_for_model_request( return CreateIndexForModelRequest(**args) + @staticmethod + def create_index_from_backup_request( + name: str, + deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + tags: Optional[Dict[str, str]] = None, + ) -> CreateIndexFromBackupRequest: + if deletion_protection is not None: + dp = PineconeDBControlRequestFactory.__parse_deletion_protection(deletion_protection) + else: + dp = None + + tags_obj = PineconeDBControlRequestFactory.__parse_tags(tags) + + return CreateIndexFromBackupRequest(name=name, deletion_protection=dp, tags=tags_obj) + @staticmethod def configure_index_request( description: IndexModel, diff --git a/pinecone/db_control/resources/__init__.py b/pinecone/db_control/resources/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pinecone/db_control/resources/asyncio/__init__.py b/pinecone/db_control/resources/asyncio/__init__.py new file mode 100644 index 00000000..9a4841d3 --- /dev/null +++ b/pinecone/db_control/resources/asyncio/__init__.py @@ -0,0 +1,2 @@ +from .index import IndexResourceAsyncio +from .collection import CollectionResourceAsyncio diff --git a/pinecone/db_control/resources/asyncio/backup.py b/pinecone/db_control/resources/asyncio/backup.py new file mode 100644 index 00000000..391da1e1 --- /dev/null +++ 
b/pinecone/db_control/resources/asyncio/backup.py @@ -0,0 +1,93 @@ +from typing import Optional + +from pinecone.core.openapi.db_control.api.manage_indexes_api import AsyncioManageIndexesApi +from pinecone.core.openapi.db_control.model.create_backup_request import CreateBackupRequest +from pinecone.db_control.models import BackupModel, BackupList +from pinecone.utils import parse_non_empty_args, require_kwargs + + +class BackupResourceAsyncio: + def __init__(self, index_api: AsyncioManageIndexesApi): + self._index_api = index_api + """ @private """ + + @require_kwargs + async def list( + self, + *, + index_name: Optional[str] = None, + limit: Optional[int] = 10, + pagination_token: Optional[str] = None, + ) -> BackupList: + """ + List backups for an index or for the project. + + Args: + index_name (str): The name of the index to list backups for. + limit (int): The maximum number of backups to return. + pagination_token (str): The pagination token to use for the next page of backups. + """ + if index_name is not None: + args = parse_non_empty_args( + [ + ("index_name", index_name), + ("limit", limit), + ("pagination_token", pagination_token), + ] + ) + result = await self._index_api.list_index_backups(**args) + return BackupList(result) + else: + args = parse_non_empty_args([("limit", limit), ("pagination_token", pagination_token)]) + result = await self._index_api.list_project_backups(**args) + return BackupList(result) + + @require_kwargs + async def create( + self, *, index_name: str, backup_name: str, description: str = "" + ) -> BackupModel: + """ + Create a backup for an index. + + Args: + index_name (str): The name of the index to create a backup for. + backup_name (str): The name of the backup to create. + description (str): The description of the backup. + + Returns: + BackupModel: The created backup. + """ + req = CreateBackupRequest(name=backup_name, description=description) + result = await self._index_api.create_backup( + index_name=index_name, create_backup_request=req + ) + return BackupModel(result) + + @require_kwargs + async def describe(self, *, backup_id: str) -> BackupModel: + """ + Describe a backup. + + Args: + backup_id (str): The ID of the backup to describe. + + Returns: + BackupModel: The described backup. + """ + result = await self._index_api.describe_backup(backup_id=backup_id) + return BackupModel(result) + + @require_kwargs + async def get(self, *, backup_id: str) -> BackupModel: + """Alias for describe""" + return await self.describe(backup_id=backup_id) + + @require_kwargs + async def delete(self, *, backup_id: str) -> None: + """ + Delete a backup. + + Args: + backup_id (str): The ID of the backup to delete. 
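+ + Example (illustrative sketch; assumes an asyncio client wired so this resource is reachable as pc.db.backup): + await pc.db.backup.delete(backup_id="my-backup-id")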
+ """ + return await self._index_api.delete_backup(backup_id=backup_id) diff --git a/pinecone/db_control/resources/asyncio/collection.py b/pinecone/db_control/resources/asyncio/collection.py new file mode 100644 index 00000000..e7d98a66 --- /dev/null +++ b/pinecone/db_control/resources/asyncio/collection.py @@ -0,0 +1,32 @@ +import logging + +from pinecone.db_control.models import CollectionList + +from pinecone.db_control.request_factory import PineconeDBControlRequestFactory +from pinecone.utils import require_kwargs + +logger = logging.getLogger(__name__) +""" @private """ + + +class CollectionResourceAsyncio: + def __init__(self, index_api): + self.index_api = index_api + + @require_kwargs + async def create(self, *, name: str, source: str): + req = PineconeDBControlRequestFactory.create_collection_request(name=name, source=source) + await self.index_api.create_collection(create_collection_request=req) + + @require_kwargs + async def list(self) -> CollectionList: + response = await self.index_api.list_collections() + return CollectionList(response) + + @require_kwargs + async def delete(self, *, name: str): + await self.index_api.delete_collection(name) + + @require_kwargs + async def describe(self, *, name: str): + return await self.index_api.describe_collection(name).to_dict() diff --git a/pinecone/db_control/resources/asyncio/index.py b/pinecone/db_control/resources/asyncio/index.py new file mode 100644 index 00000000..9816f365 --- /dev/null +++ b/pinecone/db_control/resources/asyncio/index.py @@ -0,0 +1,210 @@ +import logging +import asyncio +from typing import Optional, Dict, Union + + +from pinecone.db_control.models import ( + ServerlessSpec, + PodSpec, + ByocSpec, + IndexModel, + IndexList, + IndexEmbed, +) +from pinecone.utils import docslinks + +from pinecone.db_control.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, +) +from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict +from pinecone.db_control.request_factory import PineconeDBControlRequestFactory +from pinecone.core.openapi.db_control import API_VERSION +from pinecone.utils import require_kwargs + +logger = logging.getLogger(__name__) +""" @private """ + + +class IndexResourceAsyncio: + def __init__(self, index_api, config): + self._index_api = index_api + self._config = config + + @require_kwargs + async def create( + self, + *, + name: str, + spec: Union[Dict, ServerlessSpec, PodSpec, ByocSpec], + dimension: Optional[int] = None, + metric: Optional[Union[Metric, str]] = Metric.COSINE, + timeout: Optional[int] = None, + deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + vector_type: Optional[Union[VectorType, str]] = VectorType.DENSE, + tags: Optional[Dict[str, str]] = None, + ) -> IndexModel: + req = PineconeDBControlRequestFactory.create_index_request( + name=name, + spec=spec, + dimension=dimension, + metric=metric, + deletion_protection=deletion_protection, + vector_type=vector_type, + tags=tags, + ) + resp = await self._index_api.create_index(create_index_request=req) + + if timeout == -1: + return IndexModel(resp) + return await self.__poll_describe_index_until_ready(name, timeout) + + @require_kwargs + async def create_for_model( + self, + *, + name: str, + cloud: Union[CloudProvider, str], + region: Union[AwsRegion, GcpRegion, AzureRegion, str], + embed: Union[IndexEmbed, CreateIndexForModelEmbedTypedDict], + tags: Optional[Dict[str, str]] = None, + 
deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + timeout: Optional[int] = None, + ) -> IndexModel: + req = PineconeDBControlRequestFactory.create_index_for_model_request( + name=name, + cloud=cloud, + region=region, + embed=embed, + tags=tags, + deletion_protection=deletion_protection, + ) + resp = await self._index_api.create_index_for_model(req) + + if timeout == -1: + return IndexModel(resp) + return await self.__poll_describe_index_until_ready(name, timeout) + + @require_kwargs + async def create_from_backup( + self, + *, + name: str, + backup_id: str, + deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + tags: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, + ) -> IndexModel: + req = PineconeDBControlRequestFactory.create_index_from_backup_request( + name=name, deletion_protection=deletion_protection, tags=tags + ) + await self._index_api.create_index_from_backup_operation( + backup_id=backup_id, create_index_from_backup_request=req + ) + + # Match the sync resource: a timeout of -1 returns without polling for readiness. + if timeout == -1: + return await self.describe(name=name) + return await self.__poll_describe_index_until_ready(name, timeout) + + async def __poll_describe_index_until_ready(self, name: str, timeout: Optional[int] = None): + description = None + + async def is_ready() -> bool: + nonlocal description + description = await self.describe(name=name) + return description.status.ready + + total_wait_time = 0 + if timeout is None: + # Wait indefinitely + while not await is_ready(): + logger.debug( + f"Waiting for index {name} to be ready. Total wait time {total_wait_time} seconds." + ) + total_wait_time += 5 + await asyncio.sleep(5) + + else: + # Wait for a maximum of timeout seconds + while not await is_ready(): + if timeout < 0: + logger.error(f"Index {name} is not ready. Timeout reached.") + link = docslinks["API_DESCRIBE_INDEX"](API_VERSION) + timeout_msg = ( + f"Please call describe_index() to confirm index status. See docs at {link}" + ) + raise TimeoutError(timeout_msg) + + logger.debug( + f"Waiting for index {name} to be ready.
Total wait time: {total_wait_time}" + ) + total_wait_time += 5 + await asyncio.sleep(5) + timeout -= 5 + + return description + + @require_kwargs + async def delete(self, *, name: str, timeout: Optional[int] = None): + await self._index_api.delete_index(name) + + if timeout == -1: + return + + if timeout is None: + while await self.has(name=name): + await asyncio.sleep(5) + else: + while await self.has(name=name) and timeout >= 0: + await asyncio.sleep(5) + timeout -= 5 + if timeout and timeout < 0: + raise ( + TimeoutError( + "Please call the list_indexes API ({}) to confirm if index is deleted".format( + "https://www.pinecone.io/docs/api/operation/list_indexes/" + ) + ) + ) + + @require_kwargs + async def list(self) -> IndexList: + response = await self._index_api.list_indexes() + return IndexList(response) + + @require_kwargs + async def describe(self, *, name: str) -> IndexModel: + description = await self._index_api.describe_index(name) + return IndexModel(description) + + @require_kwargs + async def has(self, *, name: str) -> bool: + available_indexes = await self.list() + if name in available_indexes.names(): + return True + else: + return False + + @require_kwargs + async def configure( + self, + *, + name: str, + replicas: Optional[int] = None, + pod_type: Optional[Union[PodType, str]] = None, + deletion_protection: Optional[Union[DeletionProtection, str]] = None, + tags: Optional[Dict[str, str]] = None, + ): + description = await self.describe(name=name) + + req = PineconeDBControlRequestFactory.configure_index_request( + description=description, + replicas=replicas, + pod_type=pod_type, + deletion_protection=deletion_protection, + tags=tags, + ) + await self._index_api.configure_index(name, configure_index_request=req) diff --git a/pinecone/db_control/resources/asyncio/restore_job.py b/pinecone/db_control/resources/asyncio/restore_job.py new file mode 100644 index 00000000..397a5050 --- /dev/null +++ b/pinecone/db_control/resources/asyncio/restore_job.py @@ -0,0 +1,56 @@ +from typing import Optional + +from pinecone.core.openapi.db_control.api.manage_indexes_api import AsyncioManageIndexesApi +from pinecone.db_control.models import RestoreJobModel, RestoreJobList +from pinecone.utils import parse_non_empty_args, require_kwargs + + +class RestoreJobResourceAsyncio: + def __init__(self, index_api: AsyncioManageIndexesApi): + self._index_api = index_api + """ @private """ + + @require_kwargs + async def get(self, *, job_id: str) -> RestoreJobModel: + """ + Get a restore job by ID. + + Args: + job_id (str): The ID of the restore job to get. + + Returns: + RestoreJobModel: The restore job. + """ + job = await self._index_api.describe_restore_job(job_id=job_id) + return RestoreJobModel(job) + + @require_kwargs + async def describe(self, *, job_id: str) -> RestoreJobModel: + """ + Get a restore job by ID. Alias for get. + + Args: + job_id (str): The ID of the restore job to get. + + Returns: + RestoreJobModel: The restore job. + """ + return await self.get(job_id=job_id) + + @require_kwargs + async def list( + self, *, limit: Optional[int] = 10, pagination_token: Optional[str] = None + ) -> RestoreJobList: + """ + List all restore jobs. + + Args: + limit (int): The maximum number of restore jobs to return. + pagination_token (str): The pagination token to use for the next page of restore jobs. + + Returns: + List[RestoreJobModel]: The list of restore jobs. 
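+ + Example (illustrative pagination sketch; pc.db.restore_job is assumed client wiring): + page = await pc.db.restore_job.list(limit=25) + token = getattr(page, "pagination", None) + if token: + more = await pc.db.restore_job.list(limit=25, pagination_token=token.next)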
+ """ + args = parse_non_empty_args([("limit", limit), ("pagination_token", pagination_token)]) + jobs = await self._index_api.list_restore_jobs(**args) + return RestoreJobList(jobs) diff --git a/pinecone/db_control/resources/sync/__init__.py b/pinecone/db_control/resources/sync/__init__.py new file mode 100644 index 00000000..cc904d53 --- /dev/null +++ b/pinecone/db_control/resources/sync/__init__.py @@ -0,0 +1,2 @@ +from .index import IndexResource +from .collection import CollectionResource diff --git a/pinecone/db_control/resources/sync/backup.py b/pinecone/db_control/resources/sync/backup.py new file mode 100644 index 00000000..8d5d2a0c --- /dev/null +++ b/pinecone/db_control/resources/sync/backup.py @@ -0,0 +1,107 @@ +from typing import Optional, TYPE_CHECKING + +from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi +from pinecone.core.openapi.db_control.model.create_backup_request import CreateBackupRequest +from pinecone.db_control.models import BackupModel, BackupList +from pinecone.utils import parse_non_empty_args, require_kwargs, PluginAware + +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration + + +class BackupResource(PluginAware): + def __init__( + self, + index_api: ManageIndexesApi, + config: "Config", + openapi_config: "OpenApiConfiguration", + pool_threads: int, + ): + self._index_api = index_api + """ @private """ + + self.config = config + """ @private """ + + self._openapi_config = openapi_config + """ @private """ + + self._pool_threads = pool_threads + """ @private """ + + super().__init__() # Initialize PluginAware + + @require_kwargs + def list( + self, + *, + index_name: Optional[str] = None, + limit: Optional[int] = 10, + pagination_token: Optional[str] = None, + ) -> BackupList: + """ + List backups for an index or for the project. + + Args: + index_name (str): The name of the index to list backups for. If not provided, list all backups for the project. + limit (int): The maximum number of backups to return. + pagination_token (str): The pagination token to use for the next page of backups. + """ + if index_name is not None: + args = parse_non_empty_args( + [ + ("index_name", index_name), + ("limit", limit), + ("pagination_token", pagination_token), + ] + ) + return BackupList(self._index_api.list_index_backups(**args)) + else: + args = parse_non_empty_args([("limit", limit), ("pagination_token", pagination_token)]) + return BackupList(self._index_api.list_project_backups(**args)) + + @require_kwargs + def create(self, *, index_name: str, backup_name: str, description: str = "") -> BackupModel: + """ + Create a backup for an index. + + Args: + index_name (str): The name of the index to create a backup for. + backup_name (str): The name of the backup to create. + description (str): The description of the backup. + + Returns: + BackupModel: The created backup. + """ + req = CreateBackupRequest(name=backup_name, description=description) + return BackupModel( + self._index_api.create_backup(index_name=index_name, create_backup_request=req) + ) + + @require_kwargs + def describe(self, *, backup_id: str) -> BackupModel: + """ + Describe a backup. + + Args: + backup_id (str): The ID of the backup to describe. + + Returns: + BackupModel: The described backup. 
+ """ + return BackupModel(self._index_api.describe_backup(backup_id=backup_id)) + + @require_kwargs + def get(self, *, backup_id: str) -> BackupModel: + """Alias for describe""" + return self.describe(backup_id=backup_id) + + @require_kwargs + def delete(self, *, backup_id: str) -> None: + """ + Delete a backup. + + Args: + backup_id (str): The ID of the backup to delete. + """ + return self._index_api.delete_backup(backup_id=backup_id) diff --git a/pinecone/db_control/resources/sync/collection.py b/pinecone/db_control/resources/sync/collection.py new file mode 100644 index 00000000..950452e6 --- /dev/null +++ b/pinecone/db_control/resources/sync/collection.py @@ -0,0 +1,55 @@ +from typing import TYPE_CHECKING +import logging + +from pinecone.db_control.models import CollectionList +from pinecone.db_control.request_factory import PineconeDBControlRequestFactory +from pinecone.utils import PluginAware, require_kwargs + +logger = logging.getLogger(__name__) +""" @private """ + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi + from pinecone.config import Config, OpenApiConfiguration + + +class CollectionResource(PluginAware): + def __init__( + self, + index_api: "ManageIndexesApi", + config: "Config", + openapi_config: "OpenApiConfiguration", + pool_threads: int, + ): + self.index_api = index_api + """ @private """ + + + self.config = config + """ @private """ + + self._openapi_config = openapi_config + """ @private """ + + self._pool_threads = pool_threads + """ @private """ + + super().__init__() # Initialize PluginAware + + @require_kwargs + def create(self, *, name: str, source: str) -> None: + req = PineconeDBControlRequestFactory.create_collection_request(name=name, source=source) + self.index_api.create_collection(create_collection_request=req) + + @require_kwargs + def list(self) -> CollectionList: + response = self.index_api.list_collections() + return CollectionList(response) + + @require_kwargs + def delete(self, *, name: str) -> None: + self.index_api.delete_collection(name) + + @require_kwargs + def describe(self, *, name: str): + return self.index_api.describe_collection(name).to_dict() diff --git a/pinecone/db_control/resources/sync/index.py b/pinecone/db_control/resources/sync/index.py new file mode 100644 index 00000000..0e69140c --- /dev/null +++ b/pinecone/db_control/resources/sync/index.py @@ -0,0 +1,256 @@ +import time +import logging +from typing import Optional, Dict, Union, TYPE_CHECKING + +from pinecone.db_control.index_host_store import IndexHostStore + +from pinecone.db_control.models import IndexModel, IndexList, IndexEmbed +from pinecone.utils import docslinks, require_kwargs, PluginAware + +from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict +from pinecone.db_control.request_factory import PineconeDBControlRequestFactory +from pinecone.core.openapi.db_control import API_VERSION + +logger = logging.getLogger(__name__) +""" @private """ + +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration + from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi + from pinecone.db_control.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, + ) + from pinecone.db_control.models import ServerlessSpec, PodSpec, ByocSpec, IndexEmbed + + +class IndexResource(PluginAware): + def __init__( + self, + index_api: "ManageIndexesApi", + config: "Config", + openapi_config: 
"OpenApiConfiguration", + pool_threads: int, + ): + self._index_api = index_api + """ @private """ + + self.config = config + """ @private """ + + self._openapi_config = openapi_config + """ @private """ + + self._pool_threads = pool_threads + """ @private """ + + self._index_host_store = IndexHostStore() + """ @private """ + + super().__init__() # Initialize PluginAware + + @require_kwargs + def create( + self, + *, + name: str, + spec: Union[Dict, "ServerlessSpec", "PodSpec", "ByocSpec"], + dimension: Optional[int] = None, + metric: Optional[Union["Metric", str]] = "cosine", + timeout: Optional[int] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + vector_type: Optional[Union["VectorType", str]] = "dense", + tags: Optional[Dict[str, str]] = None, + ) -> IndexModel: + req = PineconeDBControlRequestFactory.create_index_request( + name=name, + spec=spec, + dimension=dimension, + metric=metric, + deletion_protection=deletion_protection, + vector_type=vector_type, + tags=tags, + ) + resp = self._index_api.create_index(create_index_request=req) + + if timeout == -1: + return IndexModel(resp) + return self.__poll_describe_index_until_ready(name, timeout) + + @require_kwargs + def create_for_model( + self, + *, + name: str, + cloud: Union["CloudProvider", str], + region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], + embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], + tags: Optional[Dict[str, str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + timeout: Optional[int] = None, + ) -> IndexModel: + req = PineconeDBControlRequestFactory.create_index_for_model_request( + name=name, + cloud=cloud, + region=region, + embed=embed, + tags=tags, + deletion_protection=deletion_protection, + ) + resp = self._index_api.create_index_for_model(req) + + if timeout == -1: + return IndexModel(resp) + return self.__poll_describe_index_until_ready(name, timeout) + + @require_kwargs + def create_from_backup( + self, + *, + name: str, + backup_id: str, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + tags: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, + ) -> IndexModel: + """ + Create an index from a backup. + + Args: + name (str): The name of the index to create. + backup_id (str): The ID of the backup to create the index from. + deletion_protection (DeletionProtection): The deletion protection to use for the index. + tags (Dict[str, str]): The tags to use for the index. + timeout (int): The number of seconds to wait for the index to be ready. If -1, the function will return without polling for the index status to be ready. If None, the function will poll indefinitely for the index to be ready. + + Returns: + IndexModel: The created index. + """ + req = PineconeDBControlRequestFactory.create_index_from_backup_request( + name=name, deletion_protection=deletion_protection, tags=tags + ) + resp = self._index_api.create_index_from_backup_operation( + backup_id=backup_id, create_index_from_backup_request=req + ) + logger.info(f"Creating index from backup. 
Response: {resp}") + + if timeout == -1: + return self.describe(name=name) + return self.__poll_describe_index_until_ready(name, timeout) + + def __poll_describe_index_until_ready(self, name: str, timeout: Optional[int] = None): + description = None + + def is_ready() -> bool: + nonlocal description + description = self.describe(name=name) + return description.status.ready + + total_wait_time = 0 + if timeout is None: + # Wait indefinitely + while not is_ready(): + logger.debug( + f"Waiting for index {name} to be ready. Total wait time {total_wait_time} seconds." + ) + total_wait_time += 5 + time.sleep(5) + + else: + # Wait for a maximum of timeout seconds + while not is_ready(): + if timeout < 0: + logger.error(f"Index {name} is not ready. Timeout reached.") + link = docslinks["API_DESCRIBE_INDEX"](API_VERSION) + timeout_msg = ( + f"Please call describe_index() to confirm index status. See docs at {link}" + ) + raise TimeoutError(timeout_msg) + + logger.debug( + f"Waiting for index {name} to be ready. Total wait time: {total_wait_time}" + ) + total_wait_time += 5 + time.sleep(5) + timeout -= 5 + + return description + + @require_kwargs + def delete(self, *, name: str, timeout: Optional[int] = None) -> None: + self._index_api.delete_index(name) + self._index_host_store.delete_host(self.config, name) + + if timeout == -1: + return + + if timeout is None: + while self.has(name=name): + time.sleep(5) + else: + while self.has(name=name) and timeout >= 0: + time.sleep(5) + timeout -= 5 + if timeout and timeout < 0: + raise ( + TimeoutError( + "Please call the list_indexes API ({}) to confirm if index is deleted".format( + "https://www.pinecone.io/docs/api/operation/list_indexes/" + ) + ) + ) + + @require_kwargs + def list(self) -> IndexList: + response = self._index_api.list_indexes() + return IndexList(response) + + @require_kwargs + def describe(self, *, name: str) -> IndexModel: + api_instance = self._index_api + description = api_instance.describe_index(name) + host = description.host + self._index_host_store.set_host(self.config, name, host) + + return IndexModel(description) + + @require_kwargs + def has(self, *, name: str) -> bool: + if name in self.list().names(): + return True + else: + return False + + @require_kwargs + def configure( + self, + *, + name: str, + replicas: Optional[int] = None, + pod_type: Optional[Union["PodType", str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = None, + tags: Optional[Dict[str, str]] = None, + ) -> None: + api_instance = self._index_api + description = self.describe(name=name) + + req = PineconeDBControlRequestFactory.configure_index_request( + description=description, + replicas=replicas, + pod_type=pod_type, + deletion_protection=deletion_protection, + tags=tags, + ) + api_instance.configure_index(name, configure_index_request=req) + + def _get_host(self, name: str) -> str: + """@private""" + return self._index_host_store.get_host( + api=self._index_api, config=self.config, index_name=name + ) diff --git a/pinecone/db_control/resources/sync/restore_job.py b/pinecone/db_control/resources/sync/restore_job.py new file mode 100644 index 00000000..e47010c8 --- /dev/null +++ b/pinecone/db_control/resources/sync/restore_job.py @@ -0,0 +1,76 @@ +from typing import Optional, TYPE_CHECKING + +from pinecone.db_control.models import RestoreJobModel, RestoreJobList +from pinecone.utils import parse_non_empty_args, require_kwargs, PluginAware + +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration + from 
pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi + + +class RestoreJobResource(PluginAware): + def __init__( + self, + index_api: "ManageIndexesApi", + config: "Config", + openapi_config: "OpenApiConfiguration", + pool_threads: int, + ): + self._index_api = index_api + """ @private """ + + self.config = config + """ @private """ + + self._openapi_config = openapi_config + """ @private """ + + self._pool_threads = pool_threads + """ @private """ + + super().__init__() # Initialize PluginAware + + @require_kwargs + def get(self, *, job_id: str) -> RestoreJobModel: + """ + Get a restore job by ID. + + Args: + job_id (str): The ID of the restore job to get. + + Returns: + RestoreJobModel: The restore job. + """ + job = self._index_api.describe_restore_job(job_id=job_id) + return RestoreJobModel(job) + + @require_kwargs + def describe(self, *, job_id: str) -> RestoreJobModel: + """ + Get a restore job by ID. Alias for get. + + Args: + job_id (str): The ID of the restore job to get. + + Returns: + RestoreJobModel: The restore job. + """ + return self.get(job_id=job_id) + + @require_kwargs + def list( + self, *, limit: Optional[int] = 10, pagination_token: Optional[str] = None + ) -> RestoreJobList: + """ + List all restore jobs. + + Args: + limit (int): The maximum number of restore jobs to return. + pagination_token (str): The pagination token to use for the next page of restore jobs. + + Returns: + List[RestoreJobModel]: The list of restore jobs. + """ + args = parse_non_empty_args([("limit", limit), ("pagination_token", pagination_token)]) + jobs = self._index_api.list_restore_jobs(**args) + return RestoreJobList(jobs) diff --git a/pinecone/control/types/__init__.py b/pinecone/db_control/types/__init__.py similarity index 100% rename from pinecone/control/types/__init__.py rename to pinecone/db_control/types/__init__.py diff --git a/pinecone/control/types/create_index_for_model_embed.py b/pinecone/db_control/types/create_index_for_model_embed.py similarity index 72% rename from pinecone/control/types/create_index_for_model_embed.py rename to pinecone/db_control/types/create_index_for_model_embed.py index 123474a0..ab7e43ac 100644 --- a/pinecone/control/types/create_index_for_model_embed.py +++ b/pinecone/db_control/types/create_index_for_model_embed.py @@ -1,6 +1,6 @@ from typing import TypedDict, Dict, Union -from ...enums import Metric -from ...data.features.inference import EmbedModel +from pinecone.db_control.enums import Metric +from pinecone.inference import EmbedModel class CreateIndexForModelEmbedTypedDict(TypedDict): diff --git a/pinecone/db_data/__init__.py b/pinecone/db_data/__init__.py new file mode 100644 index 00000000..f2db9a63 --- /dev/null +++ b/pinecone/db_data/__init__.py @@ -0,0 +1,61 @@ +from .index import ( + Index as _Index, + FetchResponse, + QueryResponse, + DescribeIndexStatsResponse, + UpsertResponse, + SparseValues, + Vector, +) +from .dataclasses import * +from .import_error import ( + Index, + IndexClientInstantiationError, + Inference, + InferenceInstantiationError, +) +from .index_asyncio import * +from .errors import ( + VectorDictionaryMissingKeysError, + VectorDictionaryExcessKeysError, + VectorTupleLengthError, + SparseValuesTypeError, + SparseValuesMissingKeysError, + SparseValuesDictionaryExpectedError, + MetadataDictionaryExpectedError, +) + +from .features.bulk_import import ImportErrorMode + + +import warnings + + +def _get_deprecated_import(name, from_module, to_module): + warnings.warn( + f"The import of `{name}` from 
`{from_module}` has moved to `{to_module}`. " + f"Please update your imports from `from {from_module} import {name}` " + f"to `from {to_module} import {name}`. " + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + ) + # Import from the new location + from pinecone.inference import ( + Inference as _Inference, + AsyncioInference as _AsyncioInference, + RerankModel, + EmbedModel, + ) + + return locals()[name] + + +moved = ["_Inference", "_AsyncioInference", "RerankModel", "EmbedModel"] + + +def __getattr__(name): + if name in locals(): + return locals()[name] + elif name in moved: + return _get_deprecated_import(name, "pinecone.data", "pinecone.inference") + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") diff --git a/pinecone/data/dataclasses/__init__.py b/pinecone/db_data/dataclasses/__init__.py similarity index 100% rename from pinecone/data/dataclasses/__init__.py rename to pinecone/db_data/dataclasses/__init__.py diff --git a/pinecone/data/dataclasses/fetch_response.py b/pinecone/db_data/dataclasses/fetch_response.py similarity index 100% rename from pinecone/data/dataclasses/fetch_response.py rename to pinecone/db_data/dataclasses/fetch_response.py diff --git a/pinecone/data/dataclasses/search_query.py b/pinecone/db_data/dataclasses/search_query.py similarity index 100% rename from pinecone/data/dataclasses/search_query.py rename to pinecone/db_data/dataclasses/search_query.py diff --git a/pinecone/data/dataclasses/search_query_vector.py b/pinecone/db_data/dataclasses/search_query_vector.py similarity index 100% rename from pinecone/data/dataclasses/search_query_vector.py rename to pinecone/db_data/dataclasses/search_query_vector.py diff --git a/pinecone/data/dataclasses/search_rerank.py b/pinecone/db_data/dataclasses/search_rerank.py similarity index 97% rename from pinecone/data/dataclasses/search_rerank.py rename to pinecone/db_data/dataclasses/search_rerank.py index 1b9534ba..0ac4ca4e 100644 --- a/pinecone/data/dataclasses/search_rerank.py +++ b/pinecone/db_data/dataclasses/search_rerank.py @@ -1,6 +1,6 @@ from dataclasses import dataclass from typing import Optional, Dict, Any, List -from ..features.inference import RerankModel +from pinecone.inference import RerankModel @dataclass diff --git a/pinecone/data/dataclasses/sparse_values.py b/pinecone/db_data/dataclasses/sparse_values.py similarity index 100% rename from pinecone/data/dataclasses/sparse_values.py rename to pinecone/db_data/dataclasses/sparse_values.py diff --git a/pinecone/data/dataclasses/utils.py b/pinecone/db_data/dataclasses/utils.py similarity index 100% rename from pinecone/data/dataclasses/utils.py rename to pinecone/db_data/dataclasses/utils.py diff --git a/pinecone/data/dataclasses/vector.py b/pinecone/db_data/dataclasses/vector.py similarity index 100% rename from pinecone/data/dataclasses/vector.py rename to pinecone/db_data/dataclasses/vector.py diff --git a/pinecone/data/errors.py b/pinecone/db_data/errors.py similarity index 100% rename from pinecone/data/errors.py rename to pinecone/db_data/errors.py diff --git a/pinecone/data/features/bulk_import/__init__.py b/pinecone/db_data/features/bulk_import/__init__.py similarity index 100% rename from pinecone/data/features/bulk_import/__init__.py rename to pinecone/db_data/features/bulk_import/__init__.py diff --git a/pinecone/data/features/bulk_import/bulk_import.py b/pinecone/db_data/features/bulk_import/bulk_import.py similarity index 100% rename from 
pinecone/data/features/bulk_import/bulk_import.py rename to pinecone/db_data/features/bulk_import/bulk_import.py diff --git a/pinecone/data/features/bulk_import/bulk_import_asyncio.py b/pinecone/db_data/features/bulk_import/bulk_import_asyncio.py similarity index 100% rename from pinecone/data/features/bulk_import/bulk_import_asyncio.py rename to pinecone/db_data/features/bulk_import/bulk_import_asyncio.py diff --git a/pinecone/data/features/bulk_import/bulk_import_request_factory.py b/pinecone/db_data/features/bulk_import/bulk_import_request_factory.py similarity index 100% rename from pinecone/data/features/bulk_import/bulk_import_request_factory.py rename to pinecone/db_data/features/bulk_import/bulk_import_request_factory.py diff --git a/pinecone/data/import_error.py b/pinecone/db_data/import_error.py similarity index 100% rename from pinecone/data/import_error.py rename to pinecone/db_data/import_error.py diff --git a/pinecone/data/index.py b/pinecone/db_data/index.py similarity index 90% rename from pinecone/data/index.py rename to pinecone/db_data/index.py index ebd5cecd..6c78b849 100644 --- a/pinecone/data/index.py +++ b/pinecone/db_data/index.py @@ -1,8 +1,8 @@ from pinecone.utils.tqdm import tqdm - +import warnings import logging import json -from typing import Union, List, Optional, Dict, Any, Literal +from typing import Union, List, Optional, Dict, Any, Literal, TYPE_CHECKING from pinecone.config import ConfigBuilder @@ -45,6 +45,9 @@ from concurrent.futures import as_completed +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration + logger = logging.getLogger(__name__) """ @private """ @@ -55,7 +58,7 @@ def parse_query_response(response: QueryResponse): return response -class Index(IndexInterface, ImportFeatureMixin, PluginAware): +class Index(PluginAware, IndexInterface, ImportFeatureMixin): """ A client for interacting with a Pinecone index via REST API. For improved performance, use the Pinecone GRPC index client. 
@@ -70,29 +73,29 @@ def __init__( openapi_config=None, **kwargs, ): - self.config = ConfigBuilder.build( + self._config = ConfigBuilder.build( api_key=api_key, host=host, additional_headers=additional_headers, **kwargs ) """ @private """ - self.openapi_config = ConfigBuilder.build_openapi_config(self.config, openapi_config) + self._openapi_config = ConfigBuilder.build_openapi_config(self._config, openapi_config) """ @private """ if pool_threads is None: - self.pool_threads = 5 * cpu_count() + self._pool_threads = 5 * cpu_count() """ @private """ else: - self.pool_threads = pool_threads + self._pool_threads = pool_threads """ @private """ if kwargs.get("connection_pool_maxsize", None): - self.openapi_config.connection_pool_maxsize = kwargs.get("connection_pool_maxsize") + self._openapi_config.connection_pool_maxsize = kwargs.get("connection_pool_maxsize") self._vector_api = setup_openapi_client( api_client_klass=ApiClient, api_klass=VectorOperationsApi, - config=self.config, - openapi_config=self.openapi_config, - pool_threads=pool_threads, + config=self._config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, api_version=API_VERSION, ) @@ -101,9 +104,30 @@ def __init__( # Pass the same api_client to the ImportFeatureMixin super().__init__(api_client=self._api_client) - self.load_plugins( - config=self.config, openapi_config=self.openapi_config, pool_threads=self.pool_threads + @property + def config(self) -> "Config": + """@private""" + return self._config + + @property + def openapi_config(self) -> "OpenApiConfiguration": + """@private""" + warnings.warn( + "The `openapi_config` property has been renamed to `_openapi_config`. It is considered private and should not be used directly. This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self._openapi_config + + @property + def pool_threads(self) -> int: + """@private""" + warnings.warn( + "The `pool_threads` property has been renamed to `_pool_threads`. It is considered private and should not be used directly. 
This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, ) + return self._pool_threads def _openapi_kwargs(self, kwargs: Dict[str, Any]) -> Dict[str, Any]: return filter_dict(kwargs, OPENAPI_ENDPOINT_PARAMS) diff --git a/pinecone/data/index_asyncio.py b/pinecone/db_data/index_asyncio.py similarity index 100% rename from pinecone/data/index_asyncio.py rename to pinecone/db_data/index_asyncio.py diff --git a/pinecone/data/index_asyncio_interface.py b/pinecone/db_data/index_asyncio_interface.py similarity index 100% rename from pinecone/data/index_asyncio_interface.py rename to pinecone/db_data/index_asyncio_interface.py diff --git a/pinecone/data/interfaces.py b/pinecone/db_data/interfaces.py similarity index 100% rename from pinecone/data/interfaces.py rename to pinecone/db_data/interfaces.py diff --git a/pinecone/db_data/models/__init__.py b/pinecone/db_data/models/__init__.py new file mode 100644 index 00000000..a14d3600 --- /dev/null +++ b/pinecone/db_data/models/__init__.py @@ -0,0 +1 @@ +from pinecone.core.openapi.db_data.models import * diff --git a/pinecone/data/query_results_aggregator.py b/pinecone/db_data/query_results_aggregator.py similarity index 100% rename from pinecone/data/query_results_aggregator.py rename to pinecone/db_data/query_results_aggregator.py diff --git a/pinecone/data/request_factory.py b/pinecone/db_data/request_factory.py similarity index 100% rename from pinecone/data/request_factory.py rename to pinecone/db_data/request_factory.py diff --git a/pinecone/data/sparse_values_factory.py b/pinecone/db_data/sparse_values_factory.py similarity index 100% rename from pinecone/data/sparse_values_factory.py rename to pinecone/db_data/sparse_values_factory.py diff --git a/pinecone/data/types/__init__.py b/pinecone/db_data/types/__init__.py similarity index 100% rename from pinecone/data/types/__init__.py rename to pinecone/db_data/types/__init__.py diff --git a/pinecone/data/types/query_filter.py b/pinecone/db_data/types/query_filter.py similarity index 100% rename from pinecone/data/types/query_filter.py rename to pinecone/db_data/types/query_filter.py diff --git a/pinecone/data/types/search_query_typed_dict.py b/pinecone/db_data/types/search_query_typed_dict.py similarity index 100% rename from pinecone/data/types/search_query_typed_dict.py rename to pinecone/db_data/types/search_query_typed_dict.py diff --git a/pinecone/data/types/search_query_vector_typed_dict.py b/pinecone/db_data/types/search_query_vector_typed_dict.py similarity index 100% rename from pinecone/data/types/search_query_vector_typed_dict.py rename to pinecone/db_data/types/search_query_vector_typed_dict.py diff --git a/pinecone/data/types/search_rerank_typed_dict.py b/pinecone/db_data/types/search_rerank_typed_dict.py similarity index 96% rename from pinecone/data/types/search_rerank_typed_dict.py rename to pinecone/db_data/types/search_rerank_typed_dict.py index 89c4f8d8..2d04fe82 100644 --- a/pinecone/data/types/search_rerank_typed_dict.py +++ b/pinecone/db_data/types/search_rerank_typed_dict.py @@ -1,5 +1,5 @@ from typing import TypedDict, Optional, Union, Dict, Any -from ..features.inference import RerankModel +from pinecone.inference import RerankModel class SearchRerankTypedDict(TypedDict): diff --git a/pinecone/data/types/sparse_vector_typed_dict.py b/pinecone/db_data/types/sparse_vector_typed_dict.py similarity index 100% rename from pinecone/data/types/sparse_vector_typed_dict.py rename to 
pinecone/db_data/types/sparse_vector_typed_dict.py diff --git a/pinecone/data/types/vector_metadata_dict.py b/pinecone/db_data/types/vector_metadata_dict.py similarity index 100% rename from pinecone/data/types/vector_metadata_dict.py rename to pinecone/db_data/types/vector_metadata_dict.py diff --git a/pinecone/data/types/vector_tuple.py b/pinecone/db_data/types/vector_tuple.py similarity index 100% rename from pinecone/data/types/vector_tuple.py rename to pinecone/db_data/types/vector_tuple.py diff --git a/pinecone/data/types/vector_typed_dict.py b/pinecone/db_data/types/vector_typed_dict.py similarity index 100% rename from pinecone/data/types/vector_typed_dict.py rename to pinecone/db_data/types/vector_typed_dict.py diff --git a/pinecone/data/vector_factory.py b/pinecone/db_data/vector_factory.py similarity index 100% rename from pinecone/data/vector_factory.py rename to pinecone/db_data/vector_factory.py diff --git a/pinecone/exceptions/__init__.py b/pinecone/exceptions/__init__.py index 92b05fd7..f437e90b 100644 --- a/pinecone/exceptions/__init__.py +++ b/pinecone/exceptions/__init__.py @@ -1,4 +1,7 @@ -from pinecone.openapi_support.exceptions import ( +from .exceptions import ( + PineconeConfigurationError, + PineconeProtocolError, + ListConversionException, PineconeException, PineconeApiAttributeError, PineconeApiTypeError, @@ -10,7 +13,6 @@ ForbiddenException, ServiceException, ) -from .exceptions import PineconeConfigurationError, PineconeProtocolError, ListConversionException __all__ = [ "PineconeConfigurationError", diff --git a/pinecone/exceptions/exceptions.py b/pinecone/exceptions/exceptions.py index 3860dc8b..32eed99f 100644 --- a/pinecone/exceptions/exceptions.py +++ b/pinecone/exceptions/exceptions.py @@ -1,4 +1,143 @@ -from pinecone.openapi_support.exceptions import PineconeException +class PineconeException(Exception): + """The base exception class for all exceptions in the Pinecone Python SDK""" + + +class PineconeApiTypeError(PineconeException, TypeError): + def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None) -> None: + """Raises an exception for TypeErrors + + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list): a list of keys and indices to get to the + current_item + None if unset + valid_classes (tuple): the primitive classes that current item + should be an instance of + None if unset + key_type (bool): False if our value is a value in a dict + True if it is a key in a dict + False if our item is an item in a list + None if unset + """ + self.path_to_item = path_to_item + self.valid_classes = valid_classes + self.key_type = key_type + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(PineconeApiTypeError, self).__init__(full_msg) + + +class PineconeApiValueError(PineconeException, ValueError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list) the path to the exception in the + received_data dict. None if unset + """ + + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(PineconeApiValueError, self).__init__(full_msg) + + +class PineconeApiAttributeError(PineconeException, AttributeError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Raised when an attribute reference or assignment fails.
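Worth noting about the consolidated hierarchy above: each API error class inherits from both `PineconeException` and the matching builtin (`TypeError`, `ValueError`, `KeyError`, `AttributeError`), so callers can catch at either level of granularity. A minimal sketch of what that dual inheritance enables; the `index` object and its `fetch` call are hypothetical stand-ins:

```python
from pinecone.exceptions import NotFoundException, PineconeException

try:
    index.fetch(ids=["vec-1"])  # hypothetical data-plane call
except NotFoundException as e:
    # Subclass of PineconeApiException; carries status/reason/body/headers.
    print(f"Not found: {e.status} {e.reason}")
except PineconeException:
    raise  # any other SDK error, including PineconeApiTypeError (also a TypeError)
```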
+ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(PineconeApiAttributeError, self).__init__(full_msg) + + +class PineconeApiKeyError(PineconeException, KeyError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(PineconeApiKeyError, self).__init__(full_msg) + + +class PineconeApiException(PineconeException): + def __init__(self, status=None, reason=None, http_resp=None) -> None: + if http_resp: + self.status = http_resp.status + self.reason = http_resp.reason + self.body = http_resp.data + self.headers = http_resp.getheaders() + else: + self.status = status + self.reason = reason + self.body = None + self.headers = None + + def __str__(self): + """Custom error messages for exception""" + error_message = "({0})\nReason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format(self.headers) + + if self.body: + error_message += "HTTP response body: {0}\n".format(self.body) + + return error_message + + +class NotFoundException(PineconeApiException): + def __init__(self, status=None, reason=None, http_resp=None) -> None: + super(NotFoundException, self).__init__(status, reason, http_resp) + + +class UnauthorizedException(PineconeApiException): + def __init__(self, status=None, reason=None, http_resp=None) -> None: + super(UnauthorizedException, self).__init__(status, reason, http_resp) + + +class ForbiddenException(PineconeApiException): + def __init__(self, status=None, reason=None, http_resp=None) -> None: + super(ForbiddenException, self).__init__(status, reason, http_resp) + + +class ServiceException(PineconeApiException): + def __init__(self, status=None, reason=None, http_resp=None) -> None: + super(ServiceException, self).__init__(status, reason, http_resp) + + +def render_path(path_to_item): + """Returns a string representation of a path""" + result = "" + for pth in path_to_item: + if isinstance(pth, int): + result += "[{0}]".format(pth) + else: + result += "['{0}']".format(pth) + return result class PineconeProtocolError(PineconeException): diff --git a/pinecone/grpc/__init__.py b/pinecone/grpc/__init__.py index 350047ca..66adb916 100644 --- a/pinecone/grpc/__init__.py +++ b/pinecone/grpc/__init__.py @@ -49,7 +49,7 @@ from .config import GRPCClientConfig from .future import PineconeGrpcFuture -from pinecone.data.dataclasses import Vector, SparseValues +from pinecone.db_data.dataclasses import Vector, SparseValues from pinecone.core.grpc.protos.db_data_2025_01_pb2 import ( Vector as GRPCVector, diff --git a/pinecone/grpc/index_grpc.py b/pinecone/grpc/index_grpc.py index 9b68c0b6..bfaf8fff 100644 --- a/pinecone/grpc/index_grpc.py +++ b/pinecone/grpc/index_grpc.py @@ -21,7 +21,7 @@ QueryResponse, IndexDescription as DescribeIndexStatsResponse, ) -from pinecone.models.list_response import ListResponse as SimpleListResponse, Pagination +from pinecone.db_control.models.list_response import ListResponse as SimpleListResponse, Pagination from pinecone.core.grpc.protos.db_data_2025_01_pb2 import ( 
Vector as GRPCVector, QueryVector as GRPCQueryVector, @@ -38,11 +38,11 @@ SparseValues as GRPCSparseValues, ) from pinecone import Vector, SparseValues -from pinecone.data.query_results_aggregator import QueryNamespacesResults, QueryResultsAggregator +from pinecone.db_data.query_results_aggregator import QueryNamespacesResults, QueryResultsAggregator from pinecone.core.grpc.protos.db_data_2025_01_pb2_grpc import VectorServiceStub from .base import GRPCIndexBase from .future import PineconeGrpcFuture -from ..data.types import ( +from ..db_data.types import ( SparseVectorTypedDict, VectorTypedDict, VectorTuple, diff --git a/pinecone/grpc/pinecone.py b/pinecone/grpc/pinecone.py index c78481ff..7c869e8f 100644 --- a/pinecone/grpc/pinecone.py +++ b/pinecone/grpc/pinecone.py @@ -1,5 +1,5 @@ -from ..control.pinecone import Pinecone -from ..config.config import ConfigBuilder +from pinecone import Pinecone +from pinecone.config import ConfigBuilder from .index_grpc import GRPCIndex @@ -122,15 +122,15 @@ def Index(self, name: str = "", host: str = "", **kwargs): raise ValueError("Either name or host must be specified") # Use host if it is provided, otherwise get host from describe_index - index_host = host or self.index_host_store.get_host(self.index_api, self.config, name) + index_host = host or self.db.index._get_host(name) - pt = kwargs.pop("pool_threads", None) or self.pool_threads + pt = kwargs.pop("pool_threads", None) or self._pool_threads config = ConfigBuilder.build( - api_key=self.config.api_key, + api_key=self._config.api_key, host=index_host, - source_tag=self.config.source_tag, - proxy_url=self.config.proxy_url, - ssl_ca_certs=self.config.ssl_ca_certs, + source_tag=self._config.source_tag, + proxy_url=self._config.proxy_url, + ssl_ca_certs=self._config.ssl_ca_certs, ) return GRPCIndex(index_name=name, config=config, pool_threads=pt, **kwargs) diff --git a/pinecone/grpc/sparse_values_factory.py b/pinecone/grpc/sparse_values_factory.py index 240cd8e1..5bb14685 100644 --- a/pinecone/grpc/sparse_values_factory.py +++ b/pinecone/grpc/sparse_values_factory.py @@ -3,8 +3,8 @@ from ..utils import convert_to_list -from ..data import SparseValuesTypeError, SparseValuesMissingKeysError -from ..data.types import SparseVectorTypedDict +from ..db_data import SparseValuesTypeError, SparseValuesMissingKeysError +from ..db_data.types import SparseVectorTypedDict from pinecone.core.grpc.protos.db_data_2025_01_pb2 import SparseValues as GRPCSparseValues from pinecone.core.openapi.db_data.models import SparseValues as OpenApiSparseValues diff --git a/pinecone/grpc/utils.py b/pinecone/grpc/utils.py index dcd19710..c2869e73 100644 --- a/pinecone/grpc/utils.py +++ b/pinecone/grpc/utils.py @@ -13,7 +13,7 @@ IndexDescription as DescribeIndexStatsResponse, NamespaceSummary, ) -from pinecone.data.dataclasses import FetchResponse +from pinecone.db_data.dataclasses import FetchResponse from google.protobuf.struct_pb2 import Struct diff --git a/pinecone/grpc/vector_factory_grpc.py b/pinecone/grpc/vector_factory_grpc.py index 1fe9572b..22efd269 100644 --- a/pinecone/grpc/vector_factory_grpc.py +++ b/pinecone/grpc/vector_factory_grpc.py @@ -8,13 +8,13 @@ from .utils import dict_to_proto_struct from ..utils import fix_tuple_length, convert_to_list from ..utils.constants import REQUIRED_VECTOR_FIELDS, OPTIONAL_VECTOR_FIELDS -from ..data import ( +from ..db_data import ( VectorDictionaryMissingKeysError, VectorDictionaryExcessKeysError, VectorTupleLengthError, MetadataDictionaryExpectedError, ) -from ..data.types import 
VectorTuple, VectorTypedDict +from ..db_data.types import VectorTuple, VectorTypedDict from .sparse_values_factory import SparseValuesFactory from pinecone.core.grpc.protos.db_data_2025_01_pb2 import ( diff --git a/pinecone/inference/__init__.py b/pinecone/inference/__init__.py new file mode 100644 index 00000000..235cbc69 --- /dev/null +++ b/pinecone/inference/__init__.py @@ -0,0 +1,7 @@ +from .repl_overrides import install_repl_overrides +from .inference import Inference +from .inference_asyncio import AsyncioInference +from .inference_request_builder import RerankModel, EmbedModel +from .models import ModelInfo, ModelInfoList, EmbeddingsList, RerankResult + +install_repl_overrides() diff --git a/pinecone/data/features/inference/inference.py b/pinecone/inference/inference.py similarity index 57% rename from pinecone/data/features/inference/inference.py rename to pinecone/inference/inference.py index 71ada564..53a52aa3 100644 --- a/pinecone/data/features/inference/inference.py +++ b/pinecone/inference/inference.py @@ -1,12 +1,13 @@ import logging -from typing import Optional, Dict, List, Union, Any +import warnings +from typing import Optional, Dict, List, Union, Any, TYPE_CHECKING from pinecone.openapi_support import ApiClient from pinecone.core.openapi.inference.apis import InferenceApi from .models import EmbeddingsList, RerankResult from pinecone.core.openapi.inference import API_VERSION from pinecone.utils import setup_openapi_client, PluginAware - +from pinecone.utils import require_kwargs from .inference_request_builder import ( InferenceRequestBuilder, @@ -14,6 +15,11 @@ RerankModel as RerankModelEnum, ) +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration + from .resources.sync.model import Model as ModelResource + from .models import ModelInfo, ModelInfoList + logger = logging.getLogger(__name__) """ @private """ @@ -44,14 +50,20 @@ class Inference(PluginAware): EmbedModel = EmbedModelEnum RerankModel = RerankModelEnum - def __init__(self, config, openapi_config, **kwargs) -> None: - self.config = config + def __init__( + self, + config: "Config", + openapi_config: "OpenApiConfiguration", + pool_threads: int = 1, + **kwargs, + ) -> None: + self._config = config """ @private """ - self.openapi_config = openapi_config + self._openapi_config = openapi_config """ @private """ - self.pool_threads = kwargs.get("pool_threads", 1) + self._pool_threads = pool_threads """ @private """ self.__inference_api = setup_openapi_client( @@ -59,13 +71,80 @@ def __init__(self, config, openapi_config, **kwargs) -> None: api_klass=InferenceApi, config=config, openapi_config=openapi_config, - pool_threads=kwargs.get("pool_threads", 1), + pool_threads=self._pool_threads, api_version=API_VERSION, ) - self.load_plugins( - config=self.config, openapi_config=self.openapi_config, pool_threads=self.pool_threads + self._model: Optional["ModelResource"] = None # Lazy initialization + """ @private """ + + super().__init__() # Initialize PluginAware + + @property + def config(self) -> "Config": + """@private""" + # The config property is considered private, but the name cannot be changed to include underscore + # without breaking compatibility with plugins in the wild. + return self._config + + @property + def openapi_config(self) -> "OpenApiConfiguration": + """@private""" + warnings.warn( + "The `openapi_config` property has been renamed to `_openapi_config`. It is considered private and should not be used directly. 
This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, ) + return self._openapi_config + + @property + def pool_threads(self) -> int: + """@private""" + warnings.warn( + "The `pool_threads` property has been renamed to `_pool_threads`. It is considered private and should not be used directly. This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self._pool_threads + + @property + def model(self) -> "ModelResource": + """ + Model is a resource that describes models available in the Pinecone Inference API. + + Currently you can get or list models. + + ```python + pc = Pinecone() + + # List all models + models = pc.inference.model.list() + + # List models, with model type filtering + models = pc.inference.model.list(type="embed") + models = pc.inference.model.list(type="rerank") + + # List models, with vector type filtering + models = pc.inference.model.list(vector_type="dense") + models = pc.inference.model.list(vector_type="sparse") + + # List models, with both type and vector type filtering + models = pc.inference.model.list(type="rerank", vector_type="dense") + + # Get details on a specific model + model = pc.inference.model.get("text-embedding-3-small") + ``` + """ + if self._model is None: + from .resources.sync.model import Model as ModelResource + + self._model = ModelResource( + inference_api=self.__inference_api, + config=self._config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) + return self._model def embed( self, @@ -186,3 +265,57 @@ def rerank( ) resp = self.__inference_api.rerank(rerank_request=rerank_request) return RerankResult(resp) + + @require_kwargs + def list_models( + self, *, type: Optional[str] = None, vector_type: Optional[str] = None + ) -> "ModelInfoList": + """ + List all available models. + + + ```python + pc = Pinecone() + + # List all models + models = pc.inference.list_models() + + # List models, with model type filtering + models = pc.inference.list_models(type="embed") + models = pc.inference.list_models(type="rerank") + + # List models, with vector type filtering + models = pc.inference.list_models(vector_type="dense") + models = pc.inference.list_models(vector_type="sparse") + + # List models, with both type and vector type filtering + models = pc.inference.list_models(type="rerank", vector_type="dense") + ``` + + :param type: The type of model to list. Either "embed" or "rerank". + :type type: str, optional + + :param vector_type: The type of vector to list. Either "dense" or "sparse". + :type vector_type: str, optional + + :return: A list of models. + """ + return self.model.list(type=type, vector_type=vector_type) + + @require_kwargs + def get_model(self, model_name: str) -> "ModelInfo": + """ + Get details on a specific model. + + ```python + pc = Pinecone() + + model = pc.inference.get_model(model_name="text-embedding-3-small") + ``` + + :param model_name: The name of the model to get details on. + :type model_name: str, required + + :return: A ModelInfo object.
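For downstream code, these shims mean reads of `openapi_config` and `pool_threads` keep working for now but emit a `DeprecationWarning`. A quick way to surface any remaining usages before the warning becomes an error, using only the standard library (`client` here is a placeholder for any object carrying the shimmed properties):

```python
import warnings

with warnings.catch_warnings():
    # Escalate DeprecationWarning to an exception so lingering reads fail loudly in tests.
    warnings.simplefilter("error", DeprecationWarning)
    _ = client.pool_threads  # would now raise instead of silently warning
```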
+ """ + return self.model.get(model_name=model_name) diff --git a/pinecone/data/features/inference/inference_asyncio.py b/pinecone/inference/inference_asyncio.py similarity index 68% rename from pinecone/data/features/inference/inference_asyncio.py rename to pinecone/inference/inference_asyncio.py index 06ec7388..65ec8e79 100644 --- a/pinecone/data/features/inference/inference_asyncio.py +++ b/pinecone/inference/inference_asyncio.py @@ -1,7 +1,8 @@ -from typing import Optional, Dict, List, Union, Any +from typing import Optional, Dict, List, Union, Any, TYPE_CHECKING from pinecone.core.openapi.inference.api.inference_api import AsyncioInferenceApi -from .models import EmbeddingsList, RerankResult +from .models import EmbeddingsList, RerankResult, ModelInfoList, ModelInfo +from pinecone.utils import require_kwargs, parse_non_empty_args from .inference_request_builder import ( InferenceRequestBuilder, @@ -9,6 +10,9 @@ RerankModel as RerankModelEnum, ) +if TYPE_CHECKING: + from .resources.asyncio.model import ModelAsyncio as ModelAsyncioResource + class AsyncioInference: """ @@ -40,6 +44,9 @@ def __init__(self, api_client, **kwargs) -> None: self.api_client = api_client """ @private """ + self._model: Optional["ModelAsyncioResource"] = None + """ @private """ + self.__inference_api = AsyncioInferenceApi(api_client) """ @private """ @@ -84,6 +91,39 @@ async def embed( resp = await self.__inference_api.embed(embed_request=request_body) return EmbeddingsList(resp) + @property + def model(self) -> "ModelAsyncioResource": + """ + Model is a resource that describes models available in the Pinecone Inference API. + + Curently you can get or list models. + + ```python + async with PineconeAsyncio() as pc: + # List all models + models = await pc.inference.model.list() + + # List models, with model type filtering + models = await pc.inference.model.list(type="embed") + models = await pc.inference.model.list(type="rerank") + + # List models, with vector type filtering + models = await pc.inference.model.list(vector_type="dense") + models = await pc.inference.model.list(vector_type="sparse") + + # List models, with both type and vector type filtering + models = await pc.inference.model.list(type="rerank", vector_type="dense") + + # Get details on a specific model + model = await pc.inference.model.get("text-embedding-3-small") + ``` + """ + if self._model is None: + from .resources.asyncio.model import ModelAsyncio as ModelAsyncioResource + + self._model = ModelAsyncioResource(inference_api=self.__inference_api) + return self._model + async def rerank( self, model: str, @@ -162,3 +202,40 @@ async def rerank( ) resp = await self.__inference_api.rerank(rerank_request=rerank_request) return RerankResult(resp) + + @require_kwargs + async def list_models( + self, *, type: Optional[str] = None, vector_type: Optional[str] = None + ) -> ModelInfoList: + """ + List all available models. + + :param type: The type of model to list. Either "embed" or "rerank". + :type type: str, optional + + :param vector_type: The type of vector to list. Either "dense" or "sparse". + :type vector_type: str, optional + + :return: A list of models. + """ + args = parse_non_empty_args([("type", type), ("vector_type", vector_type)]) + resp = await self.__inference_api.list_models(**args) + return ModelInfoList(resp) + + @require_kwargs + async def get_model(self, model_name: str) -> ModelInfo: + """ + Get details on a specific model. 
+ + ```python + async with PineconeAsyncio() as pc: + model = await pc.inference.get_model(model_name="text-embedding-3-small") + ``` + + :param model_name: The name of the model to get details on. + :type model_name: str, required + + :return: A ModelInfo object. + """ + resp = await self.__inference_api.get_model(model_name=model_name) + return ModelInfo(resp) diff --git a/pinecone/data/features/inference/inference_request_builder.py b/pinecone/inference/inference_request_builder.py similarity index 100% rename from pinecone/data/features/inference/inference_request_builder.py rename to pinecone/inference/inference_request_builder.py diff --git a/pinecone/inference/models/__init__.py b/pinecone/inference/models/__init__.py new file mode 100644 index 00000000..11056408 --- /dev/null +++ b/pinecone/inference/models/__init__.py @@ -0,0 +1,6 @@ +from .embedding_list import EmbeddingsList +from .rerank_result import RerankResult +from .model_info import ModelInfo +from .model_info_list import ModelInfoList + +__all__ = ["EmbeddingsList", "RerankResult", "ModelInfo", "ModelInfoList"] diff --git a/pinecone/data/features/inference/models/embedding_list.py b/pinecone/inference/models/embedding_list.py similarity index 100% rename from pinecone/data/features/inference/models/embedding_list.py rename to pinecone/inference/models/embedding_list.py diff --git a/pinecone/models/index_embed.py b/pinecone/inference/models/index_embed.py similarity index 94% rename from pinecone/models/index_embed.py rename to pinecone/inference/models/index_embed.py index 4d1ccfe3..4c3306d0 100644 --- a/pinecone/models/index_embed.py +++ b/pinecone/inference/models/index_embed.py @@ -1,8 +1,8 @@ from dataclasses import dataclass from typing import Optional, Dict, Any, Union -from ..enums import Metric -from ..data.features.inference import EmbedModel +from pinecone.db_control.enums import Metric +from pinecone.inference.inference_request_builder import EmbedModel @dataclass(frozen=True) diff --git a/pinecone/inference/models/model_info.py b/pinecone/inference/models/model_info.py new file mode 100644 index 00000000..c8e37f21 --- /dev/null +++ b/pinecone/inference/models/model_info.py @@ -0,0 +1,43 @@ +import json +from pinecone.utils.repr_overrides import custom_serializer, install_json_repr_override +from pinecone.core.openapi.inference.model.model_info import ModelInfo as OpenAPIModelInfo +from pinecone.core.openapi.inference.model.model_info_supported_parameter import ( + ModelInfoSupportedParameter as OpenAPIModelInfoSupportedParameter, +) + +for klass in [ + # OpenAPIModelInfo, + # OpenAPIModelInfoMetric, + OpenAPIModelInfoSupportedParameter + # OpenAPIModelInfoSupportedMetrics, +]: + install_json_repr_override(klass) + + +class ModelInfo: + def __init__(self, model_info: OpenAPIModelInfo): + self._model_info = model_info + if self._model_info.supported_metrics is not None: + self.supported_metrics = [sm.value for sm in self._model_info.supported_metrics.value] + else: + self.supported_metrics = [] + + def __str__(self): + return str(self._model_info) + + def __getattr__(self, attr): + if attr == "supported_metrics": + return self.supported_metrics + else: + return getattr(self._model_info, attr) + + def __getitem__(self, key): + return self.__getattr__(key) + + def __repr__(self): + return json.dumps(self.to_dict(), indent=4, default=custom_serializer) + + def to_dict(self): + raw = self._model_info.to_dict() + raw["supported_metrics"] = self.supported_metrics + return raw diff --git 
a/pinecone/inference/models/model_info_list.py b/pinecone/inference/models/model_info_list.py new file mode 100644 index 00000000..01d2f2c5 --- /dev/null +++ b/pinecone/inference/models/model_info_list.py @@ -0,0 +1,57 @@ +import json +from typing import List +from pinecone.core.openapi.inference.model.model_info_list import ( + ModelInfoList as OpenAPIModelInfoList, +) +from .model_info import ModelInfo +from pinecone.utils.repr_overrides import custom_serializer + + +class ModelInfoList: + """ + A list of model information. + """ + + def __init__(self, model_info_list: OpenAPIModelInfoList): + self._model_info_list = model_info_list + self._models = [ModelInfo(model_info) for model_info in model_info_list.models] + + def names(self) -> List[str]: + return [i.name for i in self._models] + + def __getitem__(self, key): + if isinstance(key, int): + return self._models[key] + elif key == "models": + # Return mapped models + return self._models + else: + # any other keys added in the future + return self._model_info_list[key] + + def __getattr__(self, attr): + if attr == "models": + return self._models + else: + # any other keys added in the future + return getattr(self._model_info_list, attr) + + def __len__(self): + return len(self._models) + + def __iter__(self): + return iter(self._models) + + def __str__(self): + return str(self._models) + + def __repr__(self): + raw_dict = self._model_info_list.to_dict() + raw_dict["models"] = [i.to_dict() for i in self._models] + + # Remove keys with value None + for key, value in list(raw_dict.items()): + if value is None: + del raw_dict[key] + + return json.dumps(raw_dict, indent=4, default=custom_serializer) diff --git a/pinecone/data/features/inference/models/rerank_result.py b/pinecone/inference/models/rerank_result.py similarity index 100% rename from pinecone/data/features/inference/models/rerank_result.py rename to pinecone/inference/models/rerank_result.py diff --git a/pinecone/data/features/inference/repl_overrides.py b/pinecone/inference/repl_overrides.py similarity index 100% rename from pinecone/data/features/inference/repl_overrides.py rename to pinecone/inference/repl_overrides.py diff --git a/pinecone/inference/resources/asyncio/model.py b/pinecone/inference/resources/asyncio/model.py new file mode 100644 index 00000000..2d54ebd2 --- /dev/null +++ b/pinecone/inference/resources/asyncio/model.py @@ -0,0 +1,47 @@ +from typing import TYPE_CHECKING, Optional +from pinecone.utils import require_kwargs, parse_non_empty_args +from ...models import ModelInfoList, ModelInfo + + +if TYPE_CHECKING: + from pinecone.core.openapi.inference.api.inference_api import AsyncioInferenceApi + + +class ModelAsyncio: + def __init__(self, inference_api: "AsyncioInferenceApi") -> None: + self.__inference_api = inference_api + """ @private """ + + super().__init__() + + @require_kwargs + async def list( + self, *, type: Optional[str] = None, vector_type: Optional[str] = None + ) -> ModelInfoList: + """ + List all available models. + + :param type: The type of model to list. Either "embed" or "rerank". + :type type: str, optional + + :param vector_type: The type of vector to list. Either "dense" or "sparse". + :type vector_type: str, optional + + :return: A list of models.
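Both the sync and asyncio inference classes gate their `model` resource behind the same lazy-initialization idiom: the attribute starts as `None` and the resource module is imported only inside the property, on first access, which keeps `import pinecone` fast. The idiom reduced to a skeleton (class and module names here are illustrative, not the SDK's):

```python
from typing import Optional


class HasLazyResource:
    def __init__(self) -> None:
        self._resource: Optional[object] = None  # nothing constructed yet

    @property
    def resource(self) -> object:
        if self._resource is None:
            # Deferred import: the module is only loaded if the feature is used.
            from some_pkg.resource import Resource  # hypothetical module

            self._resource = Resource()
        return self._resource
```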
+ """ + args = parse_non_empty_args([("type", type), ("vector_type", vector_type)]) + model_list = await self.__inference_api.list_models(**args) + return ModelInfoList(model_list) + + @require_kwargs + async def get(self, model_name: str) -> ModelInfo: + """ + Get a specific model by name. + + :param model_name: The name of the model to get. + :type model_name: str, required + + :return: A model. + """ + model_info = await self.__inference_api.get_model(model_name=model_name) + return ModelInfo(model_info) diff --git a/pinecone/inference/resources/sync/model.py b/pinecone/inference/resources/sync/model.py new file mode 100644 index 00000000..19b97f90 --- /dev/null +++ b/pinecone/inference/resources/sync/model.py @@ -0,0 +1,69 @@ +from typing import TYPE_CHECKING, Optional +from pinecone.utils import PluginAware, require_kwargs, parse_non_empty_args +from ...models import ModelInfoList, ModelInfo + + +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration + from pinecone.core.openapi.inference.api.inference_api import InferenceApi + + +class Model(PluginAware): + def __init__( + self, + inference_api: "InferenceApi", + config: "Config", + openapi_config: "OpenApiConfiguration", + pool_threads: int = 1, + **kwargs, + ) -> None: + self._config = config + """ @private """ + + self._openapi_config = openapi_config + """ @private """ + + self._pool_threads = kwargs.get("pool_threads", 1) + """ @private """ + + self.__inference_api = inference_api + """ @private """ + + super().__init__() # Initialize PluginAware + + @property + def config(self) -> "Config": + """@private""" + # The config property is considered private, but the name cannot be changed to include underscore + # without breaking compatibility with plugins in the wild. + return self._config + + @require_kwargs + def list( + self, *, type: Optional[str] = None, vector_type: Optional[str] = None + ) -> ModelInfoList: + """ + List all available models. + + :param type: The type of model to list. Either "embed" or "rerank". + :type type: str, optional + + :param vector_type: The type of vector to list. Either "dense" or "sparse". + :type vector_type: str, optional + + :return: A list of models. + """ + args = parse_non_empty_args([("type", type), ("vector_type", vector_type)]) + return ModelInfoList(self.__inference_api.list_models(**args)) + + @require_kwargs + def get(self, model_name: str) -> ModelInfo: + """ + Get a specific model by name. + + :param model_name: The name of the model to get. + :type model_name: str, required + + :return: A model. 
+ """ + return ModelInfo(self.__inference_api.get_model(model_name=model_name)) diff --git a/pinecone/control/langchain_import_warnings.py b/pinecone/langchain_import_warnings.py similarity index 100% rename from pinecone/control/langchain_import_warnings.py rename to pinecone/langchain_import_warnings.py diff --git a/pinecone/control/pinecone_interface.py b/pinecone/legacy_pinecone_interface.py similarity index 84% rename from pinecone/control/pinecone_interface.py rename to pinecone/legacy_pinecone_interface.py index c183e611..27e893d7 100644 --- a/pinecone/control/pinecone_interface.py +++ b/pinecone/legacy_pinecone_interface.py @@ -1,30 +1,35 @@ from abc import ABC, abstractmethod -from typing import Optional, Dict, Union - - -from pinecone.models import ( - ServerlessSpec, - PodSpec, - IndexList, - CollectionList, - IndexModel, - IndexEmbed, -) -from pinecone.enums import ( - Metric, - VectorType, - DeletionProtection, - PodType, - CloudProvider, - AwsRegion, - GcpRegion, - AzureRegion, -) -from .types import CreateIndexForModelEmbedTypedDict - - -class PineconeDBControlInterface(ABC): +from typing import Optional, Dict, Union, TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.db_control.models import ( + ServerlessSpec, + PodSpec, + ByocSpec, + IndexList, + CollectionList, + IndexModel, + IndexEmbed, + BackupModel, + BackupList, + RestoreJobModel, + RestoreJobList, + ) + from pinecone.db_control.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, + ) + from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict + + +class LegacyPineconeDBControlInterface(ABC): @abstractmethod def __init__( self, @@ -190,14 +195,16 @@ def __init__( def create_index( self, name: str, - spec: Union[Dict, ServerlessSpec, PodSpec], + spec: Union[Dict, "ServerlessSpec", "PodSpec", "ByocSpec"], dimension: Optional[int], - metric: Optional[Union[Metric, str]] = Metric.COSINE, + metric: Optional[Union["Metric", str]] = "Metric.COSINE", timeout: Optional[int] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, - vector_type: Optional[Union[VectorType, str]] = VectorType.DENSE, + deletion_protection: Optional[ + Union["DeletionProtection", str] + ] = "DeletionProtection.DISABLED", + vector_type: Optional[Union["VectorType", str]] = "VectorType.DENSE", tags: Optional[Dict[str, str]] = None, - ) -> IndexModel: + ) -> "IndexModel": """Creates a Pinecone index. :param name: The name of the index to create. Must be unique within your project and @@ -208,7 +215,7 @@ def create_index( :type metric: str, optional :param spec: A dictionary containing configurations describing how the index should be deployed. For serverless indexes, specify region and cloud. For pod indexes, specify replicas, shards, pods, pod_type, metadata_config, and source_collection. - Alternatively, use the `ServerlessSpec` or `PodSpec` objects to specify these configurations. + Alternatively, use the `ServerlessSpec`, `PodSpec`, or `ByocSpec` objects to specify these configurations. :type spec: Dict :param dimension: If you are creating an index with `vector_type="dense"` (which is the default), you need to specify `dimension` to indicate the size of your vectors. This should match the dimension of the embeddings you will be inserting. 
For example, if you are using @@ -295,17 +302,50 @@ def create_index( """ pass + @abstractmethod + def create_index_from_backup( + self, + *, + name: str, + backup_id: str, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + tags: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, + ) -> "IndexModel": + """ + Create an index from a backup. + + Call `list_backups` to get a list of backups for your project. + + :param name: The name of the index to create. + :type name: str + :param backup_id: The ID of the backup to restore. + :type backup_id: str + :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. This setting can be changed with `configure_index`. + :type deletion_protection: Optional[Literal["enabled", "disabled"]] + :param tags: Tags are key-value pairs you can attach to indexes to better understand, organize, and identify your resources. Some example use cases include tagging indexes with the name of the model that generated the embeddings, the date the index was created, or the purpose of the index. + :type tags: Optional[Dict[str, str]] + :param timeout: Specify the number of seconds to wait until index is ready to receive data. If None, wait indefinitely; if >=0, time out after this many seconds; + if -1, return immediately and do not wait. + :return: A description of the index that was created. + :rtype: IndexModel + """ + pass + @abstractmethod def create_index_for_model( self, + *, name: str, - cloud: Union[CloudProvider, str], - region: Union[AwsRegion, GcpRegion, AzureRegion, str], - embed: Union[IndexEmbed, CreateIndexForModelEmbedTypedDict], + cloud: Union["CloudProvider", str], + region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], + embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], tags: Optional[Dict[str, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + deletion_protection: Optional[ + Union["DeletionProtection", str] + ] = "DeletionProtection.DISABLED", timeout: Optional[int] = None, - ) -> IndexModel: + ) -> "IndexModel": """ :param name: The name of the index to create. Must be unique within your project and cannot be changed once created. Allowed characters are lowercase letters, numbers, @@ -414,7 +454,7 @@ def delete_index(self, name: str, timeout: Optional[int] = None): pass @abstractmethod - def list_indexes(self) -> IndexList: + def list_indexes(self) -> "IndexList": """ :return: Returns an `IndexList` object, which is iterable and contains a list of `IndexModel` objects. The `IndexList` also has a convenience method `names()` @@ -447,7 +487,7 @@ def list_indexes(self) -> IndexList: pass @abstractmethod - def describe_index(self, name: str) -> IndexModel: + def describe_index(self, name: str) -> "IndexModel": """ :param name: the name of the index to describe. 
:return: Returns an `IndexModel` object @@ -534,8 +574,8 @@ def configure_index( self, name: str, replicas: Optional[int] = None, - pod_type: Optional[Union[PodType, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = None, + pod_type: Optional[Union["PodType", str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = None, tags: Optional[Dict[str, str]] = None, ): """ @@ -622,7 +662,7 @@ configure_index( pass @abstractmethod - def create_collection(self, name: str, source: str): + def create_collection(self, name: str, source: str) -> None: """Create a collection from a pod-based index :param name: Name of the collection @@ -631,7 +671,7 @@ pass @abstractmethod - def list_collections(self) -> CollectionList: + def list_collections(self) -> "CollectionList": """List all collections ```python @@ -697,6 +737,77 @@ describe_collection(self, name: str): """ pass + @abstractmethod + def create_backup( + self, *, index_name: str, backup_name: str, description: str = "" + ) -> "BackupModel": + """Create a backup of an index. + + Args: + index_name (str): The name of the index to backup. + backup_name (str): The name to give the backup. + description (str): Optional description of the backup. + """ + pass + + @abstractmethod + def list_backups( + self, + *, + index_name: Optional[str] = None, + limit: Optional[int] = 10, + pagination_token: Optional[str] = None, + ) -> "BackupList": + """List backups. + + If index_name is provided, the backups will be filtered by index. If no index_name is provided, all backups in the project will be returned. + + Args: + index_name (str): The name of the index to list backups for. + limit (int): The maximum number of backups to return. + pagination_token (str): The pagination token to use for pagination. + """ + pass + + @abstractmethod + def describe_backup(self, *, backup_id: str) -> "BackupModel": + """Describe a backup. + + Args: + backup_id (str): The ID of the backup to describe. + """ + pass + + @abstractmethod + def delete_backup(self, *, backup_id: str) -> None: + """Delete a backup. + + Args: + backup_id (str): The ID of the backup to delete. + """ + pass + + @abstractmethod + def list_restore_jobs( + self, *, limit: Optional[int] = 10, pagination_token: Optional[str] = None + ) -> "RestoreJobList": + """List restore jobs. + + Args: + limit (int): The maximum number of restore jobs to return. + pagination_token (str): The pagination token to use for pagination. + """ + pass + + @abstractmethod + def describe_restore_job(self, *, job_id: str) -> "RestoreJobModel": + """Describe a restore job. + + Args: + job_id (str): The ID of the restore job to describe.
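Taken together with `create_index_from_backup` above, these abstract methods describe a complete snapshot-and-restore workflow. An illustrative end-to-end run against this interface; the index names are placeholders, and the `backup_id` attribute on the returned `BackupModel` is assumed from the `describe_backup`/`delete_backup` signatures:

```python
from pinecone import Pinecone

pc = Pinecone()  # assumes PINECONE_API_KEY is set in the environment

# Snapshot an existing index...
backup = pc.create_backup(index_name="my-index", backup_name="my-index-backup")

# ...restore it into a brand-new index (timeout=None waits until ready)...
pc.create_index_from_backup(name="my-index-restored", backup_id=backup.backup_id)

# ...and inspect the server-side restore jobs that result.
print(pc.list_restore_jobs(limit=10))
```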
+ """ + pass + @abstractmethod def Index(self, name: str = "", host: str = "", **kwargs): """ diff --git a/pinecone/models/__init__.py b/pinecone/models/__init__.py index 86306c1e..fb94ddf5 100644 --- a/pinecone/models/__init__.py +++ b/pinecone/models/__init__.py @@ -1,20 +1,9 @@ -from .index_description import ServerlessSpecDefinition, PodSpecDefinition -from .collection_description import CollectionDescription -from .serverless_spec import ServerlessSpec -from .pod_spec import PodSpec -from .index_list import IndexList -from .collection_list import CollectionList -from .index_model import IndexModel -from .index_embed import IndexEmbed +import warnings -__all__ = [ - "CollectionDescription", - "PodSpec", - "PodSpecDefinition", - "ServerlessSpec", - "ServerlessSpecDefinition", - "IndexList", - "CollectionList", - "IndexModel", - "IndexEmbed", -] +from pinecone.db_control.models import * + +warnings.warn( + "The module at `pinecone.models` has moved to `pinecone.db_control.models`. " + "This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, +) diff --git a/pinecone/openapi_support/__init__.py b/pinecone/openapi_support/__init__.py index 63e3fb0a..890c3007 100644 --- a/pinecone/openapi_support/__init__.py +++ b/pinecone/openapi_support/__init__.py @@ -8,7 +8,7 @@ from .endpoint_utils import ExtraOpenApiKwargsTypedDict, KwargsWithOpenApiKwargDefaultsTypedDict from .asyncio_api_client import AsyncioApiClient from .asyncio_endpoint import AsyncioEndpoint -from .configuration import Configuration +from .configuration_lazy import Configuration from .exceptions import ( PineconeException, PineconeApiAttributeError, diff --git a/pinecone/openapi_support/api_client.py b/pinecone/openapi_support/api_client.py index 421d56cc..ee1e4649 100644 --- a/pinecone/openapi_support/api_client.py +++ b/pinecone/openapi_support/api_client.py @@ -1,14 +1,14 @@ import atexit -from multiprocessing.pool import ThreadPool -from concurrent.futures import ThreadPoolExecutor import io -from typing import Optional, List, Tuple, Dict, Any, Union -from .deserializer import Deserializer +from typing import Optional, List, Tuple, Dict, Any, Union, TYPE_CHECKING +if TYPE_CHECKING: + from multiprocessing.pool import ThreadPool + from concurrent.futures import ThreadPoolExecutor from .rest_urllib3 import Urllib3RestClient -from .configuration import Configuration +from ..config.openapi_configuration import Configuration from .exceptions import PineconeApiValueError, PineconeApiException from .api_client_utils import ( parameters_to_tuples, @@ -30,8 +30,8 @@ class ApiClient(object): to the API. More threads means more concurrent API requests. """ - _pool: Optional[ThreadPool] = None - _threadpool_executor: Optional[ThreadPoolExecutor] = None + _pool: Optional["ThreadPool"] = None + _threadpool_executor: Optional["ThreadPoolExecutor"] = None def __init__( self, configuration: Optional[Configuration] = None, pool_threads: Optional[int] = 1 @@ -64,18 +64,22 @@ def close(self): atexit.unregister(self.close) @property - def pool(self): + def pool(self) -> "ThreadPool": """Create thread pool on first request avoids instantiating unused threadpool for blocking clients. 
""" if self._pool is None: + from multiprocessing.pool import ThreadPool + atexit.register(self.close) self._pool = ThreadPool(self.pool_threads) return self._pool @property - def threadpool_executor(self): + def threadpool_executor(self) -> "ThreadPoolExecutor": if self._threadpool_executor is None: + from concurrent.futures import ThreadPoolExecutor + self._threadpool_executor = ThreadPoolExecutor(max_workers=self.pool_threads) return self._threadpool_executor @@ -186,6 +190,8 @@ def __call_api( # deserialize response data if response_type: + from .deserializer import Deserializer + Deserializer.decode_response(response_type=response_type, response=response_data) return_data = Deserializer.deserialize( response=response_data, diff --git a/pinecone/openapi_support/api_version.py b/pinecone/openapi_support/api_version.py index fef29cbb..403ff26f 100644 --- a/pinecone/openapi_support/api_version.py +++ b/pinecone/openapi_support/api_version.py @@ -1,5 +1,5 @@ # This file is generated by codegen/build-oas.sh # Do not edit this file manually. -API_VERSION = "2025-01" -APIS_REPO_SHA = "eb79d8ea0c146aebe36c3769e19cbe9618db2d54" +API_VERSION = "2025-04" +APIS_REPO_SHA = "7e21ca9adb6a530ce11909d6209d69551f86e9bd" diff --git a/pinecone/openapi_support/asyncio_api_client.py b/pinecone/openapi_support/asyncio_api_client.py index 51f2e0ce..43c8e17b 100644 --- a/pinecone/openapi_support/asyncio_api_client.py +++ b/pinecone/openapi_support/asyncio_api_client.py @@ -7,7 +7,7 @@ from .rest_aiohttp import AiohttpRestClient -from .configuration import Configuration +from ..config.openapi_configuration import Configuration from .exceptions import PineconeApiValueError, PineconeApiException from .api_client_utils import ( parameters_to_tuples, diff --git a/pinecone/openapi_support/configuration.py b/pinecone/openapi_support/configuration.py index fb6d7d19..e69de29b 100644 --- a/pinecone/openapi_support/configuration.py +++ b/pinecone/openapi_support/configuration.py @@ -1,441 +0,0 @@ -import copy -import logging -import multiprocessing - -from http import client as http_client -from .exceptions import PineconeApiValueError -from typing import TypedDict - - -class HostSetting(TypedDict): - url: str - description: str - - -JSON_SCHEMA_VALIDATION_KEYWORDS = { - "multipleOf", - "maximum", - "exclusiveMaximum", - "minimum", - "exclusiveMinimum", - "maxLength", - "minLength", - "pattern", - "maxItems", - "minItems", -} - - -class Configuration: - """Class to hold the configuration of the API client. - - :param host: Base url - :param api_key: Dict to store API key(s). - Each entry in the dict specifies an API key. - The dict key is the name of the security scheme in the OAS specification. - The dict value is the API key secret. - :param api_key_prefix: Dict to store API prefix (e.g. Bearer) - The dict key is the name of the security scheme in the OAS specification. - The dict value is an API key prefix when generating the auth data. - :param discard_unknown_keys: Boolean value indicating whether to discard - unknown properties. A server may send a response that includes additional - properties that are not known by the client in the following scenarios: - 1. The OpenAPI document is incomplete, i.e. it does not match the server - implementation. - 2. The client was generated using an older version of the OpenAPI document - and the server has been upgraded since then. 
- If a schema in the OpenAPI document defines the additionalProperties attribute, - then all undeclared properties received by the server are injected into the - additional properties map. In that case, there are undeclared properties, and - nothing to discard. - :param disabled_client_side_validations (string): Comma-separated list of - JSON schema validation keywords to disable JSON schema structural validation - rules. The following keywords may be specified: multipleOf, maximum, - exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern, - maxItems, minItems. - By default, the validation is performed for data generated locally by the client - and data received from the server, independent of any validation performed by - the server side. If the input data does not satisfy the JSON schema validation - rules specified in the OpenAPI document, an exception is raised. - If disabled_client_side_validations is set, structural validation is - disabled. This can be useful to troubleshoot data validation problem, such as - when the OpenAPI document validation rules do not match the actual API data - received by the server. - :param server_operation_index: Mapping from operation ID to an index to server - configuration. - :param server_operation_variables: Mapping from operation ID to a mapping with - string values to replace variables in templated server configuration. - The validation of enums is performed for variables with defined enum values before. - :param ssl_ca_cert: str - the path to a file of concatenated CA certificates - in PEM format - - :Example: - - API Key Authentication Example. - Given the following security scheme in the OpenAPI specification: - components: - securitySchemes: - cookieAuth: # name for the security scheme - type: apiKey - in: cookie - name: JSESSIONID # cookie name - - You can programmatically set the cookie: - - conf = pinecone.openapi_support.Configuration( - api_key={'cookieAuth': 'abc123'} - api_key_prefix={'cookieAuth': 'JSESSIONID'} - ) - - The following cookie will be added to the HTTP request: - Cookie: JSESSIONID abc123 - """ - - _default = None - - def __init__( - self, - host=None, - api_key=None, - api_key_prefix=None, - discard_unknown_keys=False, - disabled_client_side_validations="", - server_index=None, - server_variables=None, - server_operation_index=None, - server_operation_variables=None, - ssl_ca_cert=None, - ): - """Constructor""" - self._base_path = "https://api.pinecone.io" if host is None else host - """Default Base url - """ - self.server_index = 0 if server_index is None and host is None else server_index - self.server_operation_index = server_operation_index or {} - """Default server index - """ - self.server_variables = server_variables or {} - self.server_operation_variables = server_operation_variables or {} - """Default server variables - """ - self.temp_folder_path = None - """Temp file folder for downloading files - """ - # Authentication Settings - self.api_key = {} - if api_key: - self.api_key = api_key - """dict to store API key(s) - """ - self.api_key_prefix = {} - if api_key_prefix: - self.api_key_prefix = api_key_prefix - """dict to store API prefix (e.g. 
Bearer) - """ - self.refresh_api_key_hook = None - """function hook to refresh API key if expired - """ - self.discard_unknown_keys = discard_unknown_keys - self.disabled_client_side_validations = disabled_client_side_validations - self.logger = {} - """Logging Settings - """ - self.logger["package_logger"] = logging.getLogger("pinecone.openapi_support") - self.logger["urllib3_logger"] = logging.getLogger("urllib3") - self.logger_format = "%(asctime)s %(levelname)s %(message)s" - """Log format - """ - self.logger_stream_handler = None - """Log stream handler - """ - self.logger_file_handler = None - """Log file handler - """ - self.logger_file = None - """Debug file location - """ - self.debug = False - """Debug switch - """ - - self.verify_ssl = True - """SSL/TLS verification - Set this to false to skip verifying SSL certificate when calling API - from https server. - """ - self.ssl_ca_cert = ssl_ca_cert - """Set this to customize the certificate file to verify the peer. - """ - self.cert_file = None - """client certificate file - """ - self.key_file = None - """client key file - """ - self.assert_hostname = None - """Set this to True/False to enable/disable SSL hostname verification. - """ - - self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 - """urllib3 connection pool's maximum number of connections saved - per pool. urllib3 uses 1 connection as default value, but this is - not the best value when you are making a lot of possibly parallel - requests to the same host, which is often the case here. - cpu_count * 5 is used as default value to increase performance. - """ - - self.proxy = None - """Proxy URL - """ - self.proxy_headers = None - """Proxy headers - """ - self.safe_chars_for_path_param = "" - """Safe chars for path_param - """ - self.retries = None - """Adding retries to override urllib3 default value 3 - """ - # Enable client side validation - self.client_side_validation = True - - # Options to pass down to the underlying urllib3 socket - self.socket_options = None - - def __deepcopy__(self, memo): - cls = self.__class__ - result = cls.__new__(cls) - memo[id(self)] = result - for k, v in self.__dict__.items(): - if k not in ("logger", "logger_file_handler"): - setattr(result, k, copy.deepcopy(v, memo)) - # shallow copy of loggers - result.logger = copy.copy(self.logger) - # use setters to configure loggers - result.logger_file = self.logger_file - result.debug = self.debug - return result - - def __setattr__(self, name, value): - object.__setattr__(self, name, value) - if name == "disabled_client_side_validations": - s = set(filter(None, value.split(","))) - for v in s: - if v not in JSON_SCHEMA_VALIDATION_KEYWORDS: - raise PineconeApiValueError("Invalid keyword: '{0}''".format(v)) - self._disabled_client_side_validations = s - - @classmethod - def set_default(cls, default): - """Set default instance of configuration. - - It stores default configuration, which can be - returned by get_default_copy method. - - :param default: object of Configuration - """ - cls._default = copy.deepcopy(default) - - @classmethod - def get_default_copy(cls): - """Return new instance of configuration. - - This method returns newly created, based on default constructor, - object of Configuration class or returns a copy of default - configuration passed by the set_default method. - - :return: The configuration object. - """ - if cls._default is not None: - return copy.deepcopy(cls._default) - return Configuration() - - @property - def logger_file(self): - """The logger file. 
- - If the logger_file is None, then add stream handler and remove file - handler. Otherwise, add file handler and remove stream handler. - - :param value: The logger_file path. - :type: str - """ - return self.__logger_file - - @logger_file.setter - def logger_file(self, value): - """The logger file. - - If the logger_file is None, then add stream handler and remove file - handler. Otherwise, add file handler and remove stream handler. - - :param value: The logger_file path. - :type: str - """ - self.__logger_file = value - if self.__logger_file: - # If set logging file, - # then add file handler and remove stream handler. - self.logger_file_handler = logging.FileHandler(self.__logger_file) - self.logger_file_handler.setFormatter(self.logger_formatter) - for _, logger in self.logger.items(): - logger.addHandler(self.logger_file_handler) - - @property - def debug(self): - """Debug status - - :param value: The debug status, True or False. - :type: bool - """ - return self.__debug - - @debug.setter - def debug(self, value): - """Debug status - - :param value: The debug status, True or False. - :type: bool - """ - self.__debug = value - if self.__debug: - # if debug status is True, turn on debug logging - for _, logger in self.logger.items(): - logger.setLevel(logging.DEBUG) - # turn on http_client debug - http_client.HTTPConnection.debuglevel = 1 - else: - # if debug status is False, turn off debug logging, - # setting log level to default `logging.WARNING` - for _, logger in self.logger.items(): - logger.setLevel(logging.WARNING) - # turn off http_client debug - http_client.HTTPConnection.debuglevel = 0 - - @property - def logger_format(self): - """The logger format. - - The logger_formatter will be updated when sets logger_format. - - :param value: The format string. - :type: str - """ - return self.__logger_format - - @logger_format.setter - def logger_format(self, value): - """The logger format. - - The logger_formatter will be updated when sets logger_format. - - :param value: The format string. - :type: str - """ - self.__logger_format = value - self.logger_formatter = logging.Formatter(self.__logger_format) - - def get_api_key_with_prefix(self, identifier, alias=None): - """Gets API key (with prefix if set). - - :param identifier: The identifier of apiKey. - :param alias: The alternative identifier of apiKey. - :return: The token for api key authentication. - """ - if self.refresh_api_key_hook is not None: - self.refresh_api_key_hook(self) - key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None) - if key: - prefix = self.api_key_prefix.get(identifier) - if prefix: - return "%s %s" % (prefix, key) - else: - return key - - def auth_settings(self): - """Gets Auth Settings dict for api client. - - :return: The Auth Settings information dict. 
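Although `configuration.py` is emptied here, the class itself has only moved to `pinecone/config/openapi_configuration.py` (see the updated imports elsewhere in this diff), so the `auth_settings` behavior shown above is unchanged: the API key is still sent as an `Api-Key` request header. A small sketch against the new location, with a made-up key value:

```python
from pinecone.config.openapi_configuration import Configuration

conf = Configuration(api_key={"ApiKeyAuth": "pc-test-123"})
auth = conf.auth_settings()["ApiKeyAuth"]
assert (auth["in"], auth["key"], auth["value"]) == ("header", "Api-Key", "pc-test-123")
```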
- """ - auth = {} - if "ApiKeyAuth" in self.api_key: - auth["ApiKeyAuth"] = { - "type": "api_key", - "in": "header", - "key": "Api-Key", - "value": self.get_api_key_with_prefix("ApiKeyAuth"), - } - return auth - - def get_host_settings(self): - """Gets an array of host settings - - :return: An array of host settings - """ - return [{"url": "https://api.pinecone.io", "description": "Production API endpoints"}] - - def get_host_from_settings(self, index, variables=None, servers=None): - """Gets host URL based on the index and variables - :param index: array index of the host settings - :param variables: hash of variable and the corresponding value - :param servers: an array of host settings or None - :return: URL based on host settings - """ - if index is None: - return self._base_path - - variables = {} if variables is None else variables - servers = self.get_host_settings() if servers is None else servers - - try: - server = servers[index] - except IndexError: - raise ValueError( - "Invalid index {0} when selecting the host settings. Must be less than {1}".format( - index, len(servers) - ) - ) - - url = server["url"] - - # go through variables and replace placeholders - for variable_name, variable in server.get("variables", {}).items(): - used_value = variables.get(variable_name, variable["default_value"]) - - if "enum_values" in variable and used_value not in variable["enum_values"]: - raise ValueError( - "The variable `{0}` in the host URL has invalid value {1}. Must be {2}.".format( - variable_name, variables[variable_name], variable["enum_values"] - ) - ) - - url = url.replace("{" + variable_name + "}", used_value) - - return url - - @property - def host(self): - """Return generated host.""" - return self.get_host_from_settings(self.server_index, variables=self.server_variables) - - @host.setter - def host(self, value): - """Fix base path.""" - self._base_path = value - self.server_index = None - - def __repr__(self): - attrs = [ - f"host={self.host}", - "api_key=***", - f"api_key_prefix={self.api_key_prefix}", - f"connection_pool_maxsize={self.connection_pool_maxsize}", - f"discard_unknown_keys={self.discard_unknown_keys}", - f"disabled_client_side_validations={self.disabled_client_side_validations}", - f"server_index={self.server_index}", - f"server_variables={self.server_variables}", - f"server_operation_index={self.server_operation_index}", - f"server_operation_variables={self.server_operation_variables}", - f"ssl_ca_cert={self.ssl_ca_cert}", - ] - return f"Configuration({', '.join(attrs)})" diff --git a/pinecone/openapi_support/configuration_lazy.py b/pinecone/openapi_support/configuration_lazy.py new file mode 100644 index 00000000..27e90a34 --- /dev/null +++ b/pinecone/openapi_support/configuration_lazy.py @@ -0,0 +1,7 @@ +""" +Lazy import for the Configuration class to avoid loading the entire openapi_support package. 
+""" + +from ..config.openapi_configuration import Configuration + +__all__ = ["Configuration"] diff --git a/pinecone/openapi_support/endpoint_utils.py b/pinecone/openapi_support/endpoint_utils.py index 13522e85..867232b6 100644 --- a/pinecone/openapi_support/endpoint_utils.py +++ b/pinecone/openapi_support/endpoint_utils.py @@ -2,7 +2,7 @@ from .exceptions import PineconeApiTypeError, PineconeApiValueError from typing import Optional, Dict, Tuple, TypedDict, List, Literal, Any from .types import PropertyValidationTypedDict -from .configuration import Configuration +from ..config.openapi_configuration import Configuration from .model_utils import validate_and_convert_types, check_allowed_values, check_validations diff --git a/pinecone/openapi_support/exceptions.py b/pinecone/openapi_support/exceptions.py index fcc37da3..c9fcc571 100644 --- a/pinecone/openapi_support/exceptions.py +++ b/pinecone/openapi_support/exceptions.py @@ -1,140 +1 @@ -class PineconeException(Exception): - """The base exception class for all exceptions in the Pinecone Python SDK""" - - -class PineconeApiTypeError(PineconeException, TypeError): - def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None) -> None: - """Raises an exception for TypeErrors - - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (list): a list of keys an indices to get to the - current_item - None if unset - valid_classes (tuple): the primitive classes that current item - should be an instance of - None if unset - key_type (bool): False if our value is a value in a dict - True if it is a key in a dict - False if our item is an item in a list - None if unset - """ - self.path_to_item = path_to_item - self.valid_classes = valid_classes - self.key_type = key_type - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(PineconeApiTypeError, self).__init__(full_msg) - - -class PineconeApiValueError(PineconeException, ValueError): - def __init__(self, msg, path_to_item=None) -> None: - """ - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (list) the path to the exception in the - received_data dict. None if unset - """ - - self.path_to_item = path_to_item - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(PineconeApiValueError, self).__init__(full_msg) - - -class PineconeApiAttributeError(PineconeException, AttributeError): - def __init__(self, msg, path_to_item=None) -> None: - """ - Raised when an attribute reference or assignment fails. 
- - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (None/list) the path to the exception in the - received_data dict - """ - self.path_to_item = path_to_item - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(PineconeApiAttributeError, self).__init__(full_msg) - - -class PineconeApiKeyError(PineconeException, KeyError): - def __init__(self, msg, path_to_item=None) -> None: - """ - Args: - msg (str): the exception message - - Keyword Args: - path_to_item (None/list) the path to the exception in the - received_data dict - """ - self.path_to_item = path_to_item - full_msg = msg - if path_to_item: - full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) - super(PineconeApiKeyError, self).__init__(full_msg) - - -class PineconeApiException(PineconeException): - def __init__(self, status=None, reason=None, http_resp=None) -> None: - if http_resp: - self.status = http_resp.status - self.reason = http_resp.reason - self.body = http_resp.data - self.headers = http_resp.getheaders() - else: - self.status = status - self.reason = reason - self.body = None - self.headers = None - - def __str__(self): - """Custom error messages for exception""" - error_message = "({0})\nReason: {1}\n".format(self.status, self.reason) - if self.headers: - error_message += "HTTP response headers: {0}\n".format(self.headers) - - if self.body: - error_message += "HTTP response body: {0}\n".format(self.body) - - return error_message - - -class NotFoundException(PineconeApiException): - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(NotFoundException, self).__init__(status, reason, http_resp) - - -class UnauthorizedException(PineconeApiException): - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(UnauthorizedException, self).__init__(status, reason, http_resp) - - -class ForbiddenException(PineconeApiException): - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(ForbiddenException, self).__init__(status, reason, http_resp) - - -class ServiceException(PineconeApiException): - def __init__(self, status=None, reason=None, http_resp=None) -> None: - super(ServiceException, self).__init__(status, reason, http_resp) - - -def render_path(path_to_item): - """Returns a string representation of a path""" - result = "" - for pth in path_to_item: - if isinstance(pth, int): - result += "[{0}]".format(pth) - else: - result += "['{0}']".format(pth) - return result +from pinecone.exceptions import * # noqa: F403 diff --git a/pinecone/openapi_support/model_utils.py b/pinecone/openapi_support/model_utils.py index 4fc4cf0f..54cd9068 100644 --- a/pinecone/openapi_support/model_utils.py +++ b/pinecone/openapi_support/model_utils.py @@ -1,5 +1,4 @@ from datetime import date, datetime # noqa: F401 -from dateutil.parser import parse import inspect import io @@ -145,9 +144,17 @@ def set_attribute(self, name, value): self._check_type, configuration=self._configuration, ) - if (name,) in self.allowed_values: + if (name,) in self.allowed_values and self._enforce_allowed_values: + # Disabling allowed_value validation on response makes the SDK + # less fragile if unexpected values are returned. For example, if + # an unexpected index status is returned, we don't want to break + # when listing indexes due to validation on the status field against + # the allowed values in the enum. 
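The comments above describe a deliberate asymmetry: enum and validation checks stay strict for request payloads but are relaxed when parsing responses, so new server-side values don't break deserialization. A minimal standalone sketch of that pattern (the `StatusField` class and its values are hypothetical, not the SDK's model machinery):

```python
# Sketch: enum validation that can be relaxed for server responses.
class StatusField:
    ALLOWED = {"Initializing", "Ready", "Terminating"}

    def __init__(self, enforce_allowed_values: bool) -> None:
        # True for request models (fail fast on bad input); False for
        # response models, so new server-side enum values don't break parsing.
        self._enforce_allowed_values = enforce_allowed_values

    def set(self, value: str) -> str:
        if self._enforce_allowed_values and value not in self.ALLOWED:
            raise ValueError(f"{value!r} is not one of {sorted(self.ALLOWED)}")
        return value


# A response-side field keeps working even if the API starts returning "Upgrading".
print(StatusField(enforce_allowed_values=False).set("Upgrading"))
```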
check_allowed_values(self.allowed_values, (name,), value) - if (name,) in self.validations: + if (name,) in self.validations and self._enforce_validations: + # Disabling validation on response makes the SDK + # less fragile if unexpected values are returned. In general, + # we want the SDK to display whatever is returned by the API. check_validations(self.validations, (name,), value, self._configuration) self.__dict__["_data_store"][name] = value @@ -1149,6 +1156,8 @@ def deserialize_primitive(data, klass, path_to_item): additional_message = "" try: if klass in {datetime, date}: + from dateutil.parser import parse + additional_message = ( "If you need your parameter to have a fallback " "string value, please set its type as `type: {}` in your " diff --git a/pinecone/openapi_support/rest_aiohttp.py b/pinecone/openapi_support/rest_aiohttp.py index c7121a11..8b84e850 100644 --- a/pinecone/openapi_support/rest_aiohttp.py +++ b/pinecone/openapi_support/rest_aiohttp.py @@ -2,13 +2,15 @@ import certifi import json from .rest_utils import RestClientInterface, RESTResponse, raise_exceptions_or_return -from .configuration import Configuration +from ..config.openapi_configuration import Configuration class AiohttpRestClient(RestClientInterface): def __init__(self, configuration: Configuration) -> None: try: import aiohttp + from aiohttp_retry import RetryClient + from .retry_aiohttp import JitterRetry except ImportError: raise ImportError( "Additional dependencies are required to use Pinecone with asyncio. Include these extra dependencies in your project by installing `pinecone[asyncio]`." @@ -28,8 +30,21 @@ def __init__(self, configuration: Configuration) -> None: else: self._session = aiohttp.ClientSession(connector=conn) + if configuration.retries is not None: + retry_options = configuration.retries + else: + retry_options = JitterRetry( + attempts=5, + start_timeout=0.1, + max_timeout=3.0, + statuses={500, 502, 503, 504}, + methods=None, # retry on all methods + exceptions={aiohttp.ClientError, aiohttp.ServerDisconnectedError}, + ) + self._retry_client = RetryClient(client_session=self._session, retry_options=retry_options) + async def close(self): - await self._session.close() + await self._retry_client.close() async def request( self, @@ -48,7 +63,7 @@ async def request( if "application/x-ndjson" in headers.get("Content-Type", "").lower(): ndjson_data = "\n".join(json.dumps(record) for record in body) - async with self._session.request( + async with self._retry_client.request( method, url, params=query_params, headers=headers, data=ndjson_data ) as resp: content = await resp.read() @@ -57,7 +72,7 @@ async def request( ) else: - async with self._session.request( + async with self._retry_client.request( method, url, params=query_params, headers=headers, json=body ) as resp: content = await resp.read() diff --git a/pinecone/openapi_support/rest_urllib3.py b/pinecone/openapi_support/rest_urllib3.py index 85d008da..3f718347 100644 --- a/pinecone/openapi_support/rest_urllib3.py +++ b/pinecone/openapi_support/rest_urllib3.py @@ -4,11 +4,11 @@ import os from typing import Optional from urllib.parse import urlencode, quote -from .configuration import Configuration +from ..config.openapi_configuration import Configuration from .rest_utils import raise_exceptions_or_return, RESTResponse, RestClientInterface import urllib3 - +from .retry_urllib3 import JitterRetry from .exceptions import PineconeApiException, PineconeApiValueError @@ -52,6 +52,13 @@ def __init__( if configuration.retries is not None: 
addition_pool_args["retries"] = configuration.retries + else: + addition_pool_args["retries"] = JitterRetry( + total=5, + backoff_factor=0.25, + status_forcelist=(500, 502, 503, 504), + allowed_methods=None, + ) if configuration.socket_options is not None: addition_pool_args["socket_options"] = configuration.socket_options diff --git a/pinecone/openapi_support/retry_aiohttp.py b/pinecone/openapi_support/retry_aiohttp.py new file mode 100644 index 00000000..2b3019e7 --- /dev/null +++ b/pinecone/openapi_support/retry_aiohttp.py @@ -0,0 +1,44 @@ +import random +from typing import Optional +from aiohttp_retry import RetryOptionsBase, EvaluateResponseCallbackType, ClientResponse +import logging + +logger = logging.getLogger(__name__) + + +class JitterRetry(RetryOptionsBase): + """https://github.com/inyutin/aiohttp_retry/issues/44.""" + + def __init__( + self, + attempts: int = 3, # How many times we should retry + start_timeout: float = 0.1, # Base timeout time, then it exponentially grow + max_timeout: float = 5.0, # Max possible timeout between tries + statuses: Optional[set[int]] = None, # On which statuses we should retry + exceptions: Optional[set[type[Exception]]] = None, # On which exceptions we should retry + methods: Optional[set[str]] = None, # On which HTTP methods we should retry + retry_all_server_errors: bool = True, + evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None, + ) -> None: + super().__init__( + attempts=attempts, + statuses=statuses, + exceptions=exceptions, + methods=methods, + retry_all_server_errors=retry_all_server_errors, + evaluate_response_callback=evaluate_response_callback, + ) + + self._start_timeout: float = start_timeout + self._max_timeout: float = max_timeout + + def get_timeout( + self, + attempt: int, + response: Optional[ClientResponse] = None, # noqa: ARG002 + ) -> float: + logger.debug(f"JitterRetry get_timeout: attempt={attempt}, response={response}") + """Return timeout with exponential backoff.""" + jitter = random.uniform(0, 0.1) + timeout = self._start_timeout * (2 ** (attempt - 1)) + return min(timeout + jitter, self._max_timeout) diff --git a/pinecone/openapi_support/retry_urllib3.py b/pinecone/openapi_support/retry_urllib3.py new file mode 100644 index 00000000..2b91a31d --- /dev/null +++ b/pinecone/openapi_support/retry_urllib3.py @@ -0,0 +1,21 @@ +import random +from urllib3.util.retry import Retry +import logging + +logger = logging.getLogger(__name__) + + +class JitterRetry(Retry): + """ + Retry with exponential back‑off with jitter. + + The Retry class is being extended as built-in support for jitter was added only in urllib3 2.0.0. 
+ The jitter logic follows the official implementation, using a constant jitter factor: https://github.com/urllib3/urllib3/blob/main/src/urllib3/util/retry.py + """ + + def get_backoff_time(self) -> float: + backoff_value = super().get_backoff_time() + jitter = random.random() * 0.25 + backoff_value += jitter + logger.debug(f"Calculating retry backoff: {backoff_value} (jitter: {jitter})") + return backoff_value diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py new file mode 100644 index 00000000..60f02e9b --- /dev/null +++ b/pinecone/pinecone.py @@ -0,0 +1,392 @@ +import logging +from typing import Optional, Dict, Union, TYPE_CHECKING +from multiprocessing import cpu_count +import warnings + +from pinecone.config import PineconeConfig, ConfigBuilder + +from .legacy_pinecone_interface import LegacyPineconeDBControlInterface + +from pinecone.utils import normalize_host, PluginAware, docslinks, require_kwargs +from .langchain_import_warnings import _build_langchain_attribute_error_message + +logger = logging.getLogger(__name__) +""" @private """ + +if TYPE_CHECKING: + from pinecone.config import Config, OpenApiConfiguration + from pinecone.db_data import ( + _Index as Index, + _Inference as Inference, + _IndexAsyncio as IndexAsyncio, + ) + from pinecone.db_control import DBControl + from pinecone.db_control.index_host_store import IndexHostStore + from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi + from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict + from pinecone.db_control.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, + ) + from pinecone.db_control.models import ( + ServerlessSpec, + PodSpec, + ByocSpec, + IndexModel, + IndexList, + CollectionList, + IndexEmbed, + BackupModel, + BackupList, + RestoreJobModel, + RestoreJobList, + ) + + +class Pinecone(PluginAware, LegacyPineconeDBControlInterface): + """ + A client for interacting with Pinecone APIs. + """ + + def __init__( + self, + api_key: Optional[str] = None, + host: Optional[str] = None, + proxy_url: Optional[str] = None, + proxy_headers: Optional[Dict[str, str]] = None, + ssl_ca_certs: Optional[str] = None, + ssl_verify: Optional[bool] = None, + additional_headers: Optional[Dict[str, str]] = {}, + pool_threads: Optional[int] = None, + **kwargs, + ): + for deprecated_kwarg in {"config", "openapi_config", "index_api"}: + if deprecated_kwarg in kwargs: + raise NotImplementedError( + f"Passing {deprecated_kwarg} is no longer supported. Please pass individual settings such as proxy_url, proxy_headers, ssl_ca_certs, and ssl_verify directly to the Pinecone constructor as keyword arguments. See the README at {docslinks['README']} for examples."
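For a concrete feel of the retry timing introduced in `retry_urllib3.py` above, here is a small standalone sketch (not SDK code) of the schedule the urllib3 `JitterRetry` produces, assuming the defaults from this diff (`total=5`, `backoff_factor=0.25`, constant jitter factor of 0.25):

```python
import random

def backoff_with_jitter(consecutive_errors: int, backoff_factor: float = 0.25) -> float:
    # urllib3's base schedule: backoff_factor * (2 ** (consecutive_errors - 1)),
    # plus the constant-factor jitter that JitterRetry layers on top.
    base = backoff_factor * (2 ** (consecutive_errors - 1))
    return base + random.random() * 0.25

for attempt in range(1, 6):
    print(f"retry {attempt}: sleep ~{backoff_with_jitter(attempt):.2f}s")
```

Across five retries the base sleeps are roughly 0.25, 0.5, 1, 2, and 4 seconds, each nudged by up to 0.25 s of jitter so concurrent clients don't retry in lockstep.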
+ ) + + self._config = PineconeConfig.build( + api_key=api_key, + host=host, + additional_headers=additional_headers, + proxy_url=proxy_url, + proxy_headers=proxy_headers, + ssl_ca_certs=ssl_ca_certs, + ssl_verify=ssl_verify, + **kwargs, + ) + """ @private """ + + self._openapi_config = ConfigBuilder.build_openapi_config(self._config, **kwargs) + """ @private """ + + if pool_threads is None: + self._pool_threads = 5 * cpu_count() + """ @private """ + else: + self._pool_threads = pool_threads + """ @private """ + + self._inference: Optional["Inference"] = None # Lazy initialization + """ @private """ + + self._db_control: Optional["DBControl"] = None # Lazy initialization + """ @private """ + + super().__init__() # Initialize PluginAware + + @property + def inference(self) -> "Inference": + """ + Inference is a namespace where an instance of the `pinecone.inference.Inference` class is lazily created and cached. + """ + if self._inference is None: + from pinecone.inference import Inference + + self._inference = Inference( + config=self._config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) + return self._inference + + @property + def db(self) -> "DBControl": + """ + DBControl is a namespace where an instance of the `pinecone.db_control.DBControl` class is lazily created and cached. + """ + if self._db_control is None: + from pinecone.db_control import DBControl + + self._db_control = DBControl( + config=self._config, + openapi_config=self._openapi_config, + pool_threads=self._pool_threads, + ) + return self._db_control + + @property + def index_host_store(self) -> "IndexHostStore": + """@private""" + warnings.warn( + "The `index_host_store` property is deprecated. This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self.db.index._index_host_store + + @property + def config(self) -> "Config": + """@private""" + # The config property is considered private, but the name cannot be changed to include underscore + # without breaking compatibility with plugins in the wild. + return self._config + + @property + def openapi_config(self) -> "OpenApiConfiguration": + """@private""" + warnings.warn( + "The `openapi_config` property has been renamed to `_openapi_config`. It is considered private and should not be used directly. This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self._openapi_config + + @property + def pool_threads(self) -> int: + """@private""" + warnings.warn( + "The `pool_threads` property has been renamed to `_pool_threads`. It is considered private and should not be used directly. This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self._pool_threads + + @property + def index_api(self) -> "ManageIndexesApi": + """@private""" + warnings.warn( + "The `index_api` property is deprecated.
This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self.db._index_api + + def create_index( + self, + name: str, + spec: Union[Dict, "ServerlessSpec", "PodSpec", "ByocSpec"], + dimension: Optional[int] = None, + metric: Optional[Union["Metric", str]] = "cosine", + timeout: Optional[int] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + vector_type: Optional[Union["VectorType", str]] = "dense", + tags: Optional[Dict[str, str]] = None, + ) -> "IndexModel": + return self.db.index.create( + name=name, + spec=spec, + dimension=dimension, + metric=metric, + timeout=timeout, + deletion_protection=deletion_protection, + vector_type=vector_type, + tags=tags, + ) + + def create_index_for_model( + self, + name: str, + cloud: Union["CloudProvider", str], + region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], + embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], + tags: Optional[Dict[str, str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + timeout: Optional[int] = None, + ) -> "IndexModel": + return self.db.index.create_for_model( + name=name, + cloud=cloud, + region=region, + embed=embed, + tags=tags, + deletion_protection=deletion_protection, + timeout=timeout, + ) + + @require_kwargs + def create_index_from_backup( + self, + *, + name: str, + backup_id: str, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + tags: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, + ) -> "IndexModel": + return self.db.index.create_from_backup( + name=name, + backup_id=backup_id, + deletion_protection=deletion_protection, + tags=tags, + timeout=timeout, + ) + + def delete_index(self, name: str, timeout: Optional[int] = None): + return self.db.index.delete(name=name, timeout=timeout) + + def list_indexes(self) -> "IndexList": + return self.db.index.list() + + def describe_index(self, name: str) -> "IndexModel": + return self.db.index.describe(name=name) + + def has_index(self, name: str) -> bool: + return self.db.index.has(name=name) + + def configure_index( + self, + name: str, + replicas: Optional[int] = None, + pod_type: Optional[Union["PodType", str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = None, + tags: Optional[Dict[str, str]] = None, + ): + return self.db.index.configure( + name=name, + replicas=replicas, + pod_type=pod_type, + deletion_protection=deletion_protection, + tags=tags, + ) + + def create_collection(self, name: str, source: str) -> None: + return self.db.collection.create(name=name, source=source) + + def list_collections(self) -> "CollectionList": + return self.db.collection.list() + + def delete_collection(self, name: str) -> None: + return self.db.collection.delete(name=name) + + def describe_collection(self, name: str): + return self.db.collection.describe(name=name) + + @require_kwargs + def create_backup( + self, *, index_name: str, backup_name: str, description: str = "" + ) -> "BackupModel": + return self.db.backup.create( + index_name=index_name, backup_name=backup_name, description=description + ) + + @require_kwargs + def list_backups( + self, + *, + index_name: Optional[str] = None, + limit: Optional[int] = 10, + pagination_token: Optional[str] = None, + ) -> "BackupList": + return self.db.backup.list( + index_name=index_name, limit=limit, pagination_token=pagination_token + ) + + @require_kwargs + def 
describe_backup(self, *, backup_id: str) -> "BackupModel": + return self.db.backup.describe(backup_id=backup_id) + + @require_kwargs + def delete_backup(self, *, backup_id: str) -> None: + return self.db.backup.delete(backup_id=backup_id) + + @require_kwargs + def list_restore_jobs( + self, *, limit: Optional[int] = 10, pagination_token: Optional[str] = None + ) -> "RestoreJobList": + return self.db.restore_job.list(limit=limit, pagination_token=pagination_token) + + @require_kwargs + def describe_restore_job(self, *, job_id: str) -> "RestoreJobModel": + return self.db.restore_job.describe(job_id=job_id) + + @staticmethod + def from_texts(*args, **kwargs): + """@private""" + raise AttributeError(_build_langchain_attribute_error_message("from_texts")) + + @staticmethod + def from_documents(*args, **kwargs): + """@private""" + raise AttributeError(_build_langchain_attribute_error_message("from_documents")) + + def Index(self, name: str = "", host: str = "", **kwargs) -> "Index": + from pinecone.db_data import _Index + + if name == "" and host == "": + raise ValueError("Either name or host must be specified") + + pt = kwargs.pop("pool_threads", None) or self._pool_threads + api_key = self._config.api_key + openapi_config = self._openapi_config + + if host != "": + check_realistic_host(host) + + # Use host url if it is provided + index_host = normalize_host(host) + else: + # Otherwise, get host url from describe_index using the index name + index_host = self.db.index._get_host(name) + + return _Index( + host=index_host, + api_key=api_key, + pool_threads=pt, + openapi_config=openapi_config, + source_tag=self.config.source_tag, + **kwargs, + ) + + def IndexAsyncio(self, host: str, **kwargs) -> "IndexAsyncio": + from pinecone.db_data import _IndexAsyncio + + api_key = self._config.api_key + openapi_config = self._openapi_config + + if host is None or host == "": + raise ValueError("A host must be specified") + + check_realistic_host(host) + index_host = normalize_host(host) + + return _IndexAsyncio( + host=index_host, + api_key=api_key, + openapi_config=openapi_config, + source_tag=self.config.source_tag, + **kwargs, + ) + + +def check_realistic_host(host: str) -> None: + """@private + + Checks whether a user-provided host string seems plausible. + Someone could erroneously pass an index name as the host by + mistake, and if they have done that we'd like to give them a + simple error message as feedback rather than attempting to + call the url and getting a more cryptic DNS resolution error. + """ + + if "." not in host and "localhost" not in host: + raise ValueError( + f"You passed '{host}' as the host but this does not appear to be valid. Call describe_index() to confirm the host of the index." 
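The `check_realistic_host` guard above reduces to a single predicate; restating it as a self-contained snippet makes the accepted and rejected shapes explicit (the host strings are illustrative):

```python
# The heuristic used by check_realistic_host, restated for illustration.
def looks_like_host(host: str) -> bool:
    return "." in host or "localhost" in host

assert looks_like_host("my-index-abc123.svc.pinecone.io")  # host-shaped, allowed
assert looks_like_host("localhost:5080")                   # local targets pass too
assert not looks_like_host("my-index")                     # an index *name*: fail fast
```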
+ ) diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py new file mode 100644 index 00000000..b8bd7ad5 --- /dev/null +++ b/pinecone/pinecone_asyncio.py @@ -0,0 +1,352 @@ +import logging +import warnings +from typing import Optional, Dict, Union, TYPE_CHECKING + +from pinecone.config import PineconeConfig, ConfigBuilder + +from pinecone.utils import normalize_host, require_kwargs, docslinks + +from .pinecone_interface_asyncio import PineconeAsyncioDBControlInterface +from .pinecone import check_realistic_host + +if TYPE_CHECKING: + from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict + from pinecone.db_data import _IndexAsyncio + from pinecone.db_control.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, + ) + from pinecone.db_control.models import ( + ServerlessSpec, + PodSpec, + ByocSpec, + IndexModel, + IndexList, + CollectionList, + IndexEmbed, + BackupModel, + BackupList, + RestoreJobModel, + RestoreJobList, + ) + from pinecone.core.openapi.db_control.api.manage_indexes_api import AsyncioManageIndexesApi + from pinecone.db_control.index_host_store import IndexHostStore + +logger = logging.getLogger(__name__) +""" @private """ + + +class PineconeAsyncio(PineconeAsyncioDBControlInterface): + """ + `PineconeAsyncio` is an asyncio client for interacting with Pinecone's control plane API. + + This class implements methods for managing and interacting with Pinecone resources + such as collections and indexes. + + To perform data operations such as inserting and querying vectors, use the `IndexAsyncio` class. + + ```python + import asyncio + from pinecone import Pinecone + + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="my-index.pinecone.io") as idx: + await idx.upsert(vectors=[("1", [1, 2, 3]), ("2", [4, 5, 6])]) + + asyncio.run(main()) + ``` + """ + + def __init__( + self, + api_key: Optional[str] = None, + host: Optional[str] = None, + proxy_url: Optional[str] = None, + # proxy_headers: Optional[Dict[str, str]] = None, + ssl_ca_certs: Optional[str] = None, + ssl_verify: Optional[bool] = None, + additional_headers: Optional[Dict[str, str]] = {}, + **kwargs, + ): + for deprecated_kwarg in {"config", "openapi_config"}: + if deprecated_kwarg in kwargs: + raise NotImplementedError( + f"Passing {deprecated_kwarg} is no longer supported. Please pass individual settings such as proxy_url, ssl_ca_certs, and ssl_verify directly to the Pinecone constructor as keyword arguments. See the README at {docslinks['README']} for examples." + ) + + for unimplemented_kwarg in {"proxy_headers"}: + if unimplemented_kwarg in kwargs: + raise NotImplementedError( + f"You have passed {unimplemented_kwarg} but this configuration has not been implemented for PineconeAsyncio." + ) + + self._config = PineconeConfig.build( + api_key=api_key, + host=host, + additional_headers=additional_headers, + proxy_url=proxy_url, + proxy_headers=None, + ssl_ca_certs=ssl_ca_certs, + ssl_verify=ssl_verify, + **kwargs, + ) + """ @private """ + + self._openapi_config = ConfigBuilder.build_openapi_config(self._config, **kwargs) + """ @private """ + + self._inference = None # Lazy initialization + """ @private """ + + self._db_control = None # Lazy initialization + """ @private """ + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.close() + + async def close(self): + """Clean up resources used by the Pinecone client.
+ + This method should be called when the client is no longer needed so that + it can clean up the aiohttp session and other resources. + + After close has been called, the client instance should not be used. + + ```python + import asyncio + from pinecone import PineconeAsyncio + + async def main(): + pc = PineconeAsyncio() + desc = await pc.describe_index(name="my-index") + await pc.close() + + asyncio.run(main()) + ``` + + If you are using the client as a context manager, the close method is called automatically + when exiting. + + ```python + import asyncio + from pinecone import PineconeAsyncio + + async def main(): + async with PineconeAsyncio() as pc: + desc = await pc.describe_index(name="my-index") + + # No need to call close in this case because the "async with" syntax + # automatically calls close when exiting the block. + asyncio.run(main()) + ``` + + """ + await self.db._index_api.api_client.close() + + @property + def inference(self): + """Dynamically create and cache the AsyncioInference instance.""" + if self._inference is None: + from pinecone.inference import AsyncioInference + + self._inference = AsyncioInference(api_client=self.db._index_api.api_client) + return self._inference + + @property + def db(self): + if self._db_control is None: + from .db_control.db_control_asyncio import DBControlAsyncio + + self._db_control = DBControlAsyncio( + config=self._config, openapi_config=self._openapi_config + ) + return self._db_control + + @property + def index_host_store(self) -> "IndexHostStore": + """@private""" + warnings.warn( + "The `index_host_store` property is deprecated. This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self.db.index._index_host_store + + @property + def index_api(self) -> "AsyncioManageIndexesApi": + """@private""" + warnings.warn( + "The `index_api` property is deprecated.
This warning will become an error in a future version of the Pinecone Python SDK.", + DeprecationWarning, + stacklevel=2, + ) + return self.db._index_api + + async def create_index( + self, + name: str, + spec: Union[Dict, "ServerlessSpec", "PodSpec", "ByocSpec"], + dimension: Optional[int] = None, + metric: Optional[Union["Metric", str]] = "cosine", + timeout: Optional[int] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + vector_type: Optional[Union["VectorType", str]] = "dense", + tags: Optional[Dict[str, str]] = None, + ) -> "IndexModel": + resp = await self.db.index.create( + name=name, + spec=spec, + dimension=dimension, + metric=metric, + deletion_protection=deletion_protection, + vector_type=vector_type, + tags=tags, + timeout=timeout, + ) + return resp + + async def create_index_for_model( + self, + name: str, + cloud: Union["CloudProvider", str], + region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], + embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], + tags: Optional[Dict[str, str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + timeout: Optional[int] = None, + ) -> "IndexModel": + return await self.db.index.create_for_model( + name=name, + cloud=cloud, + region=region, + embed=embed, + tags=tags, + deletion_protection=deletion_protection, + timeout=timeout, + ) + + @require_kwargs + async def create_index_from_backup( + self, + *, + name: str, + backup_id: str, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + tags: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, + ) -> "IndexModel": + return await self.db.index.create_from_backup( + name=name, + backup_id=backup_id, + deletion_protection=deletion_protection, + tags=tags, + timeout=timeout, + ) + + async def delete_index(self, name: str, timeout: Optional[int] = None): + return await self.db.index.delete(name=name, timeout=timeout) + + async def list_indexes(self) -> "IndexList": + return await self.db.index.list() + + async def describe_index(self, name: str) -> "IndexModel": + return await self.db.index.describe(name=name) + + async def has_index(self, name: str) -> bool: + return await self.db.index.has(name=name) + + async def configure_index( + self, + name: str, + replicas: Optional[int] = None, + pod_type: Optional[Union["PodType", str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = None, + tags: Optional[Dict[str, str]] = None, + ): + return await self.db.index.configure( + name=name, + replicas=replicas, + pod_type=pod_type, + deletion_protection=deletion_protection, + tags=tags, + ) + + async def create_collection(self, name: str, source: str): + return await self.db.collection.create(name=name, source=source) + + async def list_collections(self) -> "CollectionList": + return await self.db.collection.list() + + async def delete_collection(self, name: str): + return await self.db.collection.delete(name=name) + + async def describe_collection(self, name: str): + return await self.db.collection.describe(name=name) + + @require_kwargs + async def create_backup( + self, *, index_name: str, backup_name: str, description: str = "" + ) -> "BackupModel": + return await self.db.backup.create( + index_name=index_name, backup_name=backup_name, description=description + ) + + @require_kwargs + async def list_backups( + self, + *, + index_name: Optional[str] = None, + limit: Optional[int] = 10, + pagination_token: Optional[str] = None, + ) -> 
"BackupList": + return await self.db.backup.list( + index_name=index_name, limit=limit, pagination_token=pagination_token + ) + + @require_kwargs + async def describe_backup(self, *, backup_id: str) -> "BackupModel": + return await self.db.backup.describe(backup_id=backup_id) + + @require_kwargs + async def delete_backup(self, *, backup_id: str) -> None: + return await self.db.backup.delete(backup_id=backup_id) + + @require_kwargs + async def list_restore_jobs( + self, *, limit: Optional[int] = 10, pagination_token: Optional[str] = None + ) -> "RestoreJobList": + return await self.db.restore_job.list(limit=limit, pagination_token=pagination_token) + + @require_kwargs + async def describe_restore_job(self, *, job_id: str) -> "RestoreJobModel": + return await self.db.restore_job.describe(job_id=job_id) + + def IndexAsyncio(self, host: str, **kwargs) -> "_IndexAsyncio": + from pinecone.db_data import _IndexAsyncio + + api_key = self._config.api_key + openapi_config = self._openapi_config + + if host is None or host == "": + raise ValueError("A host must be specified") + + check_realistic_host(host) + index_host = normalize_host(host) + + return _IndexAsyncio( + host=index_host, + api_key=api_key, + openapi_config=openapi_config, + source_tag=self._config.source_tag, + **kwargs, + ) diff --git a/pinecone/control/pinecone_interface_asyncio.py b/pinecone/pinecone_interface_asyncio.py similarity index 86% rename from pinecone/control/pinecone_interface_asyncio.py rename to pinecone/pinecone_interface_asyncio.py index a732bce9..4b8e1cc1 100644 --- a/pinecone/control/pinecone_interface_asyncio.py +++ b/pinecone/pinecone_interface_asyncio.py @@ -1,32 +1,36 @@ from abc import ABC, abstractmethod -from typing import Optional, Dict, Union - - -from pinecone.config import Config - -from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi - - -from pinecone.models import ( - ServerlessSpec, - PodSpec, - IndexList, - CollectionList, - IndexModel, - IndexEmbed, -) -from pinecone.enums import ( - Metric, - VectorType, - DeletionProtection, - PodType, - CloudProvider, - AwsRegion, - GcpRegion, - AzureRegion, -) -from .types import CreateIndexForModelEmbedTypedDict +from typing import Optional, Dict, Union, TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.config import Config + + from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi + + from pinecone.db_control.models import ( + ServerlessSpec, + PodSpec, + ByocSpec, + IndexList, + CollectionList, + IndexModel, + IndexEmbed, + BackupModel, + BackupList, + RestoreJobModel, + RestoreJobList, + ) + from pinecone.db_control.enums import ( + Metric, + VectorType, + DeletionProtection, + PodType, + CloudProvider, + AwsRegion, + GcpRegion, + AzureRegion, + ) + from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict class PineconeAsyncioDBControlInterface(ABC): @@ -39,10 +43,10 @@ def __init__( proxy_headers: Optional[Dict[str, str]] = None, ssl_ca_certs: Optional[str] = None, ssl_verify: Optional[bool] = None, - config: Optional[Config] = None, + config: Optional["Config"] = None, additional_headers: Optional[Dict[str, str]] = {}, pool_threads: Optional[int] = 1, - index_api: Optional[ManageIndexesApi] = None, + index_api: Optional["ManageIndexesApi"] = None, **kwargs, ): """ @@ -291,12 +295,12 @@ async def main(): async def create_index( self, name: str, - spec: Union[Dict, ServerlessSpec, PodSpec], + spec: Union[Dict, "ServerlessSpec", "PodSpec", "ByocSpec"], dimension: Optional[int], - 
metric: Optional[Union[Metric, str]] = Metric.COSINE, + metric: Optional[Union["Metric", str]] = "cosine", timeout: Optional[int] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, - vector_type: Optional[Union[VectorType, str]] = VectorType.DENSE, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + vector_type: Optional[Union["VectorType", str]] = "dense", tags: Optional[Dict[str, str]] = None, ): """Creates a Pinecone index. @@ -408,13 +412,13 @@ async def main(): async def create_index_for_model( self, name: str, - cloud: Union[CloudProvider, str], - region: Union[AwsRegion, GcpRegion, AzureRegion, str], - embed: Union[IndexEmbed, CreateIndexForModelEmbedTypedDict], + cloud: Union["CloudProvider", str], + region: Union["AwsRegion", "GcpRegion", "AzureRegion", str], + embed: Union["IndexEmbed", "CreateIndexForModelEmbedTypedDict"], tags: Optional[Dict[str, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = DeletionProtection.DISABLED, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", timeout: Optional[int] = None, - ) -> IndexModel: + ) -> "IndexModel": """ :param name: The name of the index to create. Must be unique within your project and cannot be changed once created. Allowed characters are lowercase letters, numbers, @@ -479,6 +483,36 @@ async def main(): """ pass + @abstractmethod + def create_index_from_backup( + self, + *, + name: str, + backup_id: str, + deletion_protection: Optional[Union["DeletionProtection", str]] = "disabled", + tags: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, + ) -> "IndexModel": + """ + Create an index from a backup. + + Call `list_backups` to get a list of backups for your project. + + :param name: The name of the index to create. + :type name: str + :param backup_id: The ID of the backup to restore. + :type backup_id: str + :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. This setting can be changed with `configure_index`. + :type deletion_protection: Optional[Literal["enabled", "disabled"]] + :param tags: Tags are key-value pairs you can attach to indexes to better understand, organize, and identify your resources. Some example use cases include tagging indexes with the name of the model that generated the embeddings, the date the index was created, or the purpose of the index. + :type tags: Optional[Dict[str, str]] + :param timeout: Specify the number of seconds to wait until index is ready to receive data. If None, wait indefinitely; if >=0, time out after this many seconds; + if -1, return immediately and do not wait. + :return: A description of the index that was created. + :rtype: IndexModel + """ + pass + @abstractmethod async def delete_index(self, name: str, timeout: Optional[int] = None): """ @@ -533,7 +567,7 @@ async def main(): pass @abstractmethod - async def list_indexes(self) -> IndexList: + async def list_indexes(self) -> "IndexList": """ :return: Returns an `IndexList` object, which is iterable and contains a list of `IndexModel` objects. The `IndexList` also has a convenience method `names()` @@ -574,7 +608,7 @@ async def main(): pass @abstractmethod - async def describe_index(self, name: str) -> IndexModel: + async def describe_index(self, name: str) -> "IndexModel": """ :param name: the name of the index to describe. 
:return: Returns an `IndexModel` object @@ -669,8 +703,8 @@ async def configure_index( self, name: str, replicas: Optional[int] = None, - pod_type: Optional[Union[PodType, str]] = None, - deletion_protection: Optional[Union[DeletionProtection, str]] = None, + pod_type: Optional[Union["PodType", str]] = None, + deletion_protection: Optional[Union["DeletionProtection", str]] = None, tags: Optional[Dict[str, str]] = None, ): """ @@ -769,6 +803,77 @@ async def main(): """ pass + @abstractmethod + async def create_backup( + self, *, index_name: str, backup_name: str, description: str = "" + ) -> "BackupModel": + """Create a backup of an index. + + Args: + index_name (str): The name of the index to back up. + backup_name (str): The name to give the backup. + description (str): Optional description of the backup. + """ + pass + + @abstractmethod + async def list_backups( + self, + *, + index_name: Optional[str] = None, + limit: Optional[int] = 10, + pagination_token: Optional[str] = None, + ) -> "BackupList": + """List backups. + + If index_name is provided, the backups will be filtered by index. If no index_name is provided, all backups in the project will be returned. + + Args: + index_name (str): The name of the index to list backups for. + limit (int): The maximum number of backups to return. + pagination_token (str): The pagination token to use for pagination. + """ + pass + + @abstractmethod + async def describe_backup(self, *, backup_id: str) -> "BackupModel": + """Describe a backup. + + Args: + backup_id (str): The ID of the backup to describe. + """ + pass + + @abstractmethod + async def delete_backup(self, *, backup_id: str) -> None: + """Delete a backup. + + Args: + backup_id (str): The ID of the backup to delete. + """ + pass + + @abstractmethod + async def list_restore_jobs( + self, *, limit: Optional[int] = 10, pagination_token: Optional[str] = None + ) -> "RestoreJobList": + """List restore jobs. + + Args: + limit (int): The maximum number of restore jobs to return. + pagination_token (str): The pagination token to use for pagination. + """ + pass + + @abstractmethod + async def describe_restore_job(self, *, job_id: str) -> "RestoreJobModel": + """Describe a restore job. + + Args: + job_id (str): The ID of the restore job to describe.
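Taken together, these abstract methods define the backup lifecycle: snapshot an index, inspect backups, restore into a new index, and monitor restore jobs. A hedged usage sketch against the concrete `PineconeAsyncio` client from this diff; the index names and the `backup_id` attribute on the returned model are assumptions for illustration:

```python
import asyncio
from pinecone import PineconeAsyncio

async def main():
    async with PineconeAsyncio() as pc:
        # Snapshot an existing index, then inspect the project's backups.
        backup = await pc.create_backup(index_name="docs", backup_name="docs-backup")
        print(await pc.list_backups(index_name="docs", limit=10))

        # Restore into a brand-new index and watch the restore job queue.
        await pc.create_index_from_backup(name="docs-restored", backup_id=backup.backup_id)
        print(await pc.list_restore_jobs(limit=10))

asyncio.run(main())
```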
+ """ + pass + @abstractmethod async def create_collection(self, name: str, source: str): """Create a collection from a pod-based index @@ -779,7 +884,7 @@ async def create_collection(self, name: str, source: str): pass @abstractmethod - async def list_collections(self) -> CollectionList: + async def list_collections(self) -> "CollectionList": """List all collections ```python diff --git a/pinecone/py.typed b/pinecone/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/pinecone/scripts/repl.py b/pinecone/scripts/repl.py new file mode 100644 index 00000000..7fb4058a --- /dev/null +++ b/pinecone/scripts/repl.py @@ -0,0 +1,56 @@ +import code +import logging +from pinecone.utils.repr_overrides import setup_readline_history + + +def setup_logging(): + # Create a custom formatter + formatter = logging.Formatter( + fmt="%(asctime)s | %(levelname)-8s | %(message)s", datefmt="%Y-%m-%d %H:%M:%S" + ) + + # Create and configure the console handler + console_handler = logging.StreamHandler() + console_handler.setFormatter(formatter) + + # Configure the root logger + root_logger = logging.getLogger() + root_logger.setLevel(logging.INFO) + root_logger.addHandler(console_handler) + + return root_logger + + +def main(): + # Set up logging + logger = setup_logging() + logger.info("Initializing environment...") + + # Set up readline history + setup_readline_history() + + # You can add any setup code here, such as: + # - Setting environment variables + # - Importing commonly used modules + # - Loading configuration files + + # Start the interactive REPL + banner = """ + Welcome to the custom Python REPL! + Your initialization steps have been completed. + """ + + # Create a custom namespace with any pre-loaded variables + namespace = { + "__name__": "__main__", + "__doc__": None, + "logger": logger, # Make logger available in REPL + # Add any other variables you want to have available in the REPL + } + + # Start the interactive console + code.interact(banner=banner, local=namespace) + + +if __name__ == "__main__": + main() diff --git a/pinecone/utils/__init__.py b/pinecone/utils/__init__.py index 3d72b5d6..33d286d8 100644 --- a/pinecone/utils/__init__.py +++ b/pinecone/utils/__init__.py @@ -17,6 +17,7 @@ from .error_handling import validate_and_convert_errors from .plugin_aware import PluginAware from .filter_dict import filter_dict +from .require_kwargs import require_kwargs __all__ = [ "PluginAware", @@ -36,4 +37,5 @@ "validate_and_convert_errors", "convert_enum_to_string", "filter_dict", + "require_kwargs", ] diff --git a/pinecone/utils/docslinks.py b/pinecone/utils/docslinks.py index a86dd1da..cdfe66cd 100644 --- a/pinecone/utils/docslinks.py +++ b/pinecone/utils/docslinks.py @@ -1,10 +1,12 @@ -from pinecone.core.openapi.db_control import API_VERSION +def versioned_url(template: str): + return lambda version: template.format(version) + docslinks = { "README": "https://github.com/pinecone-io/pinecone-python-client/blob/main/README.md", "GITHUB_REPO": "https://github.com/pinecone-io/pinecone-python-client", "LANGCHAIN_IMPORT_KB_ARTICLE": "https://docs.pinecone.io/troubleshooting/pinecone-attribute-errors-with-langchain", - "API_DESCRIBE_INDEX": "https://docs.pinecone.io/reference/api/{}/control-plane/describe_index".format( - API_VERSION + "API_DESCRIBE_INDEX": versioned_url( + "https://docs.pinecone.io/reference/api/{}/control-plane/describe_index" ), } diff --git a/pinecone/utils/error_handling.py b/pinecone/utils/error_handling.py index 5cdaaaf4..c18090eb 100644 --- 
a/pinecone/utils/error_handling.py +++ b/pinecone/utils/error_handling.py @@ -1,7 +1,11 @@ import inspect from functools import wraps -from urllib3.exceptions import MaxRetryError, ProtocolError + +class ProtocolError(Exception): + """Raised when there is a protocol error in the connection.""" + + pass def validate_and_convert_errors(func): @@ -9,15 +13,21 @@ def validate_and_convert_errors(func): def inner_func(*args, **kwargs): try: return func(*args, **kwargs) - except MaxRetryError as e: - if isinstance(e.reason, ProtocolError): + except Exception as e: + # Lazy import of urllib3 exceptions + from urllib3.exceptions import MaxRetryError, ProtocolError as Urllib3ProtocolError + + if isinstance(e, MaxRetryError): + if isinstance(e.reason, Urllib3ProtocolError): + raise ProtocolError(f"Failed to connect to {e.url}") from e + else: + raise e from e + elif isinstance(e, Urllib3ProtocolError): raise ProtocolError( - f"Failed to connect to {e.url}; did you specify the correct index name?" + "Connection failed. Please verify that the index host is correct and accessible." ) from e else: - raise - except ProtocolError as e: - raise ProtocolError("Failed to connect; did you specify the correct index name?") from e + raise e from e # Override signature sig = inspect.signature(func) diff --git a/pinecone/utils/find_legacy_imports.py b/pinecone/utils/find_legacy_imports.py new file mode 100755 index 00000000..5421de28 --- /dev/null +++ b/pinecone/utils/find_legacy_imports.py @@ -0,0 +1,143 @@ +#!/usr/bin/env python3 +""" +Script to identify legacy imports that were previously available via star imports. + +This script analyzes the codebase to find all imports that were previously available +via star imports but are no longer imported at the top level. +""" + +import ast +import os +from typing import Set + + +def find_star_imports(file_path: str) -> Set[str]: + """ + Find all star imports in a file. + + Args: + file_path: Path to the file to analyze. + + Returns: + Set of module names that are imported with star imports. + """ + with open(file_path, "r") as f: + content = f.read() + + try: + tree = ast.parse(content) + except SyntaxError: + print(f"Warning: Could not parse {file_path}") + return set() + + star_imports = set() + + for node in ast.walk(tree): + if isinstance(node, ast.ImportFrom) and node.names[0].name == "*": + module_name = node.module + if module_name: + star_imports.add(module_name) + + return star_imports + + +def find_imported_names(file_path: str) -> Set[str]: + """ + Find all names that are imported in a file. + + Args: + file_path: Path to the file to analyze. + + Returns: + Set of imported names. + """ + with open(file_path, "r") as f: + content = f.read() + + try: + tree = ast.parse(content) + except SyntaxError: + print(f"Warning: Could not parse {file_path}") + return set() + + imported_names = set() + + for node in ast.walk(tree): + if isinstance(node, ast.Import): + for name in node.names: + imported_names.add(name.name) + elif isinstance(node, ast.ImportFrom): + for name in node.names: + if name.name != "*": + imported_names.add(name.name) + + return imported_names + + +def find_module_exports(module_path: str) -> Set[str]: + """ + Find all names that are exported by a module. + + Args: + module_path: Path to the module to analyze. + + Returns: + Set of exported names. 
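Assuming `find_star_imports` from the script above is importable, a quick throwaway demonstration of what it detects (only the star import is reported):

```python
# Hypothetical demo: write a scratch module and scan it.
import os
import tempfile

src = "from pinecone.db_data import *\nfrom os import path\n"
with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as f:
    f.write(src)

print(find_star_imports(f.name))  # -> {'pinecone.db_data'}
os.unlink(f.name)
```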
+ """ + try: + module = __import__(module_path, fromlist=["*"]) + return set(dir(module)) + except ImportError: + print(f"Warning: Could not import {module_path}") + return set() + + +def main(): + """ + Main function to find legacy imports. + """ + # Get the package root directory + package_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + + # Find the __init__.py file + init_file = os.path.join(package_root, "__init__.py") + + # Find star imports in the __init__.py file + star_imports = find_star_imports(init_file) + + # Find all imported names in the __init__.py file + imported_names = find_imported_names(init_file) + + # Find all module exports + module_exports = {} + for module_name in star_imports: + module_exports[module_name] = find_module_exports(module_name) + + # Find all files in the package + package_files = [] + for root, _, files in os.walk(package_root): + for file in files: + if file.endswith(".py") and not file.startswith("__"): + package_files.append(os.path.join(root, file)) + + # Find all imports in the package + package_imports = set() + for file in package_files: + package_imports.update(find_imported_names(file)) + + # Find legacy imports + legacy_imports = {} + for module_name, exports in module_exports.items(): + for export in exports: + if export in package_imports and export not in imported_names: + legacy_imports[f"pinecone.{export}"] = (module_name, export) + + # Print the legacy imports + print("LEGACY_IMPORTS = {") + for legacy_name, (module_path, actual_name) in sorted(legacy_imports.items()): + print(f" '{legacy_name}': ('{module_path}', '{actual_name}'),") + print("}") + + +if __name__ == "__main__": + main() diff --git a/pinecone/utils/lazy_imports.py b/pinecone/utils/lazy_imports.py new file mode 100644 index 00000000..0a55c8f4 --- /dev/null +++ b/pinecone/utils/lazy_imports.py @@ -0,0 +1,76 @@ +""" +Lazy import handler for Pinecone. + +This module provides a way to lazily load imports that were previously +available via star imports but are no longer imported at the top level. 
+""" + +import importlib +import sys +from types import ModuleType +from typing import Dict, Optional, Tuple, cast + +# Dictionary mapping import names to their actual module paths +# Format: 'name': ('module_path', 'actual_name') +LAZY_IMPORTS: Dict[str, Tuple[str, str]] = { + # Example: 'Vector': ('pinecone.db_data.models', 'Vector') + # Add all your lazy imports here +} + + +class LazyModule: + def __init__(self, original_module, lazy_imports): + self._original_module = original_module + self._lazy_imports = lazy_imports + self._loaded_attrs = {} + + def __dir__(self): + # Get the base directory listing from the original module + base_dir = dir(self._original_module) + + # Add lazy-loaded items + lazy_dir = list(self._lazy_imports.keys()) + + # Return combined list + return sorted(set(base_dir + lazy_dir)) + + def __getattr__(self, name): + # First try the original module + try: + return getattr(self._original_module, name) + except AttributeError: + pass + + # Then try lazy imports + if name in self._lazy_imports: + if name not in self._loaded_attrs: + module_path, item_name = self._lazy_imports[name] + module = importlib.import_module(module_path) + self._loaded_attrs[name] = getattr(module, item_name) + return self._loaded_attrs[name] + + raise AttributeError(f"module '{self._original_module.__name__}' has no attribute '{name}'") + + +def setup_lazy_imports(lazy_imports: Optional[Dict[str, Tuple[str, str]]] = None) -> None: + """ + Set up the lazy import handler. + + Args: + lazy_imports: Optional dictionary of imports to handle lazily. + If None, uses the default LAZY_IMPORTS dictionary. + """ + if lazy_imports is None: + lazy_imports = LAZY_IMPORTS + + # Only proceed if the pinecone module is already loaded + if "pinecone" not in sys.modules: + return + + # Create a proxy for the pinecone module + original_module = sys.modules["pinecone"] + proxy = LazyModule(original_module, lazy_imports) + + # Replace the pinecone module with our proxy + # Use a type cast to satisfy the type checker + sys.modules["pinecone"] = cast(ModuleType, proxy) diff --git a/pinecone/utils/legacy_imports.py b/pinecone/utils/legacy_imports.py new file mode 100644 index 00000000..9013acdd --- /dev/null +++ b/pinecone/utils/legacy_imports.py @@ -0,0 +1,112 @@ +""" +Legacy import handler for Pinecone. + +This module provides a simple way to handle legacy imports that were previously +available via star imports but are no longer imported at the top level. +""" + +import importlib +import sys +from types import ModuleType +from typing import Dict, Optional, Set, Any, Tuple, cast + +# Dictionary mapping legacy import names to their actual module paths +# Format: 'name': ('module_path', 'actual_name') +LEGACY_IMPORTS: Dict[str, Tuple[str, str]] = { + # Example: 'Vector': ('pinecone.db_data.models', 'Vector') + # Add all your legacy imports here +} + + +class LegacyImportProxy: + """ + A proxy module that handles legacy imports with warnings. + + This class is used to replace the pinecone module in sys.modules + to handle legacy imports that were previously available via star imports. + """ + + def __init__(self, original_module: Any, legacy_imports: Dict[str, Tuple[str, str]]): + """ + Initialize the proxy module. + + Args: + original_module: The original module to proxy. + legacy_imports: Dictionary of legacy imports to handle. 
+ """ + self._original_module = original_module + self._legacy_imports = legacy_imports + self._warned_imports: Set[str] = set() + self._loaded_modules: Dict[str, Any] = {} + + def __getattr__(self, name: str) -> Any: + """ + Handle attribute access for legacy imports. + + Args: + name: The name of the attribute being accessed. + + Returns: + The requested attribute. + + Raises: + AttributeError: If the attribute cannot be found. + """ + # First, try to get the attribute from the original module + try: + return getattr(self._original_module, name) + except AttributeError: + pass + + # Check if this is a legacy import + if name in self._legacy_imports: + module_path, actual_name = self._legacy_imports[name] + + # Only warn once per import + # if name not in self._warned_imports: + # warnings.warn( + # f"Importing '{name}' directly from 'pinecone' is deprecated. " + # f"Please import it from '{module_path}' instead. " + # f"This import will be removed in a future version.", + # DeprecationWarning, + # stacklevel=2 + # ) + # self._warned_imports.add(name) + + # Load the module if not already loaded + if module_path not in self._loaded_modules: + try: + self._loaded_modules[module_path] = importlib.import_module(module_path) + except ImportError: + raise AttributeError(f"module 'pinecone' has no attribute '{name}'") + + # Get the actual object + module = self._loaded_modules[module_path] + if hasattr(module, actual_name): + return getattr(module, actual_name) + + raise AttributeError(f"module 'pinecone' has no attribute '{name}'") + + +def setup_legacy_imports(legacy_imports: Optional[Dict[str, Tuple[str, str]]] = None) -> None: + """ + Set up the legacy import handler. + + Args: + legacy_imports: Optional dictionary of legacy imports to handle. + If None, uses the default LEGACY_IMPORTS dictionary. + """ + if legacy_imports is None: + legacy_imports = LEGACY_IMPORTS + + # Only proceed if the pinecone module is already loaded + if "pinecone" not in sys.modules: + return + + # Create a proxy for the pinecone module + original_module = sys.modules["pinecone"] + proxy = LegacyImportProxy(original_module, legacy_imports) + + # Replace the pinecone module with our proxy + # Use a type cast to satisfy the type checker + sys.modules["pinecone"] = cast(ModuleType, proxy) diff --git a/pinecone/utils/plugin_aware.py b/pinecone/utils/plugin_aware.py index ce1e4b87..56c54e90 100644 --- a/pinecone/utils/plugin_aware.py +++ b/pinecone/utils/plugin_aware.py @@ -1,7 +1,7 @@ +from typing import Any from .setup_openapi_client import build_plugin_setup_client from pinecone.config import Config -from pinecone.openapi_support.configuration import Configuration as OpenApiConfig - +from pinecone.config.openapi_configuration import Configuration as OpenApiConfig from pinecone_plugin_interface import load_and_install as install_plugins import logging @@ -11,17 +11,120 @@ class PluginAware: + """ + Base class for classes that support plugin loading. + + This class provides functionality to lazily load plugins when they are first accessed. + Subclasses must set the following attributes before calling super().__init__(): + - config: Config + - _openapi_config: OpenApiConfig + - _pool_threads: int + + These attributes are considered private and should not be used by end users. The config property + is also considered private, but it was originally named without the underscore and this name + can't be changed without breaking compatibility with plugins in the wild. 
+ """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """ + Initialize the PluginAware class. + + Args: + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. + + Raises: + AttributeError: If required attributes are not set in the subclass. + """ + self._plugins_loaded = False + """ @private """ + + # Check for required attributes after super().__init__ has been called + missing_attrs = [] + if not hasattr(self, "config"): + missing_attrs.append("config") + if not hasattr(self, "_openapi_config"): + missing_attrs.append("_openapi_config") + if not hasattr(self, "_pool_threads"): + missing_attrs.append("_pool_threads") + + if missing_attrs: + logger.error( + f"PluginAware class requires the following attributes: {', '.join(missing_attrs)}. " + f"These must be set in the {self.__class__.__name__} class's __init__ method " + f"before calling super().__init__()." + ) + raise AttributeError( + f"PluginAware class requires the following attributes: {', '.join(missing_attrs)}. " + f"These must be set in the {self.__class__.__name__} class's __init__ method " + f"before calling super().__init__()." + ) + + def __getattr__(self, name: str) -> Any: + """ + Called when an attribute is not found through the normal lookup process. + This allows for lazy loading of plugins when they are first accessed. + + Args: + name: The name of the attribute being accessed. + + Returns: + The requested attribute. + + Raises: + AttributeError: If the attribute cannot be found after loading plugins. + """ + logger.debug("__getattr__ called for %s", name) + # Check if this is one of the required attributes that should be set by subclasses + required_attrs = ["config", "_openapi_config", "_pool_threads"] + if name in required_attrs: + raise AttributeError( + f"'{self.__class__.__name__}' object has no attribute '{name}'. " + f"This attribute must be set in the subclass's __init__ method " + f"before calling super().__init__()." + ) + + if not self._plugins_loaded: + logger.debug("Loading plugins for %s", self.__class__.__name__) + # Use object.__getattribute__ to avoid triggering __getattr__ again + try: + config = object.__getattribute__(self, "config") + openapi_config = object.__getattribute__(self, "_openapi_config") + pool_threads = object.__getattribute__(self, "_pool_threads") + self.load_plugins( + config=config, openapi_config=openapi_config, pool_threads=pool_threads + ) + self._plugins_loaded = True + try: + return object.__getattribute__(self, name) + except AttributeError: + pass + except AttributeError: + # If we can't get the required attributes, we can't load plugins + pass + + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + def load_plugins( self, config: Config, openapi_config: OpenApiConfig, pool_threads: int ) -> None: - """@private""" + """ + Load plugins for the parent class. + + Args: + config: The Pinecone configuration. + openapi_config: The OpenAPI configuration. + pool_threads: The number of threads in the pool. + """ try: - # I don't expect this to ever throw, but wrapping this in a - # try block just in case to make sure a bad plugin doesn't - # halt client initialization. 
+ # Build the OpenAPI client for plugin setup openapi_client_builder = build_plugin_setup_client( config=config, openapi_config=openapi_config, pool_threads=pool_threads ) + # Install plugins install_plugins(self, openapi_client_builder) + logger.debug("Plugins loaded successfully for %s", self.__class__.__name__) + except ImportError as e: + logger.warning("Failed to import plugin module: %s", e) except Exception as e: - logger.error(f"Error loading plugins: {e}") + logger.error("Error loading plugins: %s", e, exc_info=True) diff --git a/pinecone/utils/repr_overrides.py b/pinecone/utils/repr_overrides.py index e3dfdb66..ce13e487 100644 --- a/pinecone/utils/repr_overrides.py +++ b/pinecone/utils/repr_overrides.py @@ -1,15 +1,45 @@ import json from datetime import datetime +import readline +import os +import atexit def custom_serializer(obj): if isinstance(obj, datetime): return obj.isoformat() - else: + try: + # First try to get a dictionary representation if available + if hasattr(obj, "to_dict"): + return obj.to_dict() + # Fall back to string representation return str(obj) + except (TypeError, RecursionError): + # If we hit any serialization issues, return a safe string representation + return f"<{obj.__class__.__name__} object>" def install_json_repr_override(klass): klass.__repr__ = lambda self: json.dumps( self.to_dict(), indent=4, sort_keys=False, default=custom_serializer ) + + +def setup_readline_history(): + """Setup readline history for the custom REPL.""" + # Create .pinecone directory in user's home if it doesn't exist + history_dir = os.path.expanduser("~/.pinecone") + os.makedirs(history_dir, exist_ok=True) + + # Set up history file + history_file = os.path.join(history_dir, "repl_history") + + # Load history if it exists + if os.path.exists(history_file): + readline.read_history_file(history_file) + + # Set history size + readline.set_history_length(1000) + + # Save history on exit + atexit.register(readline.write_history_file, history_file) diff --git a/pinecone/utils/require_kwargs.py b/pinecone/utils/require_kwargs.py new file mode 100644 index 00000000..9321f468 --- /dev/null +++ b/pinecone/utils/require_kwargs.py @@ -0,0 +1,16 @@ +import functools +import inspect + + +def require_kwargs(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + if len(args) > 1: # First arg is self + param_names = list(inspect.signature(func).parameters.keys())[1:] # Skip self + raise TypeError( + f"{func.__name__}() requires keyword arguments. 
" + f"Please use {func.__name__}({', '.join(f'{name}=value' for name in param_names)})" + ) + return func(*args, **kwargs) + + return wrapper diff --git a/pinecone/utils/user_agent.py b/pinecone/utils/user_agent.py index 845a0ab1..b52a4edb 100644 --- a/pinecone/utils/user_agent.py +++ b/pinecone/utils/user_agent.py @@ -1,5 +1,3 @@ -import urllib3 - from .version import __version__ from .constants import SOURCE_TAG import re @@ -19,11 +17,11 @@ def _build_source_tag_field(source_tag): def _get_user_agent(client_id, config): - user_agent_details = {"urllib3": urllib3.__version__} - user_agent = "{} ({})".format( - client_id, ", ".join([f"{k}:{v}" for k, v in user_agent_details.items()]) + user_agent = ( + f"{client_id}; {_build_source_tag_field(config.source_tag)}" + if config.source_tag + else client_id ) - user_agent += f"; {_build_source_tag_field(config.source_tag)}" if config.source_tag else "" return user_agent diff --git a/poetry.lock b/poetry.lock index 427dc1e2..823ed4af 100644 --- a/poetry.lock +++ b/poetry.lock @@ -109,6 +109,20 @@ yarl = ">=1.17.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] +[[package]] +name = "aiohttp-retry" +version = "2.9.1" +description = "Simple retry client for aiohttp" +optional = true +python-versions = ">=3.7" +files = [ + {file = "aiohttp_retry-2.9.1-py3-none-any.whl", hash = "sha256:66d2759d1921838256a05a3f80ad7e724936f083e35be5abb5e16eed6be6dc54"}, + {file = "aiohttp_retry-2.9.1.tar.gz", hash = "sha256:8eb75e904ed4ee5c2ec242fefe85bf04240f685391c4879d8f541d6028ff01f1"}, +] + +[package.dependencies] +aiohttp = "*" + [[package]] name = "aiosignal" version = "1.3.1" @@ -1001,13 +1015,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.2" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -1130,6 +1144,21 @@ pygments = ">=2.12.0" [package.extras] dev = ["hypothesis", "mypy", "pdoc-pyo3-sample-library (==1.0.11)", "pygments (>=2.14.0)", "pytest", "pytest-cov", "pytest-timeout", "ruff", "tox", "types-pygments"] +[[package]] +name = "pinecone-plugin-assistant" +version = "1.6.0" +description = "Assistant plugin for Pinecone SDK" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "pinecone_plugin_assistant-1.6.0-py3-none-any.whl", hash = "sha256:d742273d136fba66d020f1af01af2c6bfbc802f7ff9ddf46c590b7ea26932175"}, + {file = "pinecone_plugin_assistant-1.6.0.tar.gz", hash = "sha256:b7c531743f87269ba567dd6084b1464b62636a011564d414bc53147571b2f2c1"}, +] + +[package.dependencies] +packaging = ">=24.2,<25.0" +requests = ">=2.32.3,<3.0.0" + [[package]] name = "pinecone-plugin-interface" version = "0.0.7" @@ -1332,6 +1361,29 @@ files = [ googleapis-common-protos = "*" protobuf = ">=4.21.0" +[[package]] +name = "psutil" +version = "7.0.0" +description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, + {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, + {file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"}, + {file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"}, + {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, + {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, + {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, +] + +[package.extras] +dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] + [[package]] name = "py-cpuinfo" version = "9.0.0" @@ -1497,6 +1549,20 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-dotenv" +version = "1.1.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.9" +files = [ + {file = "python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d"}, + {file = "python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + [[package]] name = "pytz" version = "2023.3.post1" @@ -1668,6 +1734,17 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tuna" +version = "0.5.11" +description = "Visualize Python performance profiles" +optional = false +python-versions = ">=3.6" +files = [ + {file = "tuna-0.5.11-py3-none-any.whl", hash = "sha256:ab352a6d836014ace585ecd882148f1f7c68be9ea4bf9e9298b7127594dab2ef"}, + {file = "tuna-0.5.11.tar.gz", hash = "sha256:d47f3e39e80af961c8df016ac97d1643c3c60b5eb451299da0ab5fe411d8866c"}, +] + [[package]] name = "types-protobuf" version = "4.24.0.4" @@ -1796,6 +1873,20 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging 
(>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +[[package]] +name = "vprof" +version = "0.38" +description = "Visual profiler for Python" +optional = false +python-versions = "*" +files = [ + {file = "vprof-0.38-py3-none-any.whl", hash = "sha256:91b91d8868176c29e0fe3426c9239d11cd192c7144c7baf26a211e48923a5ee8"}, + {file = "vprof-0.38.tar.gz", hash = "sha256:7f1000912eeb7a450c7c94d3cc96739af45ad0ff01d5abcc0b09a175d40ffadb"}, +] + +[package.dependencies] +psutil = ">=3" + [[package]] name = "yarl" version = "1.17.2" @@ -1893,10 +1984,10 @@ multidict = ">=4.0" propcache = ">=0.2.0" [extras] -asyncio = ["aiohttp"] +asyncio = ["aiohttp", "aiohttp-retry"] grpc = ["googleapis-common-protos", "grpcio", "grpcio", "grpcio", "lz4", "protobuf", "protoc-gen-openapiv2"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "8a10046c5826a9773836e6b3ee50271bb0077d0faf32d709f1e65c4bb1fc53ea" +content-hash = "cc8b764abfc3d9ba774410ef118817c736c3c74a2bfa7f9f32a462628d804739" diff --git a/pyproject.toml b/pyproject.toml index 0525d08d..7b987cbe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,6 +31,9 @@ classifiers=[ "Topic :: Software Development :: Libraries :: Python Modules" ] +[tool.poetry.scripts] +repl = "scripts.repl:main" + [tool.poetry.dependencies] python = "^3.9" typing-extensions = ">=3.7.4" @@ -54,6 +57,7 @@ protoc-gen-openapiv2 = {version = "^0.0.1", optional = true } pinecone-plugin-interface = "^0.0.7" python-dateutil = ">=2.5.3" aiohttp = { version = ">=3.9.0", optional = true } +aiohttp-retry = { version = "^2.9.1", optional = true } [tool.poetry.group.types] optional = true @@ -94,11 +98,14 @@ urllib3_mock = "0.3.3" responses = ">=0.8.1" ruff = "^0.9.3" beautifulsoup4 = "^4.13.3" - +pinecone-plugin-assistant = "^1.6.0" +vprof = "^0.38" +tuna = "^0.5.11" +python-dotenv = "^1.1.0" [tool.poetry.extras] grpc = ["grpcio", "googleapis-common-protos", "lz4", "protobuf", "protoc-gen-openapiv2"] -asyncio = ["aiohttp"] +asyncio = ["aiohttp", "aiohttp-retry"] [build-system] requires = ["poetry-core"] @@ -152,7 +159,7 @@ docstring-code-line-length = "dynamic" [tool.ruff.lint.per-file-ignores] # F403 Allow star imports # F401 allow imported but unused -"__init__.py" = ["F401", "F403"] +"__init__.py" = ["F401", "F403", "F405"] # E402 Allow module level import not at top of file so # tqdm warnings can be disabled ahead of loading any code diff --git a/scripts/repl.py b/scripts/repl.py new file mode 100644 index 00000000..82d5ce26 --- /dev/null +++ b/scripts/repl.py @@ -0,0 +1,114 @@ +import dotenv +import code +import readline +from pinecone import Pinecone +import logging +import os +import time + + +def main(): + # You can add any setup code here, such as: + # - Setting environment variables + # - Importing commonly used modules + # - Setting up logging + # - Loading configuration files + + dotenv.load_dotenv() + logging.basicConfig( + level=logging.DEBUG, format="%(levelname)-8s | %(name)s:%(lineno)d | %(message)s" + ) + logger = logging.getLogger(__name__) + + # Set up readline history + histfile = os.path.join(os.path.expanduser("~"), ".python_repl_history") + try: + readline.read_history_file(histfile) + # Set history file size + readline.set_history_length(1000) + except FileNotFoundError: + pass + + # Start the interactive REPL + banner = """ + Welcome to the custom Python REPL! 
+ Your initialization steps have been completed. + + Three Pinecone objects are available: + - pc: Interact with the one-offs project + - pcci: Interact with the pinecone-python-client project (CI testing) + - pcl: Interact with a local test server at http://localhost:8000 + + You can use the following functions to clean up the environment: + - delete_all_indexes(pc) + - delete_all_collections(pc) + - delete_all_backups(pc) + - cleanup_all(pc) + """ + + # In situations where there are a lot of resources, we want to + # slow down the rate of requests + sleep_interval = 30 + + def delete_all_indexes(pc): + for index in pc.db.index.list(): + logger.info(f"Deleting index {index.name}") + try: + if index.deletion_protection == "enabled": + logger.info(f"Disabling deletion protection for index {index.name}") + pc.db.index.configure(name=index.name, deletion_protection="disabled") + pc.db.index.delete(name=index.name) + time.sleep(sleep_interval) + except Exception as e: + logger.error(f"Error deleting index {index.name}: {e}") + + def delete_all_collections(pc): + for collection in pc.db.collection.list(): + logger.info(f"Deleting collection {collection.name}") + try: + pc.db.collection.delete(name=collection.name) + time.sleep(sleep_interval) + except Exception as e: + logger.error(f"Error deleting collection {collection.name}: {e}") + + def delete_all_backups(pc): + for backup in pc.db.backup.list(): + logger.info(f"Deleting backup {backup.name}") + try: + pc.db.backup.delete(backup_id=backup.backup_id) + time.sleep(sleep_interval) + except Exception as e: + logger.error(f"Error deleting backup {backup.name}: {e}") + + def cleanup_all(pc): + delete_all_indexes(pc) + delete_all_collections(pc) + delete_all_backups(pc) + + # We want to route through preprod by default + if os.environ.get("PINECONE_ADDITIONAL_HEADERS") is None: + os.environ["PINECONE_ADDITIONAL_HEADERS"] = '{"x-environment": "preprod-aws-0"}' + + # Create a custom namespace with any pre-loaded variables + namespace = { + "__name__": "__main__", + "__doc__": None, + "pc": Pinecone(), + "pcci": Pinecone(api_key=os.environ.get("PINECONE_API_KEY_CI_TESTING")), + "delete_all_indexes": delete_all_indexes, + "delete_all_collections": delete_all_collections, + "delete_all_backups": delete_all_backups, + "cleanup_all": cleanup_all, + "pcl": Pinecone(host="http://localhost:8000"), + # Add any other variables you want to have available in the REPL + } + + try: + # Start the interactive console + code.interact(banner=banner, local=namespace) + finally: + # Save history when exiting + readline.write_history_file(histfile) + + +if __name__ == "__main__": + main() diff --git a/scripts/test-async-retry.py b/scripts/test-async-retry.py new file mode 100644 index 00000000..ca5f9bb4 --- /dev/null +++ b/scripts/test-async-retry.py @@ -0,0 +1,16 @@ +import dotenv +import asyncio +import logging +from pinecone import PineconeAsyncio + +dotenv.load_dotenv() + +logging.basicConfig(level=logging.DEBUG) + + +async def main(): + async with PineconeAsyncio(host="http://localhost:8000") as pc: + await pc.db.index.list() + + +asyncio.run(main()) diff --git a/scripts/test-server.py b/scripts/test-server.py new file mode 100644 index 00000000..784d510d --- /dev/null +++ b/scripts/test-server.py @@ -0,0 +1,136 @@ +from http.server import BaseHTTPRequestHandler, HTTPServer +import json + +backups_response = { + "data": [ + { + "backup_id": "6f52240b-6397-481b-9767-748a2d4d3b65", + "source_index_name": "jensparse", + "source_index_id": "71ded150-2b8e-422d-9849-097f2c89d18b", + "status": "Ready", + "cloud": "aws", + "region":
"us-east-1", + "tags": {}, + "name": "sparsebackup", + "description": "", + "dimension": 0, + "record_count": 10000, + "namespace_count": 1000, + "size_bytes": 123456, + "created_at": "2025-05-15T20:55:29.477794Z", + } + ] +} + +indexes_response = { + "indexes": [ + { + "name": "jhamon-20250515-165135548-reorg-create-with-e", + "metric": "dotproduct", + "host": "jhamon-20250515-165135548-reorg-create-with-e-bt8x3su.svc.aped-4627-b74a.pinecone.io", + "spec": {"serverless": {"cloud": "aws", "region": "us-east-1"}}, + "status": {"ready": True, "state": "Ready"}, + "vector_type": "sparse", + "dimension": None, + "deletion_protection": "disabled", + "tags": {"env": "dev"}, + }, + { + "name": "unexpected", + "metric": "newmetric", + "host": "jhamon-20250515-165135548-reorg-create-with-e-bt8x3su.svc.aped-4627-b74a.pinecone.io", + "spec": {"serverless": {"cloud": "aws", "region": "us-east-1"}}, + "status": {"ready": False, "state": "UnknownStatus"}, + "vector_type": "sparse", + "dimension": -1, + "deletion_protection": "disabled", + "tags": {"env": "dev"}, + }, + { + "name": "wrong-types", + "metric": 123, + "host": "jhamon-20250515-165135548-reorg-create-with-e-bt8x3su.svc.aped-4627-b74a.pinecone.io", + "spec": {"serverless": {"cloud": "aws", "region": "us-east-1"}}, + "status": {"ready": False, "state": "UnknownStatus"}, + "vector_type": None, + "dimension": None, + "deletion_protection": "asdf", + "tags": None, + }, + ] +} + +index_description_response = { + "name": "docs-example-dense", + "vector_type": "dense", + "metric": "cosine", + "dimension": 1536, + "status": {"ready": True, "state": "Ready"}, + "host": "docs-example-dense-govk0nt.svc.aped-4627-b74a.pinecone.io", + "spec": {"serverless": {"region": "us-east-1", "cloud": "aws"}}, + "deletion_protection": "disabled", + "tags": {"environment": "development"}, +} + +upsert_response = {"upsertedCount": 10} + +call_count = 0 + + +class MyHandler(BaseHTTPRequestHandler): + def do_POST(self): + global call_count + call_count += 1 + + # Simulate a high rate of 500 errors + if call_count % 5 != 0: + self.send_response(500) + self.end_headers() + return + + if self.path.startswith("/vectors/upsert"): + self.send_response(200) + self.send_header("Content-type", "application/json") + self.end_headers() + response = upsert_response + self.wfile.write(json.dumps(response).encode()) + else: + self.send_response(404) + self.end_headers() + + def do_GET(self): + global call_count + call_count += 1 + + # Simulate a high rate of 500 errors + if call_count % 5 != 0: + self.send_response(500) + self.end_headers() + return + + if self.path.startswith("/backups"): + self.send_response(200) + self.send_header("Content-type", "application/json") + self.end_headers() + response = backups_response + self.wfile.write(json.dumps(response).encode()) + elif self.path.startswith("/indexes/"): + self.send_response(200) + self.send_header("Content-type", "application/json") + self.end_headers() + response = index_description_response + self.wfile.write(json.dumps(response).encode()) + elif self.path.startswith("/indexes"): + self.send_response(200) + self.send_header("Content-type", "application/json") + self.end_headers() + response = indexes_response + self.wfile.write(json.dumps(response).encode()) + else: + self.send_response(404) + self.end_headers() + + +server = HTTPServer(("localhost", 8000), MyHandler) +print("Serving on http://localhost:8000") +server.serve_forever() diff --git a/tests/__init__.py b/tests/__init__.py index f2dab92a..e69de29b 100644 --- 
a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +0,0 @@ -import logging - -logging.basicConfig( - format="%(levelname)s [%(asctime)s] %(name)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S" -) diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index e69de29b..76acad39 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -0,0 +1,3 @@ +import dotenv + +dotenv.load_dotenv() diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 00000000..e42eedae --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,40 @@ +import logging +from pinecone import Pinecone +from datetime import datetime, timedelta + +logger = logging.getLogger(__name__) + + +def pytest_sessionfinish(session, exitstatus): + """ + Hook that runs after all tests have completed. + This is a good place to clean up any resources that were created during the test session. + """ + logger.info("Running final cleanup after all tests...") + + try: + # Initialize Pinecone client + pc = Pinecone() + + # Get all indexes + indexes = pc.list_indexes() + + # Find test indexes (those created during this test run) + test_indexes = [idx for idx in indexes.names() if idx.startswith("test-")] + + # Delete test indexes that are older than 1 hour (in case of failed cleanup) + for index_name in test_indexes: + try: + description = pc.describe_index(name=index_name) + created_at = datetime.fromisoformat(description.created_at.replace("Z", "+00:00")) + + if datetime.now(created_at.tzinfo) - created_at > timedelta(hours=1): + logger.info(f"Cleaning up old test index: {index_name}") + pc.delete_index(name=index_name, timeout=-1) + except Exception as e: + logger.warning(f"Failed to clean up index {index_name}: {str(e)}") + + except Exception as e: + logger.error(f"Error during final cleanup: {str(e)}") + + logger.info("Final cleanup completed") diff --git a/tests/integration/control/pod/conftest.py b/tests/integration/control/pod/conftest.py index dc1418eb..cbfdcc72 100644 --- a/tests/integration/control/pod/conftest.py +++ b/tests/integration/control/pod/conftest.py @@ -7,10 +7,7 @@ @pytest.fixture() def client(): - api_key = get_environment_var("PINECONE_API_KEY") - return Pinecone( - api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} - ) + return Pinecone() @pytest.fixture() @@ -66,10 +63,7 @@ def notready_index(client, index_name, create_index_params): @pytest.fixture(scope="session") def reusable_collection(): - pc = Pinecone( - api_key=get_environment_var("PINECONE_API_KEY"), - additional_headers={"sdk-test-suite": "pinecone-python-client"}, - ) + pc = Pinecone() index_name = generate_index_name("temp-index") dimension = int(get_environment_var("DIMENSION")) print(f"Creating index {index_name} to prepare a collection...") diff --git a/tests/integration/control/resources/__init__.py b/tests/integration/control/resources/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control/resources/backup/__init__.py b/tests/integration/control/resources/backup/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control/resources/backup/test_backup.py b/tests/integration/control/resources/backup/test_backup.py new file mode 100644 index 00000000..f61df1e5 --- /dev/null +++ b/tests/integration/control/resources/backup/test_backup.py @@ -0,0 +1,193 @@ +import pytest +import random +from ....helpers import random_string, poll_stats_for_namespace +import 
logging +import time +from pinecone import Pinecone + +logger = logging.getLogger(__name__) + + +class TestBackups: + def test_create_backup(self, pc: Pinecone, ready_sl_index, index_tags): + desc = pc.db.index.describe(name=ready_sl_index) + dimension = desc.dimension + + # Upsert some sample data + ns = random_string(10) + idx = pc.Index(name=ready_sl_index) + batch_size = 100 + num_batches = 10 + for _ in range(num_batches): + idx.upsert( + vectors=[ + {"id": random_string(15), "values": [random.random() for _ in range(dimension)]} + for _ in range(batch_size) + ], + namespace=ns, + ) + + poll_stats_for_namespace(idx=idx, namespace=ns, expected_count=batch_size * num_batches) + logger.debug("Sleeping for 180 seconds to ensure vectors are indexed") + time.sleep(180) + + index_stats = idx.describe_index_stats() + logger.debug(f"Index stats for index {ready_sl_index}: {index_stats}") + + backup_name = "backup-" + random_string(10) + backup = pc.db.backup.create(backup_name=backup_name, index_name=ready_sl_index) + assert backup.backup_id is not None + assert backup.name == backup_name + assert backup.source_index_name == ready_sl_index + + # Describe the backup + backup_desc = pc.db.backup.describe(backup_id=backup.backup_id) + assert backup_desc.name == backup_name + assert backup_desc.backup_id == backup.backup_id + assert backup_desc.source_index_name == ready_sl_index + logger.info(f"Backup description: {backup_desc}") + + # Wait for the backup to be ready before proceeding + backup_ready = False + max_wait = 5 * 60 + while not backup_ready: + backup_desc = pc.db.backup.describe(backup_id=backup.backup_id) + logger.info(f"Backup description: {backup_desc}") + if backup_desc.status == "Ready": + backup_ready = True + else: + if max_wait <= 0: + raise Exception("Backup did not become ready in time") + max_wait -= 5 + time.sleep(5) + + # Verify that the backup shows in list + backups_list = pc.db.backup.list(index_name=ready_sl_index) + assert len(backups_list) >= 1 + assert any(b.name == backup_name for b in backups_list) + assert any(b.backup_id == backup.backup_id for b in backups_list) + assert any(b.source_index_name == ready_sl_index for b in backups_list) + + # Create index from backup + new_index_name = "from-backup-" + random_string(10) + new_index = pc.db.index.create_from_backup( + name=new_index_name, backup_id=backup.backup_id, tags=index_tags + ) + assert new_index.name == new_index_name + assert new_index.tags is not None + assert new_index.dimension == desc.dimension + assert new_index.metric == desc.metric + + # Can list restore jobs + logger.info("Listing restore jobs") + restore_jobs = pc.db.restore_job.list() + assert len(restore_jobs) >= 1, f"Expected at least one restore job, got {len(restore_jobs)}" + + # Verify that the new index has the same data as the original index + new_idx = pc.Index(name=new_index_name) + stats = new_idx.describe_index_stats() + logger.info(f"New index stats: {stats}") + assert stats.namespaces[ns].vector_count == batch_size * num_batches + + # Delete the new index + pc.db.index.delete(name=new_index_name) + + # Delete the backup + pc.db.backup.delete(backup_id=backup.backup_id) + + # Verify that the backup is deleted + with pytest.raises(Exception): + pc.db.backup.describe(backup_id=backup.backup_id) + + def test_create_backup_legacy_syntax(self, pc: Pinecone, ready_sl_index, index_tags): + desc = pc.describe_index(name=ready_sl_index) + dimension = desc.dimension + + # Upsert some sample data + ns = random_string(10) + idx = 
pc.Index(name=ready_sl_index) + batch_size = 100 + num_batches = 10 + for _ in range(num_batches): + idx.upsert( + vectors=[ + {"id": random_string(15), "values": [random.random() for _ in range(dimension)]} + for _ in range(batch_size) + ], + namespace=ns, + ) + + poll_stats_for_namespace(idx=idx, namespace=ns, expected_count=batch_size * num_batches) + logger.debug("Sleeping for 180 seconds to ensure vectors are indexed") + time.sleep(180) + + index_stats = idx.describe_index_stats() + logger.debug(f"Index stats for index {ready_sl_index}: {index_stats}") + + backup_name = "backup-" + random_string(10) + backup = pc.create_backup(backup_name=backup_name, index_name=ready_sl_index) + assert backup.backup_id is not None + assert backup.name == backup_name + assert backup.source_index_name == ready_sl_index + + # Describe the backup + backup_desc = pc.describe_backup(backup_id=backup.backup_id) + assert backup_desc.name == backup_name + assert backup_desc.backup_id == backup.backup_id + assert backup_desc.source_index_name == ready_sl_index + logger.info(f"Backup description: {backup_desc}") + + # Wait for the backup to be ready before proceeding + backup_ready = False + max_wait = 5 * 60 + while not backup_ready: + backup_desc = pc.describe_backup(backup_id=backup.backup_id) + logger.info(f"Backup description: {backup_desc}") + if backup_desc.status == "Ready": + backup_ready = True + else: + if max_wait <= 0: + raise Exception("Backup did not become ready in time") + max_wait -= 5 + time.sleep(5) + + # Verify that the backup shows in list + backups_list = pc.list_backups(index_name=ready_sl_index) + assert len(backups_list) >= 1 + assert any(b.name == backup_name for b in backups_list) + assert any(b.backup_id == backup.backup_id for b in backups_list) + assert any(b.source_index_name == ready_sl_index for b in backups_list) + + # Create index from backup + new_index_name = "from-backup-" + random_string(10) + new_index = pc.create_index_from_backup( + name=new_index_name, backup_id=backup.backup_id, tags=index_tags + ) + assert new_index.name == new_index_name + assert new_index.tags is not None + assert new_index.dimension == desc.dimension + assert new_index.metric == desc.metric + + # Can list restore jobs + restore_jobs = pc.list_restore_jobs() + assert len(restore_jobs) >= 1 + + # Verify that the new index has the same data as the original index + new_idx = pc.Index(name=new_index_name) + stats = new_idx.describe_index_stats() + logger.info(f"New index stats: {stats}") + assert stats.namespaces[ns].vector_count == batch_size * num_batches + + # Delete the new index + pc.delete_index(name=new_index_name) + + # Delete the backup + pc.delete_backup(backup_id=backup.backup_id) + + # Verify that the backup is deleted + with pytest.raises(Exception): + pc.describe_backup(backup_id=backup.backup_id) + + # Verify that no backups remain for the source index + backup_list = pc.list_backups(index_name=ready_sl_index) + assert len(backup_list) == 0 diff --git a/tests/integration/control/resources/collections/__init__.py b/tests/integration/control/resources/collections/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control/resources/collections/helpers.py b/tests/integration/control/resources/collections/helpers.py new file mode 100644 index 00000000..58633a69 --- /dev/null +++ b/tests/integration/control/resources/collections/helpers.py @@ -0,0 +1,57 @@ +import time +import random +import logging + +logger = logging.getLogger(__name__) + + +def
random_vector(dimension): + return [random.uniform(0, 1) for _ in range(dimension)] + + +def attempt_cleanup_collection(pc, collection_name): + max_wait = 120 + time_waited = 0 + deleted = False + + while time_waited < max_wait: + try: + pc.db.collection.delete(name=collection_name) + deleted = True + break + except Exception as e: + # Failures here usually happen because the backend thinks there is still some + # operation pending on the resource. + # These orphaned resources will get cleaned up by the cleanup job later. + logger.debug(f"Error while cleaning up collection: {e}") + logger.debug( + f"Waiting for collection {collection_name} to be deleted. Waited {time_waited} seconds..." + ) + time.sleep(10) + time_waited += 10 + if not deleted: + logger.warning(f"Collection {collection_name} was not deleted after {max_wait} seconds") + + +def attempt_cleanup_index(pc, index_name): + max_wait = 120 + time_waited = 0 + deleted = False + + while time_waited < max_wait: + try: + pc.db.index.delete(name=index_name) + deleted = True + break + except Exception as e: + # Failures here usually happen because the backend thinks there is still some + # operation pending on the resource. + # These orphaned resources will get cleaned up by the cleanup job later. + logger.debug(f"Error while cleaning up index: {e}") + logger.debug( + f"Waiting for index {index_name} to be deleted. Waited {time_waited} seconds..." + ) + time.sleep(10) + time_waited += 10 + if not deleted: + logger.warning(f"Index {index_name} was not deleted after {max_wait} seconds") diff --git a/tests/integration/control/resources/collections/test_dense_index.py b/tests/integration/control/resources/collections/test_dense_index.py new file mode 100644 index 00000000..6c76a962 --- /dev/null +++ b/tests/integration/control/resources/collections/test_dense_index.py @@ -0,0 +1,172 @@ +import time +from pinecone import PodSpec +from ....helpers import generate_index_name, generate_collection_name +import logging +from .helpers import attempt_cleanup_collection, attempt_cleanup_index, random_vector + +logger = logging.getLogger(__name__) + + +class TestCollectionsHappyPath: + def test_dense_index_to_collection_to_index(self, pc, pod_environment, index_tags): + # Create a pod index + index_name = generate_index_name("pod-index") + dimension = 10 + metric = "cosine" + pod_index = pc.db.index.create( + name=index_name, + dimension=dimension, + metric=metric, + spec=PodSpec(environment=pod_environment), + tags=index_tags, + ) + + # Insert some vectors into the pod index + idx = pc.Index(host=pod_index.host) + num_vectors = 10 + namespaces = ["", "test-ns1", "test-ns2"] + for namespace in namespaces: + vectors = [(str(i), random_vector(dimension)) for i in range(num_vectors)] + idx.upsert(vectors=vectors, namespace=namespace) + + # Wait for the vectors to be available + all_vectors_available = False + max_wait = 180 + time_waited = 0 + while not all_vectors_available and time_waited < max_wait: + all_vectors_available = True + desc = idx.describe_index_stats() + for namespace in namespaces: + if ( + desc.namespaces.get(namespace, None) is None + or desc.namespaces[namespace]["vector_count"] != num_vectors + ): + logger.debug(f"Waiting for vectors to be available in namespace {namespace}...") + all_vectors_available = False + break + for namespace in namespaces: + for i in range(num_vectors): + try: + idx.fetch(ids=[str(i)], namespace=namespace) + except Exception: + logger.debug( + f"Waiting for vector {i} to be available in namespace 
{namespace}..." + ) + all_vectors_available = False + break + if not all_vectors_available: + time.sleep(5) + time_waited += 5 + if not all_vectors_available: + raise Exception(f"Vectors were not available after {max_wait} seconds") + + # Create a collection from the pod index + collection_name = generate_collection_name("coll1") + pc.db.collection.create(name=collection_name, source=index_name) + collection_desc = pc.db.collection.describe(name=collection_name) + logger.debug(f"Collection desc: {collection_desc}") + assert collection_desc["name"] == collection_name + assert collection_desc["environment"] == pod_environment + assert collection_desc["status"] is not None + + # Wait for the collection to be ready + time_waited = 0 + max_wait = 120 + collection_ready = collection_desc["status"] + while collection_ready.lower() != "ready" and time_waited < max_wait: + logger.debug( + f"Waiting for collection {collection_name} to be ready. Waited {time_waited} seconds..." + ) + desc = pc.db.collection.describe(name=collection_name) + logger.debug(f"Collection desc: {desc}") + collection_ready = desc["status"] + if collection_ready.lower() != "ready": + time.sleep(10) + time_waited += 10 + if collection_ready.lower() != "ready": + raise Exception(f"Collection {collection_name} is not ready after {max_wait} seconds") + + # Verify the collection was created + assert collection_name in pc.db.collection.list().names() + + # Verify the collection has the correct info + collection_desc = pc.db.collection.describe(name=collection_name) + logger.debug(f"Collection desc: {collection_desc}") + assert collection_desc["name"] == collection_name + assert collection_desc["environment"] == pod_environment + assert collection_desc["status"] == "Ready" + assert collection_desc["dimension"] == dimension + assert collection_desc["vector_count"] == len(namespaces) * num_vectors + assert collection_desc["size"] is not None + assert collection_desc["size"] > 0 + + # Create new index from collection + index_name2 = generate_index_name("index-from-collection-" + collection_name) + print(f"Creating index {index_name} from collection {collection_name}...") + new_index = pc.db.index.create( + name=index_name2, + dimension=dimension, + metric=metric, + spec=PodSpec(environment=pod_environment, source_collection=collection_name), + tags=index_tags, + ) + logger.debug(f"Created index {index_name2} from collection {collection_name}: {new_index}") + + # Wait for the index to be ready + max_wait = 120 + time_waited = 0 + index_ready = False + while not index_ready and time_waited < max_wait: + logger.debug( + f"Waiting for index {index_name} to be ready. Waited {time_waited} seconds..." 
+ ) + desc = pc.db.index.describe(name=index_name) + logger.debug(f"Index {index_name} status: {desc['status']}") + index_ready = desc["status"]["ready"] == True + if not index_ready: + time.sleep(10) + time_waited += 10 + if not index_ready: + raise Exception(f"Index {index_name} is not ready after {max_wait} seconds") + + new_index_desc = pc.db.index.describe(name=index_name) + logger.debug(f"New index desc: {new_index_desc}") + assert new_index_desc["name"] == index_name + assert new_index_desc["status"]["ready"] == True + + new_idx = pc.Index(name=index_name) + + # Verify stats reflect the vectors present in the collection + stats = new_idx.describe_index_stats() + logger.debug(f"New index stats: {stats}") + assert stats.total_vector_count == len(namespaces) * num_vectors + + # Verify the vectors from the collection can be fetched + for namespace in namespaces: + results = new_idx.fetch(ids=[v[0] for v in vectors], namespace=namespace) + logger.debug(f"Results for namespace {namespace}: {results}") + assert len(results.vectors) != 0 + + # Verify the vectors from the collection can be queried by id + for namespace in namespaces: + for i in range(num_vectors): + results = new_idx.query(top_k=3, id=str(i), namespace=namespace) + logger.debug( + f"Query results for namespace {namespace} and id {i} in index {index_name2}: {results}" + ) + assert len(results.matches) == 3 + + # Compapre with results from original index + original_results = idx.query(top_k=3, id=str(i), namespace=namespace) + logger.debug( + f"Original query results for namespace {namespace} and id {i} in index {index_name}: {original_results}" + ) + assert len(original_results.matches) == 3 + assert original_results.matches[0].id == results.matches[0].id + assert original_results.matches[1].id == results.matches[1].id + assert original_results.matches[2].id == results.matches[2].id + + # Cleanup + attempt_cleanup_collection(pc, collection_name) + attempt_cleanup_index(pc, index_name) + attempt_cleanup_index(pc, index_name2) diff --git a/tests/integration/control/resources/conftest.py b/tests/integration/control/resources/conftest.py new file mode 100644 index 00000000..93060a66 --- /dev/null +++ b/tests/integration/control/resources/conftest.py @@ -0,0 +1,66 @@ +import os +import pytest +import uuid +import logging +import dotenv +from pinecone import Pinecone, PodIndexEnvironment +from ...helpers import delete_indexes_from_run, delete_backups_from_run, default_create_index_params + +dotenv.load_dotenv() + +logger = logging.getLogger(__name__) +""" @private """ + +# Generate a unique ID for the entire test run +RUN_ID = str(uuid.uuid4()) + + +@pytest.fixture() +def pc(): + return Pinecone() + + +@pytest.fixture() +def create_index_params(request): + return default_create_index_params(request, RUN_ID) + + +@pytest.fixture() +def index_name(create_index_params): + return create_index_params["name"] + + +@pytest.fixture() +def index_tags(create_index_params): + return create_index_params["tags"] + + +@pytest.fixture +def pod_environment(): + return os.getenv("POD_ENVIRONMENT", PodIndexEnvironment.US_EAST1_AWS.value) + + +@pytest.fixture() +def ready_sl_index(pc, index_name, create_index_params): + create_index_params["timeout"] = None + pc.create_index(**create_index_params) + yield index_name + pc.db.index.delete(name=index_name, timeout=-1) + + +@pytest.fixture() +def notready_sl_index(pc, index_name, create_index_params): + pc.create_index(**create_index_params, timeout=-1) + yield index_name + + +def 
pytest_sessionfinish(session, exitstatus): + """ + Hook that runs after all tests have completed. + This is a good place to clean up any resources that were created during the test session. + """ + logger.info("Running final cleanup after all tests...") + + pc = Pinecone() + delete_indexes_from_run(pc, RUN_ID) + delete_backups_from_run(pc, RUN_ID) diff --git a/tests/integration/control/resources/index/__init__.py b/tests/integration/control/resources/index/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control/resources/index/test_configure.py b/tests/integration/control/resources/index/test_configure.py new file mode 100644 index 00000000..f4c73094 --- /dev/null +++ b/tests/integration/control/resources/index/test_configure.py @@ -0,0 +1,43 @@ +class TestConfigureIndexTags: + def test_add_index_tags(self, pc, ready_sl_index): + starting_tags = pc.db.index.describe(name=ready_sl_index).tags + assert "foo" not in starting_tags + assert "bar" not in starting_tags + + pc.db.index.configure(name=ready_sl_index, tags={"foo": "FOO", "bar": "BAR"}) + + found_tags = pc.db.index.describe(name=ready_sl_index).tags.to_dict() + assert found_tags is not None + assert found_tags["foo"] == "FOO" + assert found_tags["bar"] == "BAR" + + def test_remove_tags_by_setting_empty_value_for_key(self, pc, ready_sl_index): + pc.db.index.configure(name=ready_sl_index, tags={"foo": "FOO", "bar": "BAR"}) + pc.db.index.configure(name=ready_sl_index, tags={}) + found_tags = pc.db.index.describe(name=ready_sl_index).tags.to_dict() + assert found_tags is not None + assert found_tags.get("foo", None) == "FOO", "foo should not be removed" + assert found_tags.get("bar", None) == "BAR", "bar should not be removed" + + pc.db.index.configure(name=ready_sl_index, tags={"foo": ""}) + found_tags2 = pc.db.index.describe(name=ready_sl_index).tags.to_dict() + assert found_tags2 is not None + assert found_tags2.get("foo", None) is None, "foo should be removed" + assert found_tags2.get("bar", None) == "BAR", "bar should not be removed" + + def test_merge_new_tags_with_existing_tags(self, pc, ready_sl_index): + pc.db.index.configure(name=ready_sl_index, tags={"foo": "FOO", "bar": "BAR"}) + pc.db.index.configure(name=ready_sl_index, tags={"baz": "BAZ"}) + found_tags = pc.db.index.describe(name=ready_sl_index).tags.to_dict() + assert found_tags is not None + assert found_tags.get("foo", None) == "FOO", "foo should not be removed" + assert found_tags.get("bar", None) == "BAR", "bar should not be removed" + assert found_tags.get("baz", None) == "BAZ", "baz should be added" + + def test_remove_multiple_tags(self, pc, ready_sl_index): + pc.db.index.configure(name=ready_sl_index, tags={"foo": "FOO", "bar": "BAR"}) + pc.db.index.configure(name=ready_sl_index, tags={"foo": "", "bar": ""}) + found_tags = pc.db.index.describe(name=ready_sl_index).tags.to_dict() + assert found_tags is not None + assert found_tags.get("foo", None) is None, "foo should be removed" + assert found_tags.get("bar", None) is None, "bar should be removed" diff --git a/tests/integration/control/resources/index/test_create.py b/tests/integration/control/resources/index/test_create.py new file mode 100644 index 00000000..55bf66c2 --- /dev/null +++ b/tests/integration/control/resources/index/test_create.py @@ -0,0 +1,322 @@ +import pytest +import time +from pinecone import ( + Pinecone, + Metric, + VectorType, + DeletionProtection, + ServerlessSpec, + PodSpec, + CloudProvider, + AwsRegion, + PineconeApiValueError, + 
PineconeApiException, + PineconeApiTypeError, + PodIndexEnvironment, +) + + +class TestCreateServerlessIndexHappyPath: + def test_create_index(self, pc: Pinecone, index_name, index_tags): + resp = pc.db.index.create( + name=index_name, + dimension=10, + spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + tags=index_tags, + ) + assert resp.name == index_name + assert resp.dimension == 10 + assert resp.metric == "cosine" # default value + assert resp.vector_type == "dense" # default value + assert resp.deletion_protection == "disabled" # default value + + desc = pc.db.index.describe(name=index_name) + assert desc.name == index_name + assert desc.dimension == 10 + assert desc.metric == "cosine" + assert desc.deletion_protection == "disabled" # default value + assert desc.vector_type == "dense" # default value + + def test_create_skip_wait(self, pc, index_name, index_tags): + resp = pc.db.index.create( + name=index_name, + dimension=10, + spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + timeout=-1, + tags=index_tags, + ) + assert resp.name == index_name + assert resp.dimension == 10 + assert resp.metric == "cosine" + + def test_create_infinite_wait(self, pc, index_name, index_tags): + resp = pc.db.index.create( + name=index_name, + dimension=10, + spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + timeout=None, + tags=index_tags, + ) + assert resp.name == index_name + assert resp.dimension == 10 + assert resp.metric == "cosine" + + @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) + def test_create_default_index_with_metric(self, pc, create_index_params, metric): + create_index_params["metric"] = metric + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(name=create_index_params["name"]) + if isinstance(metric, str): + assert desc.metric == metric + else: + assert desc.metric == metric.value + assert desc.vector_type == "dense" + + @pytest.mark.parametrize( + "metric_enum,vector_type_enum,dim", + [ + (Metric.COSINE, VectorType.DENSE, 10), + (Metric.EUCLIDEAN, VectorType.DENSE, 10), + (Metric.DOTPRODUCT, VectorType.SPARSE, None), + ], + ) + def test_create_with_enum_values( + self, pc, index_name, metric_enum, vector_type_enum, dim, index_tags + ): + args = { + "name": index_name, + "metric": metric_enum, + "vector_type": vector_type_enum, + "deletion_protection": DeletionProtection.DISABLED, + "spec": ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + "tags": index_tags, + } + if dim is not None: + args["dimension"] = dim + + pc.db.index.create(**args) + + desc = pc.db.index.describe(name=index_name) + assert desc.metric == metric_enum.value + assert desc.vector_type == vector_type_enum.value + assert desc.dimension == dim + assert desc.deletion_protection == DeletionProtection.DISABLED.value + assert desc.name == index_name + assert desc.spec.serverless.cloud == "aws" + assert desc.spec.serverless.region == "us-east-1" + assert desc.tags.to_dict() == index_tags + + @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) + def test_create_dense_index_with_metric(self, pc, create_index_params, metric): + create_index_params["metric"] = metric + create_index_params["vector_type"] = VectorType.DENSE + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(name=create_index_params["name"]) + assert desc.metric == metric + assert desc.vector_type == "dense" + + +class TestCreatePodIndexHappyPath: + def 
test_create_index_minimal_config( + self, pc: Pinecone, index_name, pod_environment, index_tags + ): + pc.db.index.create( + name=index_name, + dimension=10, + metric="cosine", + spec=PodSpec(environment=pod_environment), + tags=index_tags, + ) + + desc = pc.db.index.describe(name=index_name) + assert desc.name == index_name + assert desc.dimension == 10 + assert desc.metric == "cosine" + assert desc.spec.pod.environment == pod_environment + assert desc.tags.to_dict() == index_tags + assert desc.status.ready == True + assert desc.status.state == "Ready" + assert desc.vector_type == "dense" + + def test_create_index_with_spec_options( + self, pc: Pinecone, index_name, pod_environment, index_tags + ): + pc.db.index.create( + name=index_name, + dimension=10, + metric="cosine", + spec=PodSpec( + environment=pod_environment, + pod_type="p1.x1", + replicas=2, + metadata_config={"indexed": ["foo", "bar"]}, + ), + tags=index_tags, + ) + + desc = pc.db.index.describe(name=index_name) + assert desc.name == index_name + assert desc.dimension == 10 + assert desc.metric == "cosine" + assert desc.spec.pod.environment == pod_environment + assert desc.spec.pod.pod_type == "p1.x1" + assert desc.spec.pod.replicas == 2 + assert desc.spec.pod.metadata_config.indexed == ["foo", "bar"] + assert desc.tags.to_dict() == index_tags + + def test_create_index_with_deletion_protection( + self, pc: Pinecone, index_name, pod_environment, index_tags + ): + pc.db.index.create( + name=index_name, + dimension=10, + metric="cosine", + spec=PodSpec(environment=pod_environment), + tags=index_tags, + deletion_protection=DeletionProtection.ENABLED, + ) + + with pytest.raises(PineconeApiException) as e: + pc.db.index.delete(name=index_name) + assert "Deletion protection is enabled for this index" in str(e.value) + + pc.db.index.configure(name=index_name, deletion_protection=DeletionProtection.DISABLED) + max_wait_time = 60 + while pc.db.index.describe(name=index_name).status.ready == False: + time.sleep(1) + max_wait_time -= 1 + if max_wait_time <= 0: + raise Exception("Index did not become ready in time") + + pc.db.index.delete(name=index_name) + assert pc.db.index.has(name=index_name) == False + + +class TestCreatePodIndexApiErrorCases: + def test_pod_index_does_not_support_sparse_vectors(self, pc, index_name, index_tags): + with pytest.raises(PineconeApiException) as e: + pc.db.index.create( + name=index_name, + metric="dotproduct", + spec=PodSpec(environment=PodIndexEnvironment.US_EAST1_AWS), + vector_type="sparse", + tags=index_tags, + ) + assert "Sparse vector type is not supported for pod indexes" in str(e.value) + + +class TestCreateServerlessIndexApiErrorCases: + def test_create_index_with_invalid_name(self, pc, create_index_params): + create_index_params["name"] = "Invalid-name" + with pytest.raises(PineconeApiException): + pc.db.index.create(**create_index_params) + + def test_create_index_invalid_metric(self, pc, create_index_params): + create_index_params["metric"] = "invalid" + with pytest.raises(PineconeApiValueError): + pc.db.index.create(**create_index_params) + + def test_create_index_with_invalid_neg_dimension(self, pc, create_index_params): + create_index_params["dimension"] = -1 + with pytest.raises(PineconeApiValueError): + pc.db.index.create(**create_index_params) + + def test_create_index_that_already_exists(self, pc, create_index_params): + pc.db.index.create(**create_index_params) + with pytest.raises(PineconeApiException): + pc.db.index.create(**create_index_params) + + +class TestCreateServerlessIndexWithTimeout:
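+ """Covers the timeout argument to create: None waits until the index is ready, a positive value waits up to that many seconds, and -1 returns immediately without waiting.""" +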
+ def test_create_index_default_timeout(self, pc, create_index_params): + create_index_params["timeout"] = None + pc.db.index.create(**create_index_params) + # Waits infinitely for index to be ready + desc = pc.db.index.describe(name=create_index_params["name"]) + assert desc.status.ready == True + + def test_create_index_when_timeout_set(self, pc, create_index_params): + create_index_params["timeout"] = ( + 1000 # effectively infinite, but different code path from None + ) + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(name=create_index_params["name"]) + assert desc.status.ready == True + + def test_create_index_with_negative_timeout(self, pc, create_index_params): + create_index_params["timeout"] = -1 + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(name=create_index_params["name"]) + # Returns immediately without waiting for index to be ready + assert desc.status.ready in [False, True] + + +class TestCreateIndexTypeErrorCases: + def test_create_index_with_invalid_str_dimension(self, pc, create_index_params): + create_index_params["dimension"] = "10" + with pytest.raises(PineconeApiTypeError): + pc.db.index.create(**create_index_params) + + def test_create_index_with_missing_dimension(self, pc, create_index_params): + del create_index_params["dimension"] + with pytest.raises(PineconeApiException): + pc.db.index.create(**create_index_params) + + def test_create_index_w_incompatible_options(self, pc, create_index_params): + create_index_params["pod_type"] = "p1.x2" + create_index_params["environment"] = "us-east1-gcp" + create_index_params["replicas"] = 2 + with pytest.raises(TypeError): + pc.db.index.create(**create_index_params) + + @pytest.mark.parametrize("required_option", ["name", "spec", "dimension"]) + def test_create_with_missing_required_options(self, pc, create_index_params, required_option): + del create_index_params[required_option] + with pytest.raises(Exception) as e: + pc.db.index.create(**create_index_params) + assert required_option.lower() in str(e.value).lower() + + +class TestSparseIndex: + def test_create_sparse_index_minimal_config(self, pc: Pinecone, index_name, index_tags): + pc.db.index.create( + name=index_name, + metric="dotproduct", + spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + vector_type=VectorType.SPARSE, + tags=index_tags, + ) + + desc = pc.db.index.describe(name=index_name) + assert desc.name == index_name + assert desc.metric == "dotproduct" + assert desc.vector_type == "sparse" + + +class TestSparseIndexErrorCases: + def test_sending_dimension_with_sparse_index(self, pc, index_tags): + with pytest.raises(ValueError) as e: + pc.db.index.create( + name="test-index", + dimension=10, + metric="dotproduct", + vector_type=VectorType.SPARSE, + spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + tags=index_tags, + ) + assert "dimension should not be specified for sparse indexes" in str(e.value) + + @pytest.mark.parametrize("bad_metric", ["cosine", "euclidean"]) + def test_sending_metric_other_than_dotproduct_with_sparse_index( + self, pc, index_tags, bad_metric + ): + with pytest.raises(PineconeApiException) as e: + pc.db.index.create( + name="test-index", + metric=bad_metric, + vector_type=VectorType.SPARSE, + spec=ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + tags=index_tags, + ) + assert "Sparse vector indexes must use the metric dotproduct" in str(e.value) diff --git a/tests/integration/control/resources/index/test_delete.py 
b/tests/integration/control/resources/index/test_delete.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control/resources/index/test_describe.py b/tests/integration/control/resources/index/test_describe.py new file mode 100644 index 00000000..eacb412e --- /dev/null +++ b/tests/integration/control/resources/index/test_describe.py @@ -0,0 +1,46 @@ +from pinecone import IndexModel + + +class TestDescribeIndex: + def test_describe_index_when_ready(self, pc, ready_sl_index, create_index_params): + description = pc.db.index.describe(name=ready_sl_index) + + assert isinstance(description, IndexModel) + assert description.name == ready_sl_index + assert description.dimension == create_index_params["dimension"] + assert description.metric == create_index_params["metric"] + assert ( + description.spec.serverless["cloud"] + == create_index_params["spec"]["serverless"]["cloud"] + ) + assert ( + description.spec.serverless["region"] + == create_index_params["spec"]["serverless"]["region"] + ) + + assert isinstance(description.host, str) + assert description.host != "" + assert ready_sl_index in description.host + + assert description.status.state == "Ready" + assert description.status.ready == True + + def test_describe_index_when_not_ready(self, pc, notready_sl_index, create_index_params): + description = pc.db.index.describe(name=notready_sl_index) + + assert isinstance(description, IndexModel) + assert description.name == notready_sl_index + assert description.dimension == create_index_params["dimension"] + assert description.metric == create_index_params["metric"] + assert ( + description.spec.serverless["cloud"] + == create_index_params["spec"]["serverless"]["cloud"] + ) + assert ( + description.spec.serverless["region"] + == create_index_params["spec"]["serverless"]["region"] + ) + + assert isinstance(description.host, str) + assert description.host != "" + assert notready_sl_index in description.host diff --git a/tests/integration/control/resources/index/test_has.py b/tests/integration/control/resources/index/test_has.py new file mode 100644 index 00000000..62aba165 --- /dev/null +++ b/tests/integration/control/resources/index/test_has.py @@ -0,0 +1,18 @@ +from ....helpers import random_string + + +class TestHasIndex: + def test_index_exists_success(self, pc, create_index_params): + name = create_index_params["name"] + pc.db.index.create(**create_index_params) + has_index = pc.db.index.has(name=name) + assert has_index == True + + def test_index_does_not_exist(self, pc): + name = random_string(8) + has_index = pc.db.index.has(name=name) + assert has_index == False + + def test_has_index_with_null_index_name(self, pc): + has_index = pc.db.index.has(name="") + assert has_index == False diff --git a/tests/integration/control/resources/index/test_list.py b/tests/integration/control/resources/index/test_list.py new file mode 100644 index 00000000..e45d15b9 --- /dev/null +++ b/tests/integration/control/resources/index/test_list.py @@ -0,0 +1,27 @@ +from pinecone import IndexModel + + +class TestListIndexes: + def test_list_indexes_includes_ready_indexes(self, pc, ready_sl_index, create_index_params): + list_response = pc.db.index.list() + assert len(list_response.indexes) != 0 + assert isinstance(list_response.indexes[0], IndexModel) + + created_index = [index for index in list_response.indexes if index.name == ready_sl_index][ + 0 + ] + assert created_index.name == ready_sl_index + assert created_index.dimension == create_index_params["dimension"] + assert 
created_index.metric == create_index_params["metric"] + assert ready_sl_index in created_index.host + + def test_list_indexes_includes_not_ready_indexes(self, pc, notready_sl_index): + list_response = pc.db.index.list() + assert len(list_response.indexes) != 0 + assert isinstance(list_response.indexes[0], IndexModel) + + created_index = [ + index for index in list_response.indexes if index.name == notready_sl_index + ][0] + assert created_index.name == notready_sl_index + assert notready_sl_index in created_index.host diff --git a/tests/integration/control/resources/restore_job/__init__.py b/tests/integration/control/resources/restore_job/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control/resources/restore_job/test_describe.py b/tests/integration/control/resources/restore_job/test_describe.py new file mode 100644 index 00000000..7b3809b9 --- /dev/null +++ b/tests/integration/control/resources/restore_job/test_describe.py @@ -0,0 +1,38 @@ +import pytest +from pinecone import Pinecone, PineconeApiException +import logging +from datetime import datetime + +logger = logging.getLogger(__name__) + + +class TestRestoreJobDescribe: + def test_describe_restore_job(self, pc: Pinecone): + jobs = pc.db.restore_job.list() + assert len(jobs.data) >= 1 + + restore_job_id = jobs.data[0].restore_job_id + restore_job = pc.db.restore_job.describe(job_id=restore_job_id) + logger.debug(f"Restore job: {restore_job}") + + assert restore_job.restore_job_id == restore_job_id + assert restore_job.backup_id is not None + assert isinstance(restore_job.status, str) + assert isinstance(restore_job.backup_id, str) + assert isinstance(restore_job.completed_at, datetime) + assert isinstance(restore_job.created_at, datetime) + assert isinstance(restore_job.percent_complete, float) + assert isinstance(restore_job.target_index_id, str) + assert isinstance(restore_job.target_index_name, str) + + def test_describe_restore_job_legacy_syntax(self, pc: Pinecone): + jobs = pc.list_restore_jobs() + assert len(jobs.data) >= 1 + + restore_job_id = jobs.data[0].restore_job_id + restore_job = pc.describe_restore_job(job_id=restore_job_id) + logger.debug(f"Restore job: {restore_job}") + + def test_describe_restore_job_with_invalid_job_id(self, pc: Pinecone): + with pytest.raises(PineconeApiException): + pc.db.restore_job.describe(job_id="invalid") diff --git a/tests/integration/control/resources/restore_job/test_list.py b/tests/integration/control/resources/restore_job/test_list.py new file mode 100644 index 00000000..379b37dd --- /dev/null +++ b/tests/integration/control/resources/restore_job/test_list.py @@ -0,0 +1,58 @@ +import pytest +import logging +from pinecone import Pinecone, PineconeApiValueError, PineconeApiException + +logger = logging.getLogger(__name__) + + +class TestRestoreJobList: + def test_list_restore_jobs_no_arguments(self, pc: Pinecone): + restore_jobs = pc.db.restore_job.list() + assert restore_jobs.data is not None + logger.debug(f"Restore jobs count: {len(restore_jobs.data)}") + + # This assumes the backup test has been run at least once + # in the same project.
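+ # Restoring an index from a backup (see the backup tests) is what creates a restore job.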
+ assert len(restore_jobs.data) >= 1 + + def test_list_restore_jobs_with_optional_arguments(self, pc: Pinecone): + restore_jobs = pc.db.restore_job.list(limit=2) + assert restore_jobs.data is not None + logger.debug(f"Restore jobs count: {len(restore_jobs.data)}") + assert len(restore_jobs.data) <= 2 + + if len(restore_jobs.data) == 2: + logger.debug(f"Restore jobs pagination: {restore_jobs.pagination}") + assert restore_jobs.pagination is not None + assert restore_jobs.pagination.next is not None + + next_page = pc.db.restore_job.list( + limit=2, pagination_token=restore_jobs.pagination.next + ) + assert next_page.data is not None + assert len(next_page.data) <= 2 + + def test_list_restore_jobs_legacy_syntax(self, pc: Pinecone): + restore_jobs = pc.list_restore_jobs(limit=2) + assert restore_jobs.data is not None + logger.debug(f"Restore jobs count: {len(restore_jobs.data)}") + assert len(restore_jobs.data) <= 2 + + if len(restore_jobs.data) == 2: + logger.debug(f"Restore jobs pagination: {restore_jobs.pagination}") + assert restore_jobs.pagination is not None + assert restore_jobs.pagination.next is not None + + next_page = pc.list_restore_jobs(limit=2, pagination_token=restore_jobs.pagination.next) + assert next_page.data is not None + assert len(next_page.data) <= 2 + + +class TestRestoreJobListErrors: + def test_list_restore_jobs_with_invalid_limit(self, pc: Pinecone): + with pytest.raises(PineconeApiValueError): + pc.db.restore_job.list(limit=-1) + + def test_list_restore_jobs_with_invalid_pagination_token(self, pc: Pinecone): + with pytest.raises(PineconeApiException): + pc.db.restore_job.list(pagination_token="invalid") diff --git a/tests/integration/control/serverless/conftest.py b/tests/integration/control/serverless/conftest.py index d32d26fc..d1d880d2 100644 --- a/tests/integration/control/serverless/conftest.py +++ b/tests/integration/control/serverless/conftest.py @@ -11,10 +11,7 @@ @pytest.fixture() def client(): - api_key = get_environment_var("PINECONE_API_KEY") - return Pinecone( - api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} - ) + return Pinecone() @pytest.fixture() diff --git a/tests/integration/control/serverless/test_create_index_for_model_errors.py b/tests/integration/control/serverless/test_create_index_for_model_errors.py index c08c581d..0fa372d5 100644 --- a/tests/integration/control/serverless/test_create_index_for_model_errors.py +++ b/tests/integration/control/serverless/test_create_index_for_model_errors.py @@ -40,6 +40,7 @@ def test_invalid_cloud(self, client, index_name): ) assert "Invalid value for `cloud`" in str(e.value) + @pytest.mark.skip(reason="This seems to not raise an error in preprod-aws-0") def test_invalid_region(self, client, index_name): with pytest.raises(PineconeApiException) as e: client.create_index_for_model( diff --git a/tests/integration/control/serverless/test_describe_index.py b/tests/integration/control/serverless/test_describe_index.py index 18d22372..87f0876c 100644 --- a/tests/integration/control/serverless/test_describe_index.py +++ b/tests/integration/control/serverless/test_describe_index.py @@ -26,7 +26,7 @@ def test_describe_index_when_ready(self, client, ready_sl_index, create_sl_index assert description.status.ready == True def test_describe_index_when_not_ready(self, client, notready_sl_index, create_sl_index_params): - description = client.describe_index(notready_sl_index) + description = client.describe_index(name=notready_sl_index) assert isinstance(description, IndexModel) assert 
description.name == notready_sl_index diff --git a/tests/integration/control_asyncio/conftest.py b/tests/integration/control_asyncio/conftest.py index 99cf3c91..acbcce0b 100644 --- a/tests/integration/control_asyncio/conftest.py +++ b/tests/integration/control_asyncio/conftest.py @@ -1,10 +1,8 @@ import pytest import time import random -import asyncio from ..helpers import get_environment_var, generate_index_name import logging -from typing import Callable, Optional, Awaitable, Union from pinecone import ( CloudProvider, @@ -37,79 +35,6 @@ def build_pc(): return build_client -async def poll_for_freshness(asyncio_idx, target_namespace, target_vector_count): - max_wait_time = 60 * 3 # 3 minutes - time_waited = 0 - wait_per_iteration = 5 - - while True: - stats = await asyncio_idx.describe_index_stats() - logger.debug( - "Polling for freshness on index %s. Current vector count: %s. Waiting for: %s", - asyncio_idx, - stats.total_vector_count, - target_vector_count, - ) - if target_namespace == "": - if stats.total_vector_count >= target_vector_count: - break - else: - if ( - target_namespace in stats.namespaces - and stats.namespaces[target_namespace].vector_count >= target_vector_count - ): - break - time_waited += wait_per_iteration - if time_waited >= max_wait_time: - raise TimeoutError( - "Timeout waiting for index to have expected vector count of {}".format( - target_vector_count - ) - ) - await asyncio.sleep(wait_per_iteration) - - return stats - - -async def wait_until( - condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]], - timeout: Optional[float] = 10.0, - interval: float = 0.1, -) -> None: - """ - Waits asynchronously until the given (async or sync) condition returns True or times out. - - Args: - condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over. - timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely. - interval: Time in seconds between checks of the condition. - - Raises: - asyncio.TimeoutError: If the condition is not met within the timeout period. - """ - start_time = asyncio.get_event_loop().time() - - while True: - result = await condition() if asyncio.iscoroutinefunction(condition) else condition() - if result: - return - - if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: - raise asyncio.TimeoutError("Condition not met within the timeout period.") - - remaining_time = ( - (start_time + timeout) - asyncio.get_event_loop().time() - if timeout is not None - else None - ) - logger.debug( - "Condition not met yet. Waiting for %.2f seconds. 
Timeout in %.2f seconds.", - interval, - remaining_time, - ) - await asyncio.sleep(interval) - - @pytest.fixture() def serverless_cloud(): return get_environment_var("SERVERLESS_CLOUD", "aws") @@ -201,6 +126,6 @@ def cleanup(client, index_name): try: logger.debug("Attempting to delete index with name: " + index_name) - client.delete_index(index_name, -1) + client.delete_index(name=index_name, timeout=-1) except Exception: pass diff --git a/tests/integration/control_asyncio/resources/__init__.py b/tests/integration/control_asyncio/resources/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control_asyncio/resources/backup/__init__.py b/tests/integration/control_asyncio/resources/backup/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control_asyncio/resources/backup/test_backup.py b/tests/integration/control_asyncio/resources/backup/test_backup.py new file mode 100644 index 00000000..76b720f4 --- /dev/null +++ b/tests/integration/control_asyncio/resources/backup/test_backup.py @@ -0,0 +1,201 @@ +import pytest +import random +import asyncio +from ....helpers import random_string +import logging +from pinecone import PineconeAsyncio + +logger = logging.getLogger(__name__) + + +@pytest.mark.asyncio +class TestBackups: + async def test_create_backup(self, ready_sl_index, index_tags): + async with PineconeAsyncio() as pc: + desc = await pc.db.index.describe(name=ready_sl_index) + dimension = desc.dimension + + # Upsert some sample data + ns = random_string(10) + async with pc.IndexAsyncio(host=desc.host) as idx: + batch_size = 100 + num_batches = 10 + for _ in range(num_batches): + await idx.upsert( + vectors=[ + { + "id": random_string(15), + "values": [random.random() for _ in range(dimension)], + } + for _ in range(batch_size) + ], + namespace=ns, + ) + + logger.debug("Sleeping for 180 seconds to ensure vectors are indexed") + await asyncio.sleep(180) + + index_stats = await idx.describe_index_stats() + logger.debug(f"Index stats for index {ready_sl_index}: {index_stats}") + + backup_name = "backup-" + random_string(10) + backup = await pc.db.backup.create(backup_name=backup_name, index_name=ready_sl_index) + assert backup.backup_id is not None + assert backup.name == backup_name + assert backup.source_index_name == ready_sl_index + + # Describe the backup + backup_desc = await pc.db.backup.describe(backup_id=backup.backup_id) + assert backup_desc.name == backup_name + assert backup_desc.backup_id == backup.backup_id + assert backup_desc.source_index_name == ready_sl_index + logger.info(f"Backup description: {backup_desc}") + + # Wait for the backup to be ready before proceeding + backup_ready = False + max_wait = 5 * 60 + while not backup_ready: + backup_desc = await pc.db.backup.describe(backup_id=backup.backup_id) + logger.info(f"Backup description: {backup_desc}") + if backup_desc.status == "Ready": + backup_ready = True + else: + if max_wait <= 0: + raise Exception("Backup did not become ready in time") + max_wait -= 5 + await asyncio.sleep(5) + + # Verify that the backup shows in list + backups_list = await pc.db.backup.list(index_name=ready_sl_index) + assert len(backups_list) >= 1 + assert any(b.name == backup_name for b in backups_list) + assert any(b.backup_id == backup.backup_id for b in backups_list) + assert any(b.source_index_name == ready_sl_index for b in backups_list) + + # Create index from backup + new_index_name = "from-backup-" + random_string(10) + new_index = await 
pc.db.index.create_from_backup( + name=new_index_name, backup_id=backup.backup_id, tags=index_tags + ) + assert new_index.name == new_index_name + assert new_index.tags is not None + assert new_index.dimension == desc.dimension + assert new_index.metric == desc.metric + + # Can list restore jobs + restore_jobs = await pc.db.restore_job.list() + assert len(restore_jobs) >= 1 + + # Verify that the new index has the same data as the original index + new_desc = await pc.db.index.describe(name=new_index_name) + async with pc.IndexAsyncio(host=new_desc.host) as new_idx: + stats = await new_idx.describe_index_stats() + logger.info(f"New index stats: {stats}") + assert stats.namespaces[ns].vector_count == batch_size * num_batches + + # Delete the new index + await pc.db.index.delete(name=new_index_name) + + # Delete the backup + await pc.db.backup.delete(backup_id=backup.backup_id) + + # Verify that the backup is deleted + with pytest.raises(Exception): + await pc.db.backup.describe(backup_id=backup.backup_id) + + async def test_create_backup_legacy_syntax(self, ready_sl_index, index_tags): + async with PineconeAsyncio() as pc: + desc = await pc.describe_index(name=ready_sl_index) + dimension = desc.dimension + + # Upsert some sample data + ns = random_string(10) + async with pc.IndexAsyncio(host=desc.host) as idx: + batch_size = 100 + num_batches = 10 + for _ in range(num_batches): + await idx.upsert( + vectors=[ + { + "id": random_string(15), + "values": [random.random() for _ in range(dimension)], + } + for _ in range(batch_size) + ], + namespace=ns, + ) + + logger.debug("Sleeping for 180 seconds to ensure vectors are indexed") + await asyncio.sleep(180) + + index_stats = await idx.describe_index_stats() + logger.debug(f"Index stats for index {ready_sl_index}: {index_stats}") + + backup_name = "backup-" + random_string(10) + backup = await pc.create_backup(backup_name=backup_name, index_name=ready_sl_index) + assert backup.backup_id is not None + assert backup.name == backup_name + assert backup.source_index_name == ready_sl_index + + # Describe the backup + backup_desc = await pc.describe_backup(backup_id=backup.backup_id) + assert backup_desc.name == backup_name + assert backup_desc.backup_id == backup.backup_id + assert backup_desc.source_index_name == ready_sl_index + logger.info(f"Backup description: {backup_desc}") + + # Wait for the backup to be ready before proceeding + backup_ready = False + max_wait = 5 * 60 + while not backup_ready: + backup_desc = await pc.describe_backup(backup_id=backup.backup_id) + logger.info(f"Backup description: {backup_desc}") + if backup_desc.status == "Ready": + backup_ready = True + else: + if max_wait <= 0: + raise Exception("Backup did not become ready in time") + max_wait -= 5 + await asyncio.sleep(5) + + # Verify that the backup shows in list + backups_list = await pc.list_backups(index_name=ready_sl_index) + assert len(backups_list) >= 1 + assert any(b.name == backup_name for b in backups_list) + assert any(b.backup_id == backup.backup_id for b in backups_list) + assert any(b.source_index_name == ready_sl_index for b in backups_list) + + # Create index from backup + new_index_name = "from-backup-" + random_string(10) + new_index = await pc.create_index_from_backup( + name=new_index_name, backup_id=backup.backup_id, tags=index_tags + ) + assert new_index.name == new_index_name + assert new_index.tags is not None + assert new_index.dimension == desc.dimension + assert new_index.metric == desc.metric + + # Can list restore jobs + restore_jobs = await 
pc.list_restore_jobs() + assert len(restore_jobs) >= 1 + + # Verify that the new index has the same data as the original index + new_desc = await pc.db.index.describe(name=new_index_name) + async with pc.IndexAsyncio(host=new_desc.host) as new_idx: + stats = await new_idx.describe_index_stats() + logger.info(f"New index stats: {stats}") + assert stats.namespaces[ns].vector_count == batch_size * num_batches + + # Delete the new index + await pc.delete_index(name=new_index_name) + + # Delete the backup + await pc.delete_backup(backup_id=backup.backup_id) + + # Verify that the backup is deleted + with pytest.raises(Exception): + await pc.describe_backup(backup_id=backup.backup_id) + + # Verify that no backups remain for the source index + backup_list = await pc.list_backups(index_name=ready_sl_index) + assert len(backup_list) == 0 diff --git a/tests/integration/control_asyncio/resources/conftest.py b/tests/integration/control_asyncio/resources/conftest.py new file mode 100644 index 00000000..f7135575 --- /dev/null +++ b/tests/integration/control_asyncio/resources/conftest.py @@ -0,0 +1,66 @@ +import pytest +import uuid +import logging +import dotenv +import os +from pinecone import Pinecone, PodIndexEnvironment +from ...helpers import delete_indexes_from_run, delete_backups_from_run, default_create_index_params + +dotenv.load_dotenv() + +logger = logging.getLogger(__name__) +""" @private """ + +# Generate a unique ID for the entire test run +RUN_ID = str(uuid.uuid4()) + + +@pytest.fixture() +def pc(): + return Pinecone() + + +@pytest.fixture +def pod_environment(): + return os.getenv("POD_ENVIRONMENT", PodIndexEnvironment.US_EAST1_AWS.value) + + +@pytest.fixture() +def create_index_params(request): + return default_create_index_params(request, RUN_ID) + + +@pytest.fixture() +def index_name(create_index_params): + return create_index_params["name"] + + +@pytest.fixture() +def index_tags(create_index_params): + return create_index_params["tags"] + + +@pytest.fixture() +def ready_sl_index(pc, index_name, create_index_params): + create_index_params["timeout"] = None + pc.create_index(**create_index_params) + yield index_name + pc.db.index.delete(name=index_name, timeout=-1) + + +@pytest.fixture() +def notready_sl_index(pc, index_name, create_index_params): + pc.create_index(**create_index_params, timeout=-1) + yield index_name + + +def pytest_sessionfinish(session, exitstatus): + """ + Hook that runs after all tests have completed. + This is a good place to clean up any resources that were created during the test session.
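+ Here it deletes any indexes and backups tagged with this run's RUN_ID.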
+ """ + logger.info("Running final cleanup after all tests...") + + pc = Pinecone() + delete_indexes_from_run(pc, RUN_ID) + delete_backups_from_run(pc, RUN_ID) diff --git a/tests/integration/control_asyncio/resources/index/__init__.py b/tests/integration/control_asyncio/resources/index/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/control_asyncio/resources/index/conftest.py b/tests/integration/control_asyncio/resources/index/conftest.py new file mode 100644 index 00000000..de50f077 --- /dev/null +++ b/tests/integration/control_asyncio/resources/index/conftest.py @@ -0,0 +1,18 @@ +import pytest + +from pinecone import CloudProvider, AwsRegion, ServerlessSpec + + +@pytest.fixture() +def spec1(serverless_cloud, serverless_region): + return {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} + + +@pytest.fixture() +def spec2(): + return ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1) + + +@pytest.fixture() +def spec3(): + return {"serverless": {"cloud": CloudProvider.AWS, "region": AwsRegion.US_EAST_1}} diff --git a/tests/integration/control_asyncio/resources/index/test_create.py b/tests/integration/control_asyncio/resources/index/test_create.py new file mode 100644 index 00000000..9643e3a5 --- /dev/null +++ b/tests/integration/control_asyncio/resources/index/test_create.py @@ -0,0 +1,178 @@ +import pytest +from pinecone import ( + PineconeAsyncio, + Metric, + VectorType, + DeletionProtection, + ServerlessSpec, + CloudProvider, + AwsRegion, +) + + +@pytest.mark.asyncio +class TestAsyncioCreateIndex: + @pytest.mark.parametrize("spec_fixture", ("spec1", "spec2", "spec3")) + async def test_create_index(self, index_name, request, spec_fixture, index_tags): + pc = PineconeAsyncio() + spec = request.getfixturevalue(spec_fixture) + + resp = await pc.db.index.create(name=index_name, dimension=10, spec=spec, tags=index_tags) + + assert resp.name == index_name + assert resp.dimension == 10 + assert resp.metric == "cosine" # default value + assert resp.vector_type == "dense" # default value + assert resp.deletion_protection == "disabled" # default value + + desc = await pc.db.index.describe(name=index_name) + assert desc.name == index_name + assert desc.dimension == 10 + assert desc.metric == "cosine" + assert desc.deletion_protection == "disabled" # default value + assert desc.vector_type == "dense" # default value + await pc.close() + + async def test_create_skip_wait(self, index_name, spec1, index_tags): + pc = PineconeAsyncio() + resp = await pc.db.index.create( + name=index_name, dimension=10, spec=spec1, timeout=-1, tags=index_tags + ) + assert resp.name == index_name + assert resp.dimension == 10 + assert resp.metric == "cosine" + await pc.close() + + async def test_create_infinite_wait(self, index_name, spec1, index_tags): + async with PineconeAsyncio() as pc: + resp = await pc.db.index.create( + name=index_name, dimension=10, spec=spec1, timeout=None, tags=index_tags + ) + assert resp.name == index_name + assert resp.dimension == 10 + assert resp.metric == "cosine" + + @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) + async def test_create_default_index_with_metric(self, index_name, metric, spec1, index_tags): + pc = PineconeAsyncio() + + await pc.db.index.create( + name=index_name, dimension=10, spec=spec1, metric=metric, tags=index_tags + ) + desc = await pc.db.index.describe(name=index_name) + if isinstance(metric, str): + assert desc.metric == metric + else: + assert desc.metric == 
metric.value + assert desc.vector_type == "dense" + await pc.close() + + @pytest.mark.parametrize( + "metric_enum,vector_type_enum,dim", + [ + (Metric.COSINE, VectorType.DENSE, 10), + (Metric.EUCLIDEAN, VectorType.DENSE, 10), + (Metric.DOTPRODUCT, VectorType.SPARSE, None), + ], + ) + async def test_create_with_enum_values_and_tags( + self, index_name, metric_enum, vector_type_enum, dim, index_tags + ): + pc = PineconeAsyncio() + args = { + "name": index_name, + "metric": metric_enum, + "vector_type": vector_type_enum, + "deletion_protection": DeletionProtection.DISABLED, + "spec": ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), + "tags": index_tags, + } + if dim is not None: + args["dimension"] = dim + + await pc.db.index.create(**args) + + desc = await pc.db.index.describe(name=index_name) + assert desc.metric == metric_enum.value + assert desc.vector_type == vector_type_enum.value + assert desc.dimension == dim + assert desc.deletion_protection == DeletionProtection.DISABLED.value + assert desc.name == index_name + assert desc.spec.serverless.cloud == "aws" + assert desc.spec.serverless.region == "us-east-1" + assert desc.tags.to_dict() == index_tags + await pc.db.index.delete(name=index_name) + await pc.close() + + @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) + async def test_create_dense_index_with_metric(self, index_name, spec1, metric, index_tags): + pc = PineconeAsyncio() + + await pc.db.index.create( + name=index_name, + dimension=10, + spec=spec1, + metric=metric, + vector_type=VectorType.DENSE, + tags=index_tags, + ) + + desc = await pc.db.index.describe(name=index_name) + assert desc.metric == metric + assert desc.vector_type == "dense" + await pc.close() + + async def test_create_with_optional_tags(self, index_name, spec1, index_tags): + pc = PineconeAsyncio() + + await pc.db.index.create(name=index_name, dimension=10, spec=spec1, tags=index_tags) + + desc = await pc.db.index.describe(name=index_name) + assert desc.tags.to_dict() == index_tags + await pc.db.index.delete(name=index_name) + await pc.close() + + async def test_create_sparse_index(self, index_name, spec1, index_tags): + pc = PineconeAsyncio() + + await pc.db.index.create( + name=index_name, + spec=spec1, + metric=Metric.DOTPRODUCT, + vector_type=VectorType.SPARSE, + tags=index_tags, + ) + + desc = await pc.db.index.describe(name=index_name) + assert desc.vector_type == "sparse" + assert desc.dimension is None + assert desc.metric == "dotproduct" + await pc.db.index.delete(name=index_name) + await pc.close() + + async def test_create_with_deletion_protection(self, index_name, spec1, index_tags): + pc = PineconeAsyncio() + + await pc.db.index.create( + name=index_name, + spec=spec1, + metric=Metric.DOTPRODUCT, + vector_type=VectorType.SPARSE, + deletion_protection=DeletionProtection.ENABLED, + tags=index_tags, + ) + + desc = await pc.db.index.describe(name=index_name) + assert desc.deletion_protection == "enabled" + assert desc.metric == "dotproduct" + assert desc.vector_type == "sparse" + assert desc.dimension is None + + with pytest.raises(Exception): + await pc.delete_index(index_name) + + await pc.configure_index(index_name, deletion_protection=DeletionProtection.DISABLED) + + desc2 = await pc.db.index.describe(name=index_name) + assert desc2.deletion_protection == "disabled" + await pc.close() diff --git a/tests/integration/control_asyncio/resources/restore_job/__init__.py b/tests/integration/control_asyncio/resources/restore_job/__init__.py new file mode 100644 
index 00000000..e69de29b diff --git a/tests/integration/control_asyncio/resources/restore_job/test_describe.py b/tests/integration/control_asyncio/resources/restore_job/test_describe.py new file mode 100644 index 00000000..d32595cf --- /dev/null +++ b/tests/integration/control_asyncio/resources/restore_job/test_describe.py @@ -0,0 +1,42 @@ +import pytest +from pinecone import PineconeAsyncio, PineconeApiException +import logging +from datetime import datetime + +logger = logging.getLogger(__name__) + + +@pytest.mark.asyncio +class TestRestoreJobDescribe: + async def test_describe_restore_job(self): + async with PineconeAsyncio() as pc: + jobs = await pc.db.restore_job.list() + assert len(jobs.data) >= 1 + + restore_job_id = jobs.data[0].restore_job_id + restore_job = await pc.db.restore_job.describe(job_id=restore_job_id) + logger.debug(f"Restore job: {restore_job}") + + assert restore_job.restore_job_id == restore_job_id + assert restore_job.backup_id is not None + assert isinstance(restore_job.status, str) + assert isinstance(restore_job.backup_id, str) + assert isinstance(restore_job.completed_at, datetime) + assert isinstance(restore_job.created_at, datetime) + assert isinstance(restore_job.percent_complete, float) + assert isinstance(restore_job.target_index_id, str) + assert isinstance(restore_job.target_index_name, str) + + async def test_describe_restore_job_legacy_syntax(self): + async with PineconeAsyncio() as pc: + jobs = await pc.list_restore_jobs() + assert len(jobs.data) >= 1 + + restore_job_id = jobs.data[0].restore_job_id + restore_job = await pc.describe_restore_job(job_id=restore_job_id) + logger.debug(f"Restore job: {restore_job}") + + async def test_describe_restore_job_with_invalid_job_id(self): + async with PineconeAsyncio() as pc: + with pytest.raises(PineconeApiException): + await pc.db.restore_job.describe(job_id="invalid") diff --git a/tests/integration/control_asyncio/resources/restore_job/test_list.py b/tests/integration/control_asyncio/resources/restore_job/test_list.py new file mode 100644 index 00000000..0e0814da --- /dev/null +++ b/tests/integration/control_asyncio/resources/restore_job/test_list.py @@ -0,0 +1,67 @@ +import pytest +import logging +from pinecone import PineconeAsyncio, PineconeApiValueError, PineconeApiException + +logger = logging.getLogger(__name__) + + +@pytest.mark.asyncio +class TestRestoreJobList: + async def test_list_restore_jobs_no_arguments(self): + async with PineconeAsyncio() as pc: + restore_jobs = await pc.db.restore_job.list() + assert restore_jobs.data is not None + logger.debug(f"Restore jobs count: {len(restore_jobs.data)}") + + # This assumes the backup test has been run at least once + # in the same project. 
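+ # A restore job is created whenever an index is created from a backup.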
+ assert len(restore_jobs.data) >= 1 + + async def test_list_restore_jobs_with_optional_arguments(self): + async with PineconeAsyncio() as pc: + restore_jobs = await pc.db.restore_job.list(limit=2) + assert restore_jobs.data is not None + logger.debug(f"Restore jobs count: {len(restore_jobs.data)}") + assert len(restore_jobs.data) <= 2 + + if len(restore_jobs.data) == 2: + logger.debug(f"Restore jobs pagination: {restore_jobs.pagination}") + assert restore_jobs.pagination is not None + assert restore_jobs.pagination.next is not None + + next_page = await pc.db.restore_job.list( + limit=2, pagination_token=restore_jobs.pagination.next + ) + assert next_page.data is not None + assert len(next_page.data) <= 2 + + async def test_list_restore_jobs_legacy_syntax(self): + async with PineconeAsyncio() as pc: + restore_jobs = await pc.list_restore_jobs(limit=2) + assert restore_jobs.data is not None + logger.debug(f"Restore jobs count: {len(restore_jobs.data)}") + assert len(restore_jobs.data) <= 2 + + if len(restore_jobs.data) == 2: + logger.debug(f"Restore jobs pagination: {restore_jobs.pagination}") + assert restore_jobs.pagination is not None + assert restore_jobs.pagination.next is not None + + next_page = await pc.list_restore_jobs( + limit=2, pagination_token=restore_jobs.pagination.next + ) + assert next_page.data is not None + assert len(next_page.data) <= 2 + + +@pytest.mark.asyncio +class TestRestoreJobListErrors: + async def test_list_restore_jobs_with_invalid_limit(self): + async with PineconeAsyncio() as pc: + with pytest.raises(PineconeApiValueError): + await pc.db.restore_job.list(limit=-1) + + async def test_list_restore_jobs_with_invalid_pagination_token(self): + async with PineconeAsyncio() as pc: + with pytest.raises(PineconeApiException): + await pc.db.restore_job.list(pagination_token="invalid") diff --git a/tests/integration/control_asyncio/test_create_index.py b/tests/integration/control_asyncio/test_create_index.py index 334ba86a..683c53a8 100644 --- a/tests/integration/control_asyncio/test_create_index.py +++ b/tests/integration/control_asyncio/test_create_index.py @@ -122,18 +122,16 @@ async def test_create_with_optional_tags(self, index_name, spec1): await pc.close() async def test_create_sparse_index(self, index_name, spec1): - pc = PineconeAsyncio() - - await pc.create_index( - name=index_name, spec=spec1, metric=Metric.DOTPRODUCT, vector_type=VectorType.SPARSE - ) - - desc = await pc.describe_index(index_name) - assert desc.vector_type == "sparse" - assert desc.dimension is None - assert desc.vector_type == "sparse" - assert desc.metric == "dotproduct" - await pc.close() + async with PineconeAsyncio() as pc: + await pc.create_index( + name=index_name, spec=spec1, metric=Metric.DOTPRODUCT, vector_type=VectorType.SPARSE + ) + + desc = await pc.describe_index(index_name) + assert desc.vector_type == "sparse" + assert desc.dimension is None + assert desc.vector_type == "sparse" + assert desc.metric == "dotproduct" async def test_create_with_deletion_protection(self, index_name, spec1): pc = PineconeAsyncio() diff --git a/tests/integration/control_asyncio/test_create_index_for_model_errors.py b/tests/integration/control_asyncio/test_create_index_for_model_errors.py index 36f93c7b..2d104a25 100644 --- a/tests/integration/control_asyncio/test_create_index_for_model_errors.py +++ b/tests/integration/control_asyncio/test_create_index_for_model_errors.py @@ -48,6 +48,7 @@ async def test_invalid_cloud(self, index_name): assert "Invalid value for `cloud`" in str(e.value) await 
pc.close() + @pytest.mark.skip(reason="This seems to not raise an error in preprod-aws-0") async def test_invalid_region(self, index_name): pc = PineconeAsyncio() diff --git a/tests/integration/control_asyncio/test_describe_index.py b/tests/integration/control_asyncio/test_describe_index.py index f9c5614d..e8c4aff3 100644 --- a/tests/integration/control_asyncio/test_describe_index.py +++ b/tests/integration/control_asyncio/test_describe_index.py @@ -31,7 +31,7 @@ async def test_describe_index_when_ready(self, ready_sl_index, create_sl_index_p async def test_describe_index_when_not_ready(self, notready_sl_index, create_sl_index_params): pc = PineconeAsyncio() - description = await pc.describe_index(notready_sl_index) + description = await pc.describe_index(name=notready_sl_index) assert isinstance(description, IndexModel) assert description.name == notready_sl_index diff --git a/tests/integration/data/conftest.py b/tests/integration/data/conftest.py index c7498cb8..9fa7b997 100644 --- a/tests/integration/data/conftest.py +++ b/tests/integration/data/conftest.py @@ -1,12 +1,20 @@ import pytest import os import json -from ..helpers import get_environment_var, generate_index_name +import uuid +from ..helpers import get_environment_var, generate_index_name, index_tags as index_tags_helper import logging from pinecone import EmbedModel, CloudProvider, AwsRegion, IndexEmbed logger = logging.getLogger(__name__) +RUN_ID = str(uuid.uuid4()) + + +@pytest.fixture(scope="session") +def index_tags(request): + return index_tags_helper(request, RUN_ID) + def api_key(): return get_environment_var("PINECONE_API_KEY") @@ -90,7 +98,7 @@ def model_idx(client, model_index_name, model_index_host): @pytest.fixture(scope="session") -def model_index_host(model_index_name): +def model_index_host(model_index_name, index_tags): pc = build_client() if model_index_name not in pc.list_indexes().names(): @@ -104,6 +112,7 @@ def model_index_host(model_index_name): field_map={"text": "my_text_field"}, metric="cosine", ), + tags=index_tags, ) else: logger.info(f"Index {model_index_name} already exists") @@ -116,12 +125,12 @@ def model_index_host(model_index_name): @pytest.fixture(scope="session") -def index_host(index_name, metric, spec): +def index_host(index_name, metric, spec, index_tags): pc = build_client() if index_name not in pc.list_indexes().names(): logger.info(f"Creating index {index_name}") - pc.create_index(name=index_name, dimension=2, metric=metric, spec=spec) + pc.create_index(name=index_name, dimension=2, metric=metric, spec=spec, tags=index_tags) else: logger.info(f"Index {index_name} already exists") @@ -133,13 +142,17 @@ def index_host(index_name, metric, spec): @pytest.fixture(scope="session") -def sparse_index_host(sparse_index_name, spec): +def sparse_index_host(sparse_index_name, spec, index_tags): pc = build_client() if sparse_index_name not in pc.list_indexes().names(): logger.info(f"Creating index {sparse_index_name}") pc.create_index( - name=sparse_index_name, metric="dotproduct", spec=spec, vector_type="sparse" + name=sparse_index_name, + metric="dotproduct", + spec=spec, + vector_type="sparse", + tags=index_tags, ) else: logger.info(f"Index {sparse_index_name} already exists") diff --git a/tests/integration/data/seed.py b/tests/integration/data/seed.py index 827aea9a..2019761e 100644 --- a/tests/integration/data/seed.py +++ b/tests/integration/data/seed.py @@ -1,6 +1,5 @@ from ..helpers import poll_fetch_for_ids_in_namespace, embedding_values from pinecone import Vector - import itertools @@ 
-133,3 +132,6 @@ def setup_weird_ids_data(idx, target_namespace, wait): for i in range(0, len(weird_ids), batch_size): chunk = weird_ids[i : i + batch_size] idx.upsert(vectors=[(x, embedding_values(2)) for x in chunk], namespace=target_namespace) + + if wait: + poll_fetch_for_ids_in_namespace(idx, ids=weird_ids, namespace=target_namespace) diff --git a/tests/integration/data/test_query.py b/tests/integration/data/test_query.py index 99971f8d..2a40968e 100644 --- a/tests/integration/data/test_query.py +++ b/tests/integration/data/test_query.py @@ -2,6 +2,7 @@ from pinecone import QueryResponse, Vector from ..helpers import embedding_values, poll_fetch_for_ids_in_namespace, random_string import logging +import time logger = logging.getLogger(__name__) @@ -62,6 +63,7 @@ def seed(idx, namespace): def seed_for_query(idx, query_namespace): seed(idx, query_namespace) seed(idx, "") + time.sleep(30) yield diff --git a/tests/integration/data/test_query_namespaces_sparse.py b/tests/integration/data/test_query_namespaces_sparse.py index 607798ea..958368b5 100644 --- a/tests/integration/data/test_query_namespaces_sparse.py +++ b/tests/integration/data/test_query_namespaces_sparse.py @@ -1,6 +1,6 @@ import pytest from ..helpers import random_string, poll_stats_for_namespace -from pinecone.data.query_results_aggregator import QueryResultsAggregatorInvalidTopKError +from pinecone.db_data.query_results_aggregator import QueryResultsAggregatorInvalidTopKError from pinecone import Vector, SparseValues diff --git a/tests/integration/data/test_search_and_upsert_records.py b/tests/integration/data/test_search_and_upsert_records.py index e83a5cd8..0a269a49 100644 --- a/tests/integration/data/test_search_and_upsert_records.py +++ b/tests/integration/data/test_search_and_upsert_records.py @@ -6,7 +6,7 @@ import os from pinecone import RerankModel, PineconeApiException -from pinecone.data import _Index +from pinecone.db_data import _Index logger = logging.getLogger(__name__) diff --git a/tests/integration/data/test_upsert_from_dataframe.py b/tests/integration/data/test_upsert_from_dataframe.py index 49bc9abc..4534bc4f 100644 --- a/tests/integration/data/test_upsert_from_dataframe.py +++ b/tests/integration/data/test_upsert_from_dataframe.py @@ -1,5 +1,5 @@ import pandas as pd -from pinecone.data import _Index +from pinecone.db_data import _Index from ..helpers import embedding_values, random_string diff --git a/tests/integration/data_asyncio/conftest.py b/tests/integration/data_asyncio/conftest.py index 6401e073..9769a5e9 100644 --- a/tests/integration/data_asyncio/conftest.py +++ b/tests/integration/data_asyncio/conftest.py @@ -2,7 +2,7 @@ import json import asyncio from ..helpers import get_environment_var, generate_index_name -from pinecone.data import _IndexAsyncio +from pinecone.db_data import _IndexAsyncio import logging from typing import Callable, Optional, Awaitable, Union diff --git a/tests/integration/helpers/__init__.py b/tests/integration/helpers/__init__.py index f233d089..afe12395 100644 --- a/tests/integration/helpers/__init__.py +++ b/tests/integration/helpers/__init__.py @@ -8,4 +8,8 @@ poll_fetch_for_ids_in_namespace, embedding_values, jsonprint, + index_tags, + delete_backups_from_run, + delete_indexes_from_run, + default_create_index_params, ) diff --git a/tests/integration/helpers/helpers.py b/tests/integration/helpers/helpers.py index 480585e5..8cb069dd 100644 --- a/tests/integration/helpers/helpers.py +++ b/tests/integration/helpers/helpers.py @@ -4,11 +4,14 @@ import random import string 
import logging +import uuid +import asyncio from typing import Any from datetime import datetime import json -from pinecone.data import _Index -from typing import List +from pinecone.db_data import _Index +from pinecone import Pinecone, NotFoundException, PineconeApiException +from typing import List, Callable, Awaitable, Optional, Union logger = logging.getLogger(__name__) @@ -91,6 +94,10 @@ def poll_stats_for_namespace( raise TimeoutError(f"Timed out waiting for namespace {namespace} to have vectors") else: total_time += delta_t + logger.debug(f"Found index stats: {stats}.") + logger.debug( + f"Waiting for {expected_count} vectors in namespace {namespace}. Found {stats.namespaces.get(namespace, {'vector_count': 0})['vector_count']} vectors." + ) time.sleep(delta_t) @@ -123,3 +130,164 @@ def fake_api_key(): def jsonprint(obj): print(json.dumps(obj.to_dict(), indent=2)) + + +def index_tags(request, run_id): + test_name = request.node.name + if test_name is None: + test_name = "" + else: + test_name = test_name.replace(":", "_").replace("[", "_").replace("]", "_") + + tags = { + "test-suite": "pinecone-python-client", + "test-run": run_id, + "test": test_name, + "created-at": datetime.now().strftime("%Y-%m-%d"), + } + + if os.getenv("USER"): + tags["user"] = os.getenv("USER") + return tags + + +def delete_backups_from_run(pc: Pinecone, run_id: str): + for backup in pc.db.backup.list(): + if backup.tags is not None and backup.tags.get("test-run") == run_id: + pc.db.backup.delete(backup_id=backup.backup_id) + else: + logger.info(f"Backup {backup.name} is not a test backup from run {run_id}. Skipping.") + + +def delete_indexes_from_run(pc: Pinecone, run_id: str): + indexes_to_delete = [] + + for index in pc.db.index.list(): + if index.tags is not None and index.tags.get("test-run") == run_id: + logger.info(f"Found index {index.name} to delete") + if index.deletion_protection == "enabled": + logger.info(f"Index {index.name} has deletion protection enabled. Disabling...") + pc.configure_index(index.name, deletion_protection="disabled") + else: + logger.debug( + f"Index {index.name} has deletion protection disabled. Proceeding to delete." + ) + + indexes_to_delete.append(index.name) + else: + logger.info(f"Index {index.name} is not a test index from run {run_id}. Skipping.") + + for index_name in indexes_to_delete: + delete_index_with_retry(client=pc, index_name=index_name, retries=3, sleep_interval=10) + + +def delete_index_with_retry( + client: Pinecone, index_name: str, retries: int = 0, sleep_interval: int = 5 +): + logger.info( + f"Deleting index {index_name}, retry {retries}, next sleep interval {sleep_interval}" + ) + try: + client.delete_index(index_name, -1) + except NotFoundException: + pass + except PineconeApiException as e: + if e.error.code == "PRECONDITON_FAILED": + if retries > 5: + raise Exception("Unable to delete index " + index_name) + time.sleep(sleep_interval) + delete_index_with_retry(client, index_name, retries + 1, sleep_interval * 2) + else: + print(e.__class__) + print(e) + raise Exception("Unable to delete index " + index_name) + except Exception as e: + logger.warning(f"Failed to delete index: {index_name}: {str(e)}") + raise Exception("Unable to delete index " + index_name) + + +async def asyncio_poll_for_freshness(asyncio_idx, target_namespace, target_vector_count): + max_wait_time = 60 * 3 # 3 minutes + time_waited = 0 + wait_per_iteration = 5 + + while True: + stats = await asyncio_idx.describe_index_stats() + logger.debug( + "Polling for freshness on index %s. Current vector count: %s.
Waiting for: %s", + asyncio_idx, + stats.total_vector_count, + target_vector_count, + ) + if target_namespace == "": + if stats.total_vector_count >= target_vector_count: + break + else: + if ( + target_namespace in stats.namespaces + and stats.namespaces[target_namespace].vector_count >= target_vector_count + ): + break + time_waited += wait_per_iteration + if time_waited >= max_wait_time: + raise TimeoutError( + "Timeout waiting for index to have expected vector count of {}".format( + target_vector_count + ) + ) + await asyncio.sleep(wait_per_iteration) + + return stats + + +async def asyncio_wait_until( + condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]], + timeout: Optional[float] = 10.0, + interval: float = 0.1, +) -> None: + """ + Waits asynchronously until the given (async or sync) condition returns True or times out. + + Args: + condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over. + timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely. + interval: Time in seconds between checks of the condition. + + Raises: + asyncio.TimeoutError: If the condition is not met within the timeout period. + """ + start_time = asyncio.get_event_loop().time() + + while True: + result = await condition() if asyncio.iscoroutinefunction(condition) else condition() + if result: + return + + if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: + raise asyncio.TimeoutError("Condition not met within the timeout period.") + + remaining_time = ( + (start_time + timeout) - asyncio.get_event_loop().time() + if timeout is not None + else None + ) + logger.debug( + "Condition not met yet. Waiting for %.2f seconds. Timeout in %.2f seconds.", + interval, + remaining_time, + ) + await asyncio.sleep(interval) + + +def default_create_index_params(request, run_id): + github_actor = os.getenv("GITHUB_ACTOR", None) + user = os.getenv("USER", None) + index_owner = github_actor or user or "unknown" + + index_name = f"{index_owner}-{str(uuid.uuid4())}" + tags = index_tags(request, run_id) + cloud = get_environment_var("SERVERLESS_CLOUD", "aws") + region = get_environment_var("SERVERLESS_REGION", "us-west-2") + + spec = {"serverless": {"cloud": cloud, "region": region}} + return {"name": index_name, "dimension": 10, "metric": "cosine", "spec": spec, "tags": tags} diff --git a/tests/integration/inference/asyncio/__init__.py b/tests/integration/inference/asyncio/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/inference/test_asyncio_inference.py b/tests/integration/inference/asyncio/test_embeddings.py similarity index 51% rename from tests/integration/inference/test_asyncio_inference.py rename to tests/integration/inference/asyncio/test_embeddings.py index 2870d1b3..423795bd 100644 --- a/tests/integration/inference/test_asyncio_inference.py +++ b/tests/integration/inference/asyncio/test_embeddings.py @@ -1,5 +1,5 @@ import pytest -from pinecone import PineconeAsyncio, PineconeApiException, RerankModel, EmbedModel +from pinecone import PineconeAsyncio, PineconeApiException, EmbedModel @pytest.mark.asyncio @@ -28,7 +28,7 @@ async def test_create_embeddings(self, model_input, model_output): individual_embedding = embeddings[0] assert len(individual_embedding.values) == 1024 - assert individual_embedding.vector_type.value == "dense" + assert individual_embedding.vector_type == "dense" assert len(individual_embedding["values"]) == 1024 await 
pc.close() @@ -132,126 +132,3 @@ async def test_can_attempt_to_use_unknown_models(self): ) assert "Model 'unknown-model' not found" in str(excinfo.value) await pc.close() - - -@pytest.mark.asyncio -class TestRerankAsyncio: - @pytest.mark.parametrize( - "model_input,model_output", - [ - (RerankModel.Bge_Reranker_V2_M3, "bge-reranker-v2-m3"), - ("bge-reranker-v2-m3", "bge-reranker-v2-m3"), - ], - ) - async def test_rerank_basic(self, model_input, model_output): - # Rerank model can be passed as string or enum - pc = PineconeAsyncio() - result = await pc.inference.rerank( - model=model_input, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - top_n=1, - return_documents=True, - ) - assert len(result.data) == 1 - assert result.data[0].index == 1 - assert result.data[0].document.text == "everyone loves dogs" - assert result.model == model_output - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - await pc.close() - - async def test_rerank_basic_document_dicts(self): - model = "bge-reranker-v2-m3" - pc = PineconeAsyncio() - result = await pc.inference.rerank( - model="bge-reranker-v2-m3", - query="i love dogs", - documents=[ - {"id": "123", "text": "dogs are pretty cool"}, - {"id": "789", "text": "I'm a cat person"}, - {"id": "456", "text": "everyone loves dogs"}, - ], - top_n=1, - return_documents=True, - ) - assert len(result.data) == 1 - assert result.data[0].index == 2 - assert result.data[0].document.text == "everyone loves dogs" - assert result.model == model - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - await pc.close() - - async def test_rerank_document_dicts_custom_field(self): - model = "bge-reranker-v2-m3" - pc = PineconeAsyncio() - result = await pc.inference.rerank( - model="bge-reranker-v2-m3", - query="i love dogs", - documents=[ - {"id": "123", "my_field": "dogs are pretty cool"}, - {"id": "456", "my_field": "everyone loves dogs"}, - {"id": "789", "my_field": "I'm a cat person"}, - ], - rank_fields=["my_field"], - top_n=1, - return_documents=True, - ) - assert len(result.data) == 1 - assert result.data[0].index == 1 - assert result.data[0].document.my_field == "everyone loves dogs" - assert result.model == model - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - await pc.close() - - async def test_rerank_basic_default_top_n(self): - model = "bge-reranker-v2-m3" - pc = PineconeAsyncio() - result = await pc.inference.rerank( - model=model, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - return_documents=True, - ) - assert len(result.data) == 3 - assert result.data[0].index == 1 - assert result.data[0].document.text == "everyone loves dogs" - assert result.model == model - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - await pc.close() - - async def test_rerank_no_return_documents(self): - pc = PineconeAsyncio() - model = pc.inference.RerankModel.Bge_Reranker_V2_M3 - result = await pc.inference.rerank( - model=model, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - return_documents=False, - ) - assert len(result.data) == 3 - assert result.data[0].index == 1 - assert not result.data[0].document - assert result.model == model.value - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - await pc.close() - 
- async def test_rerank_allows_unknown_models_to_be_passed(self): - pc = PineconeAsyncio() - - # We don't want to reject these requests client side because we want - # to remain forwards compatible with any new models that become available - model = "unknown-model" - with pytest.raises(PineconeApiException) as excinfo: - await pc.inference.rerank( - model=model, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - return_documents=False, - ) - assert "Model 'unknown-model' not found" in str(excinfo.value) - await pc.close() diff --git a/tests/integration/inference/asyncio/test_models.py b/tests/integration/inference/asyncio/test_models.py new file mode 100644 index 00000000..984c6835 --- /dev/null +++ b/tests/integration/inference/asyncio/test_models.py @@ -0,0 +1,88 @@ +import pytest +from pinecone import PineconeAsyncio +import logging + +logger = logging.getLogger(__name__) + + +@pytest.mark.asyncio +class TestListModels: + async def test_list_models(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.list_models() + assert len(models) > 0 + logger.info(f"Models[0]: {models[0]}") + assert models[0].model is not None + assert models[0].short_description is not None + assert models[0].type is not None + assert models[0].supported_parameters is not None + assert models[0].modality is not None + assert models[0].max_sequence_length is not None + assert models[0].max_batch_size is not None + assert models[0].provider_name is not None + + async def test_list_models_with_type(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.list_models(type="embed") + assert len(models) > 0 + assert models[0].type == "embed" + + models2 = await pc.inference.list_models(type="rerank") + assert len(models2) > 0 + assert models2[0].type == "rerank" + + async def test_list_models_with_vector_type(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.list_models(vector_type="dense") + assert len(models) > 0 + assert models[0].vector_type == "dense" + + models2 = await pc.inference.list_models(vector_type="sparse") + assert len(models2) > 0 + assert models2[0].vector_type == "sparse" + + async def test_list_models_with_type_and_vector_type(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.list_models(type="embed", vector_type="dense") + assert len(models) > 0 + assert models[0].type == "embed" + assert models[0].vector_type == "dense" + + async def test_list_models_new_syntax(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.model.list(type="embed", vector_type="dense") + assert len(models) > 0 + logger.info(f"Models[0]: {models[0]}") + assert models[0].model is not None + assert models[0].short_description is not None + + +@pytest.mark.asyncio +class TestGetModel: + async def test_get_model(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.list_models() + first_model = models[0] + + model = await pc.inference.get_model(model_name=first_model.model) + assert model.model == first_model.model + assert model.short_description == first_model.short_description + assert model.type == first_model.type + assert model.supported_parameters == first_model.supported_parameters + assert model.modality == first_model.modality + assert model.max_sequence_length == first_model.max_sequence_length + assert model.max_batch_size == first_model.max_batch_size + assert model.provider_name == first_model.provider_name + + async def 
test_get_model_new_syntax(self): + async with PineconeAsyncio() as pc: + models = await pc.inference.model.list() + first_model = models[0] + + model = await pc.inference.model.get(model_name=first_model.model) + assert model.model == first_model.model + assert model.short_description == first_model.short_description + assert model.type == first_model.type + assert model.supported_parameters == first_model.supported_parameters + assert model.modality == first_model.modality + assert model.max_sequence_length == first_model.max_sequence_length diff --git a/tests/integration/inference/asyncio/test_rerank.py b/tests/integration/inference/asyncio/test_rerank.py new file mode 100644 index 00000000..9009f262 --- /dev/null +++ b/tests/integration/inference/asyncio/test_rerank.py @@ -0,0 +1,125 @@ +import pytest +from pinecone import PineconeAsyncio, PineconeApiException, RerankModel + + +@pytest.mark.asyncio +class TestRerankAsyncio: + @pytest.mark.parametrize( + "model_input,model_output", + [ + (RerankModel.Bge_Reranker_V2_M3, "bge-reranker-v2-m3"), + ("bge-reranker-v2-m3", "bge-reranker-v2-m3"), + ], + ) + async def test_rerank_basic(self, model_input, model_output): + # Rerank model can be passed as string or enum + pc = PineconeAsyncio() + result = await pc.inference.rerank( + model=model_input, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + top_n=1, + return_documents=True, + ) + assert len(result.data) == 1 + assert result.data[0].index == 1 + assert result.data[0].document.text == "everyone loves dogs" + assert result.model == model_output + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + await pc.close() + + async def test_rerank_basic_document_dicts(self): + model = "bge-reranker-v2-m3" + pc = PineconeAsyncio() + result = await pc.inference.rerank( + model="bge-reranker-v2-m3", + query="i love dogs", + documents=[ + {"id": "123", "text": "dogs are pretty cool"}, + {"id": "789", "text": "I'm a cat person"}, + {"id": "456", "text": "everyone loves dogs"}, + ], + top_n=1, + return_documents=True, + ) + assert len(result.data) == 1 + assert result.data[0].index == 2 + assert result.data[0].document.text == "everyone loves dogs" + assert result.model == model + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + await pc.close() + + async def test_rerank_document_dicts_custom_field(self): + model = "bge-reranker-v2-m3" + pc = PineconeAsyncio() + result = await pc.inference.rerank( + model="bge-reranker-v2-m3", + query="i love dogs", + documents=[ + {"id": "123", "my_field": "dogs are pretty cool"}, + {"id": "456", "my_field": "everyone loves dogs"}, + {"id": "789", "my_field": "I'm a cat person"}, + ], + rank_fields=["my_field"], + top_n=1, + return_documents=True, + ) + assert len(result.data) == 1 + assert result.data[0].index == 1 + assert result.data[0].document.my_field == "everyone loves dogs" + assert result.model == model + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + await pc.close() + + async def test_rerank_basic_default_top_n(self): + model = "bge-reranker-v2-m3" + pc = PineconeAsyncio() + result = await pc.inference.rerank( + model=model, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + return_documents=True, + ) + assert len(result.data) == 3 + assert result.data[0].index == 1 + assert result.data[0].document.text == "everyone loves 
dogs" + assert result.model == model + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + await pc.close() + + async def test_rerank_no_return_documents(self): + pc = PineconeAsyncio() + model = pc.inference.RerankModel.Bge_Reranker_V2_M3 + result = await pc.inference.rerank( + model=model, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + return_documents=False, + ) + assert len(result.data) == 3 + assert result.data[0].index == 1 + assert not result.data[0].document + assert result.model == model.value + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + await pc.close() + + async def test_rerank_allows_unknown_models_to_be_passed(self): + pc = PineconeAsyncio() + + # We don't want to reject these requests client side because we want + # to remain forwards compatible with any new models that become available + model = "unknown-model" + with pytest.raises(PineconeApiException) as excinfo: + await pc.inference.rerank( + model=model, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + return_documents=False, + ) + assert "Model 'unknown-model' not found" in str(excinfo.value) + await pc.close() diff --git a/tests/integration/inference/sync/__init__.py b/tests/integration/inference/sync/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/inference/test_inference.py b/tests/integration/inference/sync/test_embeddings.py similarity index 51% rename from tests/integration/inference/test_inference.py rename to tests/integration/inference/sync/test_embeddings.py index abf73d85..f779ad8c 100644 --- a/tests/integration/inference/test_inference.py +++ b/tests/integration/inference/sync/test_embeddings.py @@ -1,5 +1,5 @@ import pytest -from pinecone import Pinecone, PineconeApiException, RerankModel, EmbedModel +from pinecone import Pinecone, PineconeApiException, EmbedModel class TestEmbed: @@ -27,7 +27,7 @@ def test_create_embeddings(self, model_input, model_output): individual_embedding = embeddings[0] assert len(individual_embedding.values) == 1024 - assert individual_embedding.vector_type.value == "dense" + assert individual_embedding.vector_type == "dense" assert len(individual_embedding["values"]) == 1024 def test_embedding_result_is_iterable(self): @@ -122,119 +122,3 @@ def test_can_attempt_to_use_unknown_models(self): parameters={"input_type": "query", "truncate": "END"}, ) assert "Model 'unknown-model' not found" in str(excinfo.value) - - -class TestRerank: - @pytest.mark.parametrize( - "model_input,model_output", - [ - (RerankModel.Bge_Reranker_V2_M3, "bge-reranker-v2-m3"), - ("bge-reranker-v2-m3", "bge-reranker-v2-m3"), - ], - ) - def test_rerank_basic(self, model_input, model_output): - # Rerank model can be passed as string or enum - pc = Pinecone() - result = pc.inference.rerank( - model=model_input, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - top_n=1, - return_documents=True, - ) - assert len(result.data) == 1 - assert result.data[0].index == 1 - assert result.data[0].document.text == "everyone loves dogs" - assert result.model == model_output - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - - def test_rerank_basic_document_dicts(self): - model = "bge-reranker-v2-m3" - pc = Pinecone() - result = pc.inference.rerank( - model="bge-reranker-v2-m3", - query="i 
love dogs", - documents=[ - {"id": "123", "text": "dogs are pretty cool"}, - {"id": "789", "text": "I'm a cat person"}, - {"id": "456", "text": "everyone loves dogs"}, - ], - top_n=1, - return_documents=True, - ) - assert len(result.data) == 1 - assert result.data[0].index == 2 - assert result.data[0].document.text == "everyone loves dogs" - assert result.model == model - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - - def test_rerank_document_dicts_custom_field(self): - model = "bge-reranker-v2-m3" - pc = Pinecone() - result = pc.inference.rerank( - model="bge-reranker-v2-m3", - query="i love dogs", - documents=[ - {"id": "123", "my_field": "dogs are pretty cool"}, - {"id": "456", "my_field": "everyone loves dogs"}, - {"id": "789", "my_field": "I'm a cat person"}, - ], - rank_fields=["my_field"], - top_n=1, - return_documents=True, - ) - assert len(result.data) == 1 - assert result.data[0].index == 1 - assert result.data[0].document.my_field == "everyone loves dogs" - assert result.model == model - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - - def test_rerank_basic_default_top_n(self): - model = "bge-reranker-v2-m3" - pc = Pinecone() - result = pc.inference.rerank( - model=model, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - return_documents=True, - ) - assert len(result.data) == 3 - assert result.data[0].index == 1 - assert result.data[0].document.text == "everyone loves dogs" - assert result.model == model - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - - def test_rerank_no_return_documents(self): - pc = Pinecone() - model = pc.inference.RerankModel.Bge_Reranker_V2_M3 - result = pc.inference.rerank( - model=model, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - return_documents=False, - ) - assert len(result.data) == 3 - assert result.data[0].index == 1 - assert not result.data[0].document - assert result.model == model.value - assert isinstance(result.usage.rerank_units, int) - assert result.usage.rerank_units == 1 - - def test_rerank_allows_unknown_models_to_be_passed(self): - pc = Pinecone() - - # We don't want to reject these requests client side because we want - # to remain forwards compatible with any new models that become available - model = "unknown-model" - with pytest.raises(PineconeApiException) as excinfo: - pc.inference.rerank( - model=model, - query="i love dogs", - documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], - return_documents=False, - ) - assert "Model 'unknown-model' not found" in str(excinfo.value) diff --git a/tests/integration/inference/sync/test_models.py b/tests/integration/inference/sync/test_models.py new file mode 100644 index 00000000..29640a08 --- /dev/null +++ b/tests/integration/inference/sync/test_models.py @@ -0,0 +1,98 @@ +import logging +from pinecone import Pinecone + +logger = logging.getLogger(__name__) + + +class TestListModels: + def test_list_models(self): + pc = Pinecone() + models = pc.inference.list_models() + assert len(models) > 0 + logger.info(f"Models[0]: {models[0]}") + assert models[0].model is not None + assert models[0].short_description is not None + assert models[0].type is not None + assert models[0].supported_parameters is not None + assert models[0].modality is not None + assert models[0].max_sequence_length is not None + assert 
models[0].max_batch_size is not None + assert models[0].provider_name is not None + + def test_list_models_new_syntax(self): + pc = Pinecone() + models = pc.inference.model.list(type="embed", vector_type="dense") + assert len(models) > 0 + logger.info(f"Models[0]: {models[0]}") + assert models[0].model is not None + assert models[0].short_description is not None + + def test_list_models_with_type(self): + pc = Pinecone() + models = pc.inference.list_models(type="embed") + assert len(models) > 0 + assert models[0].type == "embed" + + models2 = pc.inference.list_models(type="rerank") + assert len(models2) > 0 + assert models2[0].type == "rerank" + + def test_list_models_with_vector_type(self): + pc = Pinecone() + models = pc.inference.list_models(vector_type="dense") + assert len(models) > 0 + assert models[0].vector_type == "dense" + + models2 = pc.inference.list_models(vector_type="sparse") + assert len(models2) > 0 + assert models2[0].vector_type == "sparse" + + def test_list_models_with_type_and_vector_type(self): + pc = Pinecone() + models = pc.inference.list_models(type="embed", vector_type="dense") + assert len(models) > 0 + assert models[0].type == "embed" + assert models[0].vector_type == "dense" + + def test_model_can_be_displayed(self): + # We want to check this, since we're doing some custom + # shenanigans to the model classes to make them more user + # friendly. Want to make sure we don't break the basic + # use case of displaying the model. + pc = Pinecone() + models = pc.inference.list_models() + models.__repr__() # This should not throw + models[0].__repr__() # This should not throw + models.to_dict() # This should not throw + models[0].to_dict() # This should not throw + assert True + + +class TestGetModel: + def test_get_model(self): + pc = Pinecone() + models = pc.inference.list_models() + first_model = models[0] + + model = pc.inference.get_model(model_name=first_model.model) + assert model.model == first_model.model + assert model.short_description == first_model.short_description + assert model.type == first_model.type + assert model.supported_parameters == first_model.supported_parameters + assert model.modality == first_model.modality + assert model.max_sequence_length == first_model.max_sequence_length + assert model.max_batch_size == first_model.max_batch_size + assert model.provider_name == first_model.provider_name + + def test_get_model_new_syntax(self): + pc = Pinecone() + models = pc.inference.model.list() + first_model = models[0] + + model = pc.inference.model.get(model_name=first_model.model) + assert model.model == first_model.model + assert model.short_description == first_model.short_description + assert model.type == first_model.type + assert model.supported_parameters == first_model.supported_parameters + assert model.modality == first_model.modality + assert model.max_sequence_length == first_model.max_sequence_length diff --git a/tests/integration/inference/sync/test_rerank.py b/tests/integration/inference/sync/test_rerank.py new file mode 100644 index 00000000..7797e857 --- /dev/null +++ b/tests/integration/inference/sync/test_rerank.py @@ -0,0 +1,118 @@ +import pytest +from pinecone import Pinecone, PineconeApiException, RerankModel + + +class TestRerank: + @pytest.mark.parametrize( + "model_input,model_output", + [ + (RerankModel.Bge_Reranker_V2_M3, "bge-reranker-v2-m3"), + ("bge-reranker-v2-m3", "bge-reranker-v2-m3"), + ], + ) + def test_rerank_basic(self, model_input, model_output): + # Rerank model can be passed as string or enum + pc = 
Pinecone() + result = pc.inference.rerank( + model=model_input, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + top_n=1, + return_documents=True, + ) + assert len(result.data) == 1 + assert result.data[0].index == 1 + assert result.data[0].document.text == "everyone loves dogs" + assert result.model == model_output + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + + def test_rerank_basic_document_dicts(self): + model = "bge-reranker-v2-m3" + pc = Pinecone() + result = pc.inference.rerank( + model="bge-reranker-v2-m3", + query="i love dogs", + documents=[ + {"id": "123", "text": "dogs are pretty cool"}, + {"id": "789", "text": "I'm a cat person"}, + {"id": "456", "text": "everyone loves dogs"}, + ], + top_n=1, + return_documents=True, + ) + assert len(result.data) == 1 + assert result.data[0].index == 2 + assert result.data[0].document.text == "everyone loves dogs" + assert result.model == model + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + + def test_rerank_document_dicts_custom_field(self): + model = "bge-reranker-v2-m3" + pc = Pinecone() + result = pc.inference.rerank( + model="bge-reranker-v2-m3", + query="i love dogs", + documents=[ + {"id": "123", "my_field": "dogs are pretty cool"}, + {"id": "456", "my_field": "everyone loves dogs"}, + {"id": "789", "my_field": "I'm a cat person"}, + ], + rank_fields=["my_field"], + top_n=1, + return_documents=True, + ) + assert len(result.data) == 1 + assert result.data[0].index == 1 + assert result.data[0].document.my_field == "everyone loves dogs" + assert result.model == model + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + + def test_rerank_basic_default_top_n(self): + model = "bge-reranker-v2-m3" + pc = Pinecone() + result = pc.inference.rerank( + model=model, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + return_documents=True, + ) + assert len(result.data) == 3 + assert result.data[0].index == 1 + assert result.data[0].document.text == "everyone loves dogs" + assert result.model == model + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + + def test_rerank_no_return_documents(self): + pc = Pinecone() + model = pc.inference.RerankModel.Bge_Reranker_V2_M3 + result = pc.inference.rerank( + model=model, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + return_documents=False, + ) + assert len(result.data) == 3 + assert result.data[0].index == 1 + assert not result.data[0].document + assert result.model == model.value + assert isinstance(result.usage.rerank_units, int) + assert result.usage.rerank_units == 1 + + def test_rerank_allows_unknown_models_to_be_passed(self): + pc = Pinecone() + + # We don't want to reject these requests client side because we want + # to remain forwards compatible with any new models that become available + model = "unknown-model" + with pytest.raises(PineconeApiException) as excinfo: + pc.inference.rerank( + model=model, + query="i love dogs", + documents=["dogs are pretty cool", "everyone loves dogs", "I'm a cat person"], + return_documents=False, + ) + assert "Model 'unknown-model' not found" in str(excinfo.value) diff --git a/tests/integration/plugins/test_plugins.py b/tests/integration/plugins/test_plugins.py new file mode 100644 index 00000000..e0c36569 --- /dev/null 
+++ b/tests/integration/plugins/test_plugins.py @@ -0,0 +1,8 @@ +from pinecone import Pinecone + + +class TestAssistantPlugin: + def test_assistant_plugin(self): + pc = Pinecone() + pc.assistant.list_assistants() + assert True, "This should pass without errors" diff --git a/tests/perf/test_query_results_aggregator.py b/tests/perf/test_query_results_aggregator.py index 29ac4c35..9f33c149 100644 --- a/tests/perf/test_query_results_aggregator.py +++ b/tests/perf/test_query_results_aggregator.py @@ -1,5 +1,5 @@ import random -from pinecone.data.query_results_aggregator import QueryResultsAggregator +from pinecone.db_data.query_results_aggregator import QueryResultsAggregator def fake_results(i): diff --git a/tests/unit/data/test_bulk_import.py b/tests/unit/data/test_bulk_import.py index b1bcd4cc..c7ad5a14 100644 --- a/tests/unit/data/test_bulk_import.py +++ b/tests/unit/data/test_bulk_import.py @@ -6,7 +6,7 @@ ImportErrorMode as ImportErrorModeGeneratedClass, ) -from pinecone.data.features.bulk_import import ImportFeatureMixin, ImportErrorMode +from pinecone.db_data.features.bulk_import import ImportFeatureMixin, ImportErrorMode def build_client_w_faked_response(mocker, body: str, status: int = 200): diff --git a/tests/unit/data/test_request_factory.py b/tests/unit/data/test_request_factory.py index 087436c9..ea04acdf 100644 --- a/tests/unit/data/test_request_factory.py +++ b/tests/unit/data/test_request_factory.py @@ -1,5 +1,5 @@ import pytest -from pinecone.data.request_factory import ( +from pinecone.db_data.request_factory import ( IndexRequestFactory, SearchQuery, SearchQueryVector, diff --git a/tests/unit/data/test_vector_factory.py b/tests/unit/data/test_vector_factory.py index 52fd1eac..adeeaf9c 100644 --- a/tests/unit/data/test_vector_factory.py +++ b/tests/unit/data/test_vector_factory.py @@ -2,7 +2,7 @@ import pandas as pd import pytest -from pinecone.data.vector_factory import VectorFactory +from pinecone.db_data.vector_factory import VectorFactory from pinecone import Vector, SparseValues, ListConversionException from pinecone.core.openapi.db_data.models import ( Vector as OpenApiVector, diff --git a/tests/unit/db_control/test_index.py b/tests/unit/db_control/test_index.py new file mode 100644 index 00000000..5fca4b18 --- /dev/null +++ b/tests/unit/db_control/test_index.py @@ -0,0 +1,68 @@ +import json + +from pinecone.config import Config, OpenApiConfiguration + +from pinecone.db_control.resources.sync.index import IndexResource +from pinecone.openapi_support.api_client import ApiClient +from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi + + +def build_client_w_faked_response(mocker, body: str, status: int = 200): + response = mocker.Mock() + response.headers = {"content-type": "application/json"} + response.status = status + # Parse the JSON string into a dict + response_data = json.loads(body) + response.data = json.dumps(response_data).encode("utf-8") + + api_client = ApiClient() + mock_request = mocker.patch.object( + api_client.rest_client.pool_manager, "request", return_value=response + ) + index_api = ManageIndexesApi(api_client=api_client) + resource = IndexResource( + index_api=index_api, + config=Config(api_key="test-api-key"), + openapi_config=OpenApiConfiguration(), + pool_threads=1, + ) + return resource, mock_request + + +class TestIndexResource: + def test_describe_index(self, mocker): + body = """ + { + "name": "test-index", + "description": "test-description", + "dimension": 1024, + "metric": "cosine", + "spec": { + "byoc": { + 
"environment": "test-environment" + } + }, + "vector_type": "dense", + "status": { + "ready": true, + "state": "Ready" + }, + "host": "test-host.pinecone.io", + "deletion_protection": "disabled", + "tags": { + "test-tag": "test-value" + } + } + """ + index_resource, mock_request = build_client_w_faked_response(mocker, body) + + desc = index_resource.describe(name="test-index") + assert desc.name == "test-index" + assert desc.description == "test-description" + assert desc.dimension == 1024 + assert desc.metric == "cosine" + assert desc.spec.byoc.environment == "test-environment" + assert desc.vector_type == "dense" + assert desc.status.ready == True + assert desc.deletion_protection == "disabled" + assert desc.tags["test-tag"] == "test-value" diff --git a/tests/unit/db_control/test_index_request_factory.py b/tests/unit/db_control/test_index_request_factory.py new file mode 100644 index 00000000..777486b5 --- /dev/null +++ b/tests/unit/db_control/test_index_request_factory.py @@ -0,0 +1,62 @@ +from pinecone import ByocSpec, ServerlessSpec +from pinecone.db_control.request_factory import PineconeDBControlRequestFactory + + +class TestIndexRequestFactory: + def test_create_index_request_with_spec_byoc(self): + req = PineconeDBControlRequestFactory.create_index_request( + name="test-index", + metric="cosine", + dimension=1024, + spec=ByocSpec(environment="test-byoc-spec-id"), + ) + assert req.name == "test-index" + assert req.metric == "cosine" + assert req.dimension == 1024 + assert req.spec.byoc.environment == "test-byoc-spec-id" + assert req.vector_type == "dense" + assert req.deletion_protection.value == "disabled" + + def test_create_index_request_with_spec_serverless(self): + req = PineconeDBControlRequestFactory.create_index_request( + name="test-index", + metric="cosine", + dimension=1024, + spec=ServerlessSpec(cloud="aws", region="us-east-1"), + ) + assert req.name == "test-index" + assert req.metric == "cosine" + assert req.dimension == 1024 + assert req.spec.serverless.cloud == "aws" + assert req.spec.serverless.region == "us-east-1" + assert req.vector_type == "dense" + assert req.deletion_protection.value == "disabled" + + def test_create_index_request_with_spec_serverless_dict(self): + req = PineconeDBControlRequestFactory.create_index_request( + name="test-index", + metric="cosine", + dimension=1024, + spec={"serverless": {"cloud": "aws", "region": "us-east-1"}}, + ) + assert req.name == "test-index" + assert req.metric == "cosine" + assert req.dimension == 1024 + assert req.spec.serverless.cloud == "aws" + assert req.spec.serverless.region == "us-east-1" + assert req.vector_type == "dense" + assert req.deletion_protection.value == "disabled" + + def test_create_index_request_with_spec_byoc_dict(self): + req = PineconeDBControlRequestFactory.create_index_request( + name="test-index", + metric="cosine", + dimension=1024, + spec={"byoc": {"environment": "test-byoc-spec-id"}}, + ) + assert req.name == "test-index" + assert req.metric == "cosine" + assert req.dimension == 1024 + assert req.spec.byoc.environment == "test-byoc-spec-id" + assert req.vector_type == "dense" + assert req.deletion_protection.value == "disabled" diff --git a/tests/unit/models/test_index_model.py b/tests/unit/models/test_index_model.py index 7320ce8d..7aeb88d1 100644 --- a/tests/unit/models/test_index_model.py +++ b/tests/unit/models/test_index_model.py @@ -5,7 +5,7 @@ ServerlessSpec, DeletionProtection, ) -from pinecone.models import IndexModel +from pinecone.db_control.models import IndexModel from 
pinecone import CloudProvider, AwsRegion diff --git a/tests/unit/openapi_support/test_retries.py b/tests/unit/openapi_support/test_retries.py new file mode 100644 index 00000000..ff624938 --- /dev/null +++ b/tests/unit/openapi_support/test_retries.py @@ -0,0 +1,49 @@ +import pytest +from unittest.mock import patch, MagicMock +from urllib3.exceptions import MaxRetryError +from urllib3.util.retry import Retry +from pinecone.openapi_support.retry_urllib3 import JitterRetry + + +def test_jitter_retry_backoff(): + """Test that the backoff time includes jitter.""" + retry = JitterRetry( + total=5, + backoff_factor=0.25, + backoff_max=3, + status_forcelist=(500, 502, 503, 504), + allowed_methods=None, + ) + + # Mock the parent's get_backoff_time to return a fixed value + with patch.object(Retry, "get_backoff_time", return_value=1.0): + # Test multiple times to ensure jitter is added + backoff_times = [retry.get_backoff_time() for _ in range(100)] + + # All backoff times should be between 1.0 and 1.25 + assert all(1.0 <= t <= 1.25 for t in backoff_times) + # Values should be different (jitter is working) + assert len(set(backoff_times)) > 1 + + +def test_jitter_retry_behavior(): + """Test that retries actually occur and respect the total count.""" + retry = JitterRetry(total=3) + mock_response = MagicMock() + mock_response.status = 500 # Simulate server error + + # Simulate a failing request + with pytest.raises(MaxRetryError) as exc_info: + retry2 = retry.increment( + method="GET", url="http://test.com", response=mock_response, error=None + ) + retry3 = retry2.increment( + method="GET", url="http://test.com", response=mock_response, error=None + ) + retry4 = retry3.increment( + method="GET", url="http://test.com", response=mock_response, error=None + ) + retry4.increment(method="GET", url="http://test.com", response=mock_response, error=None) + + # Verify the error contains the expected information + assert "Max retries exceeded" in str(exc_info.value) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index f33519b6..90ce4c1f 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -1,7 +1,7 @@ from pinecone import Pinecone from pinecone.exceptions.exceptions import PineconeConfigurationError from pinecone.config import PineconeConfig -from pinecone.openapi_support.configuration import Configuration as OpenApiConfiguration +from pinecone.config.openapi_configuration import Configuration as OpenApiConfiguration import pytest import os @@ -103,7 +103,11 @@ def test_config_pool_threads(self): pc = Pinecone( api_key="test-api-key", host="test-controller-host.pinecone.io", pool_threads=10 ) - assert pc.index_api.api_client.pool_threads == 10 + # DBControl object is created lazily, so we need to access this property + # to trigger the setup so we can inspect the config + assert pc.db is not None + + assert pc.db._index_api.api_client.pool_threads == 10 idx = pc.Index(host="my-index-host.pinecone.io", name="my-index-name") assert idx._vector_api.api_client.pool_threads == 10 @@ -111,8 +115,8 @@ def test_ssl_config_passed_to_index_client(self): proxy_headers = make_headers(proxy_basic_auth="asdf") pc = Pinecone(api_key="key", ssl_ca_certs="path/to/cert", proxy_headers=proxy_headers) - assert pc.openapi_config.ssl_ca_cert == "path/to/cert" - assert pc.openapi_config.proxy_headers == proxy_headers + assert pc._openapi_config.ssl_ca_cert == "path/to/cert" + assert pc._openapi_config.proxy_headers == proxy_headers idx = pc.Index(host="host.pinecone.io") assert 
idx._vector_api.api_client.configuration.ssl_ca_cert == "path/to/cert" @@ -122,16 +126,16 @@ def test_host_config_not_clobbered_by_index(self): proxy_headers = make_headers(proxy_basic_auth="asdf") pc = Pinecone(api_key="key", ssl_ca_certs="path/to/cert", proxy_headers=proxy_headers) - assert pc.openapi_config.ssl_ca_cert == "path/to/cert" - assert pc.openapi_config.proxy_headers == proxy_headers - assert pc.openapi_config.host == "https://api.pinecone.io" + assert pc._openapi_config.ssl_ca_cert == "path/to/cert" + assert pc._openapi_config.proxy_headers == proxy_headers + assert pc._openapi_config.host == "https://api.pinecone.io" idx = pc.Index(host="host.pinecone.io") assert idx._vector_api.api_client.configuration.ssl_ca_cert == "path/to/cert" assert idx._vector_api.api_client.configuration.proxy_headers == proxy_headers assert idx._vector_api.api_client.configuration.host == "https://host.pinecone.io" - assert pc.openapi_config.host == "https://api.pinecone.io" + assert pc._openapi_config.host == "https://api.pinecone.io" def test_proxy_config(self): pc = Pinecone( @@ -140,11 +144,15 @@ def test_proxy_config(self): ssl_ca_certs="path/to/cert-bundle.pem", ) - assert pc.config.proxy_url == "http://localhost:8080" - assert pc.config.ssl_ca_certs == "path/to/cert-bundle.pem" + assert pc._config.proxy_url == "http://localhost:8080" + assert pc._config.ssl_ca_certs == "path/to/cert-bundle.pem" + + assert pc._openapi_config.proxy == "http://localhost:8080" + assert pc._openapi_config.ssl_ca_cert == "path/to/cert-bundle.pem" - assert pc.openapi_config.proxy == "http://localhost:8080" - assert pc.openapi_config.ssl_ca_cert == "path/to/cert-bundle.pem" + # DBControl object is created lazily, so we need to access this property + # to trigger the setup so we can inspect the config + assert pc.db is not None - assert pc.index_api.api_client.configuration.proxy == "http://localhost:8080" - assert pc.index_api.api_client.configuration.ssl_ca_cert == "path/to/cert-bundle.pem" + assert pc.db._index_api.api_client.configuration.proxy == "http://localhost:8080" + assert pc.db._index_api.api_client.configuration.ssl_ca_cert == "path/to/cert-bundle.pem" diff --git a/tests/unit/test_config_builder.py b/tests/unit/test_config_builder.py index 3122c080..7307f153 100644 --- a/tests/unit/test_config_builder.py +++ b/tests/unit/test_config_builder.py @@ -1,6 +1,6 @@ import pytest -from pinecone.openapi_support.configuration import Configuration as OpenApiConfiguration +from pinecone.config.openapi_configuration import Configuration as OpenApiConfiguration from pinecone.config import ConfigBuilder from pinecone import PineconeConfigurationError diff --git a/tests/unit/test_control.py b/tests/unit/test_control.py index c0b909dd..6cce0f92 100644 --- a/tests/unit/test_control.py +++ b/tests/unit/test_control.py @@ -77,44 +77,48 @@ def index_list_response(): class TestControl: - def test_plugins_are_installed(self): + def test_plugins_are_lazily_loaded(self): with patch.object(PluginAware, "load_plugins") as mock_install_plugins: - Pinecone(api_key="asdf") + pc = Pinecone(api_key="asdf") + mock_install_plugins.assert_not_called() + with pytest.raises(AttributeError): + pc.foo() # Accessing a non-existent attribute should raise an AttributeError after PluginAware installs any applicable plugins mock_install_plugins.assert_called_once() def test_default_host(self): p = Pinecone(api_key="123-456-789") - assert p.index_api.api_client.configuration.host == "https://api.pinecone.io" + assert 
p.db._index_api.api_client.configuration.host == "https://api.pinecone.io" def test_passing_host(self): p = Pinecone(api_key="123-456-789", host="my-host.pinecone.io") - assert p.index_api.api_client.configuration.host == "https://my-host.pinecone.io" + assert p.db._index_api.api_client.configuration.host == "https://my-host.pinecone.io" def test_passing_additional_headers(self): extras = {"header1": "my-value", "header2": "my-value2"} p = Pinecone(api_key="123-456-789", additional_headers=extras) for key, value in extras.items(): - assert p.index_api.api_client.default_headers[key] == value - assert "User-Agent" in p.index_api.api_client.default_headers - assert "X-Pinecone-API-Version" in p.index_api.api_client.default_headers - assert "header1" in p.index_api.api_client.default_headers - assert "header2" in p.index_api.api_client.default_headers - assert len(p.index_api.api_client.default_headers) == 4 + assert p.db._index_api.api_client.default_headers[key] == value + assert "User-Agent" in p.db._index_api.api_client.default_headers + assert "X-Pinecone-API-Version" in p.db._index_api.api_client.default_headers + assert "header1" in p.db._index_api.api_client.default_headers + assert "header2" in p.db._index_api.api_client.default_headers + assert len(p.db._index_api.api_client.default_headers) == 4 def test_overwrite_useragent(self): # This doesn't seem like a common use case, but we may want to allow this # when embedding the client in other pinecone tools such as canopy. extras = {"User-Agent": "test-user-agent"} p = Pinecone(api_key="123-456-789", additional_headers=extras) - assert "X-Pinecone-API-Version" in p.index_api.api_client.default_headers - assert p.index_api.api_client.default_headers["User-Agent"] == "test-user-agent" - assert len(p.index_api.api_client.default_headers) == 2 + assert "X-Pinecone-API-Version" in p.db._index_api.api_client.default_headers + assert p.db._index_api.api_client.default_headers["User-Agent"] == "test-user-agent" + assert len(p.db._index_api.api_client.default_headers) == 2 def test_set_source_tag_in_useragent(self): p = Pinecone(api_key="123-456-789", source_tag="test_source_tag") assert ( - re.search(r"source_tag=test_source_tag", p.index_api.api_client.user_agent) is not None + re.search(r"source_tag=test_source_tag", p.db._index_api.api_client.user_agent) + is not None ) @pytest.mark.parametrize( @@ -146,8 +150,8 @@ def test_create_index_with_timeout( expected_sleep_calls, ): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.index_api, "describe_index", side_effect=describe_index_responses) - mocker.patch.object(p.index_api, "create_index") + mocker.patch.object(p.db._index_api, "describe_index", side_effect=describe_index_responses) + mocker.patch.object(p.db._index_api, "create_index") mocker.patch("time.sleep") p.create_index( @@ -157,8 +161,8 @@ def test_create_index_with_timeout( timeout=timeout_value, ) - assert p.index_api.create_index.call_count == 1 - assert p.index_api.describe_index.call_count == expected_describe_index_calls + assert p.db._index_api.create_index.call_count == 1 + assert p.db._index_api.describe_index.call_count == expected_describe_index_calls assert time.sleep.call_count == expected_sleep_calls @pytest.mark.parametrize( @@ -207,7 +211,7 @@ def test_create_index_with_spec_dictionary(self, mocker, index_spec): p = Pinecone(api_key="123-456-789") mock_api = MagicMock() - mocker.patch.object(p, "index_api", mock_api) + mocker.patch.object(p.db, "_index_api", mock_api) p.create_index(name="my-index", 
dimension=10, spec=index_spec) @@ -242,8 +246,8 @@ def test_create_index_from_source_collection( expected_sleep_calls, ): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.index_api, "describe_index", side_effect=describe_index_responses) - mocker.patch.object(p.index_api, "create_index") + mocker.patch.object(p.db._index_api, "describe_index", side_effect=describe_index_responses) + mocker.patch.object(p.db._index_api, "create_index") mocker.patch("time.sleep") p.create_index( @@ -253,17 +257,19 @@ def test_create_index_from_source_collection( timeout=timeout_value, ) - assert p.index_api.create_index.call_count == 1 - assert p.index_api.describe_index.call_count == expected_describe_index_calls + assert p.db._index_api.create_index.call_count == 1 + assert p.db._index_api.describe_index.call_count == expected_describe_index_calls assert time.sleep.call_count == expected_sleep_calls def test_create_index_when_timeout_exceeded(self, mocker): with pytest.raises(TimeoutError): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.index_api, "create_index") + mocker.patch.object(p.db._index_api, "create_index") describe_index_response = [description_with_status(False)] * 5 - mocker.patch.object(p.index_api, "describe_index", side_effect=describe_index_response) + mocker.patch.object( + p.db._index_api, "describe_index", side_effect=describe_index_response + ) mocker.patch("time.sleep") p.create_index( @@ -273,7 +279,7 @@ def test_create_index_when_timeout_exceeded(self, mocker): def test_list_indexes_returns_iterable(self, mocker, index_list_response): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.index_api, "list_indexes", side_effect=[index_list_response]) + mocker.patch.object(p.db._index_api, "list_indexes", side_effect=[index_list_response]) response = p.list_indexes() assert [i.name for i in response] == ["index1", "index2", "index3"] diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py index 72ed7422..6e880016 100644 --- a/tests/unit/test_index.py +++ b/tests/unit/test_index.py @@ -1,7 +1,7 @@ import pandas as pd import pytest -from pinecone.data import _Index +from pinecone.db_data import _Index import pinecone.core.openapi.db_data.models as oai from pinecone import QueryResponse, UpsertResponse, Vector diff --git a/tests/unit/test_index_initialization.py b/tests/unit/test_index_initialization.py index 3d10d636..e20e3f78 100644 --- a/tests/unit/test_index_initialization.py +++ b/tests/unit/test_index_initialization.py @@ -51,5 +51,6 @@ def test_overwrite_useragent(self): def test_set_source_tag(self): pc = Pinecone(api_key="123-456-789", source_tag="test_source_tag") assert ( - re.search(r"source_tag=test_source_tag", pc.index_api.api_client.user_agent) is not None + re.search(r"source_tag=test_source_tag", pc.db._index_api.api_client.user_agent) + is not None ) diff --git a/tests/unit/test_plugin_aware.py b/tests/unit/test_plugin_aware.py new file mode 100644 index 00000000..a2912bfa --- /dev/null +++ b/tests/unit/test_plugin_aware.py @@ -0,0 +1,48 @@ +import pytest +from pinecone.utils.plugin_aware import PluginAware +from pinecone.config import Config, OpenApiConfiguration + + +class TestPluginAware: + def test_errors_when_required_attributes_are_missing(self): + class Foo(PluginAware): + def __init__(self): + # does not set config, openapi_config, or pool_threads + super().__init__() + + with pytest.raises(AttributeError) as e: + Foo() + + assert "_config" in str(e.value) + assert "_openapi_config" in str(e.value) + assert "_pool_threads" 
in str(e.value) + + def test_correctly_raise_attribute_errors(self): + class Foo(PluginAware): + def __init__(self): + self.config = Config() + self._openapi_config = OpenApiConfiguration() + self._pool_threads = 1 + + super().__init__() + + foo = Foo() + + with pytest.raises(AttributeError) as e: + foo.bar() + + assert "bar" in str(e.value) + + def test_plugins_are_lazily_loaded(self): + class Pinecone(PluginAware): + def __init__(self): + self.config = Config() + self._openapi_config = OpenApiConfiguration() + self._pool_threads = 10 + + super().__init__() + + pc = Pinecone() + assert "assistant" not in dir(pc) + + assert pc.assistant is not None diff --git a/tests/unit/test_query_results_aggregator.py b/tests/unit/test_query_results_aggregator.py index b40a11d2..d3c97f87 100644 --- a/tests/unit/test_query_results_aggregator.py +++ b/tests/unit/test_query_results_aggregator.py @@ -1,4 +1,4 @@ -from pinecone.data.query_results_aggregator import ( +from pinecone.db_data.query_results_aggregator import ( QueryResultsAggregator, QueryResultsAggregatorInvalidTopKError, ) diff --git a/tests/unit/utils/test_docs_links.py b/tests/unit/utils/test_docs_links.py index 478ba3b2..c1d01b21 100644 --- a/tests/unit/utils/test_docs_links.py +++ b/tests/unit/utils/test_docs_links.py @@ -1,11 +1,17 @@ import pytest import requests from pinecone.utils import docslinks +from pinecone import __version__ urls = list(docslinks.values()) @pytest.mark.parametrize("url", urls) def test_valid_links(url): - response = requests.get(url) - assert response.status_code == 200, f"Docs link is invalid: {url}" + if isinstance(url, str): + response = requests.get(url) + assert response.status_code == 200, f"Docs link is invalid: {url}" + else: + versioned_url = url(__version__) + response = requests.get(versioned_url) + assert response.status_code == 200, f"Docs link is invalid: {versioned_url}" diff --git a/tests/unit/utils/test_setup_openapi_client.py b/tests/unit/utils/test_setup_openapi_client.py index acd96c34..199e4ae2 100644 --- a/tests/unit/utils/test_setup_openapi_client.py +++ b/tests/unit/utils/test_setup_openapi_client.py @@ -19,7 +19,7 @@ def test_setup_openapi_client(self): openapi_config=openapi_config, pool_threads=2, ) - user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+ \(urllib3\:\d+\.\d+\.\d+\)") + user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+") assert re.match(user_agent_regex, control_plane_client.api_client.user_agent) assert re.match( user_agent_regex, control_plane_client.api_client.default_headers["User-Agent"] @@ -38,7 +38,7 @@ def test_setup_openapi_client_with_api_version(self): pool_threads=2, api_version="2024-04", ) - user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+ \(urllib3\:\d+\.\d+\.\d+\)") + user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+") assert re.match(user_agent_regex, control_plane_client.api_client.user_agent) assert re.match( user_agent_regex, control_plane_client.api_client.default_headers["User-Agent"] @@ -102,7 +102,7 @@ def test_setup_openapi_client_with_host_override(self, plugin_api_version, plugi assert isinstance(plugin_client, plugin_api) # We want requests from plugins to have a user-agent matching the host SDK. 
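(Note: the user-agent assertions in this hunk and in the test_user_agent.py changes just below pin down the new format: the `(urllib3:x.y.z)` suffix is dropped, leaving `python-client-<semver>` plus an optional normalized `; source_tag=...` suffix. A sketch of source-tag normalization consistent with those expectations — the helper names and the exact regexes here are illustrative assumptions, not the SDK's code:)

    import re


    def normalize_source_tag(tag: str) -> str:
        # Lowercase and trim, drop everything except alphanumerics, spaces,
        # underscores, and colons, then collapse whitespace runs to single
        # underscores. This maps " My Source Tag 123 #### !! " to
        # "my_source_tag_123" and leaves "colon:allowed" untouched, matching
        # the expectations asserted in test_user_agent.py.
        tag = tag.lower().strip()
        tag = re.sub(r"[^a-z0-9_ :]", "", tag)
        return re.sub(r"\s+", "_", tag.strip())


    def build_user_agent(version: str, source_tag=None) -> str:
        ua = f"python-client-{version}"
        if source_tag:
            ua += f"; source_tag={normalize_source_tag(source_tag)}"
        return ua


    assert build_user_agent("6.0.0") == "python-client-6.0.0"
    assert (
        build_user_agent("6.0.0", " My Source Tag 123 #### !! ")
        == "python-client-6.0.0; source_tag=my_source_tag_123"
    )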
- user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+ \(urllib3\:\d+\.\d+\.\d+\)") + user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+") assert re.match(user_agent_regex, plugin_client.api_client.user_agent) assert re.match(user_agent_regex, plugin_client.api_client.default_headers["User-Agent"]) diff --git a/tests/unit/utils/test_user_agent.py b/tests/unit/utils/test_user_agent.py index 58cdfbbf..4eb5b06e 100644 --- a/tests/unit/utils/test_user_agent.py +++ b/tests/unit/utils/test_user_agent.py @@ -7,30 +7,38 @@ class TestUserAgent: def test_user_agent(self): config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host") useragent = get_user_agent(config) - assert re.search(r"python-client-\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"urllib3:\d+\.\d+\.\d+", useragent) is not None + assert re.search(r"^python-client-\d+\.\d+\.\d+$", useragent) is not None def test_user_agent_with_source_tag(self): config = ConfigBuilder.build( api_key="my-api-key", host="https://my-controller-host", source_tag="my_source_tag" ) useragent = get_user_agent(config) - assert re.search(r"python-client-\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"urllib3:\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"source_tag=my_source_tag", useragent) is not None + assert ( + re.search(r"^python-client-\d+\.\d+\.\d+; source_tag=my_source_tag$", useragent) + is not None + ) def test_source_tag_is_normalized(self): config = ConfigBuilder.build( api_key="my-api-key", host="https://my-controller-host", source_tag="my source tag!!!!" ) useragent = get_user_agent(config) - assert re.search(r"source_tag=my_source_tag", useragent) is not None + assert ( + re.search(r"^python-client-\d+\.\d+\.\d+; source_tag=my_source_tag$", useragent) + is not None + ) + assert "!!!!" not in useragent config = ConfigBuilder.build( api_key="my-api-key", host="https://my-controller-host", source_tag="My Source Tag" ) useragent = get_user_agent(config) - assert re.search(r"source_tag=my_source_tag", useragent) is not None + assert ( + re.search(r"^python-client-\d+\.\d+\.\d+; source_tag=my_source_tag$", useragent) + is not None + ) + assert "My Source Tag" not in useragent config = ConfigBuilder.build( api_key="my-api-key", @@ -46,25 +54,31 @@ def test_source_tag_is_normalized(self): source_tag=" My Source Tag 123 #### !! 
", ) useragent = get_user_agent(config) - assert re.search(r"source_tag=my_source_tag_123", useragent) is not None + assert ( + re.search(r"^python-client-\d+\.\d+\.\d+; source_tag=my_source_tag_123$", useragent) + is not None + ) config = ConfigBuilder.build( api_key="my-api-key", host="https://my-controller-host", source_tag="colon:allowed" ) useragent = get_user_agent(config) - assert re.search(r"source_tag=colon:allowed", useragent) is not None + assert ( + re.search(r"^python-client-\d+\.\d+\.\d+; source_tag=colon:allowed$", useragent) + is not None + ) def test_user_agent_grpc(self): config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host") useragent = get_user_agent_grpc(config) assert re.search(r"python-client\[grpc\]-\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"urllib3:\d+\.\d+\.\d+", useragent) is not None def test_user_agent_grpc_with_source_tag(self): config = ConfigBuilder.build( api_key="my-api-key", host="https://my-controller-host", source_tag="my_source_tag" ) useragent = get_user_agent_grpc(config) - assert re.search(r"python-client\[grpc\]-\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"urllib3:\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"source_tag=my_source_tag", useragent) is not None + assert ( + re.search(r"^python-client\[grpc\]-\d+\.\d+\.\d+; source_tag=my_source_tag$", useragent) + is not None + ) diff --git a/tests/unit_grpc/test_grpc_index_initialization.py b/tests/unit_grpc/test_grpc_index_initialization.py index 710c3f26..b0b64250 100644 --- a/tests/unit_grpc/test_grpc_index_initialization.py +++ b/tests/unit_grpc/test_grpc_index_initialization.py @@ -43,7 +43,7 @@ def test_config_passed_when_target_by_name(self): # Set this state in the host store to skip network call # to find host for name - pc.index_host_store.set_host(pc.config, "my-index", "myhost") + pc.db.index._index_host_store.set_host(pc._config, "my-index", "myhost") config = GRPCClientConfig(timeout=10, secure=False) index = pc.Index(name="my-index", grpc_config=config) @@ -88,5 +88,6 @@ def test_config_passed_when_target_by_host_and_port(self): def test_config_passes_source_tag_when_set(self): pc = PineconeGRPC(api_key="YOUR_API_KEY", source_tag="my_source_tag") assert ( - re.search(r"source_tag=my_source_tag", pc.index_api.api_client.user_agent) is not None + re.search(r"source_tag=my_source_tag", pc.db._index_api.api_client.user_agent) + is not None ) diff --git a/tests/upgrade/test_all.py b/tests/upgrade/test_all.py new file mode 100644 index 00000000..acabf620 --- /dev/null +++ b/tests/upgrade/test_all.py @@ -0,0 +1,28 @@ +class TestAll: + def test_all_is_complete(self): + """Test that __all__ is complete and accurate.""" + # Import the module + import pinecone + + # Get all public names (those that don't start with _) + public_names = {name for name in dir(pinecone) if not name.startswith("_")} + + # Get __all__ if it exists, otherwise empty set + all_names = set(getattr(pinecone, "__all__", [])) + + # Check that __all__ exists + assert hasattr(pinecone, "__all__"), "Module should have __all__ defined" + + # Check that all names in __all__ are actually importable + for name in all_names: + assert getattr(pinecone, name) is not None, f"Name {name} in __all__ is not importable" + + # Check that all public names are in __all__ + missing_from_all = public_names - all_names + for name in missing_from_all: + print(f"Public name {name} is not in __all__") + assert not missing_from_all, f"Public names not in __all__: 
{missing_from_all}" + + # Check that __all__ doesn't contain any private names + private_in_all = {name for name in all_names if name.startswith("_")} + assert not private_in_all, f"Private names in __all__: {private_in_all}" diff --git a/tests/upgrade/test_reorganization.py b/tests/upgrade/test_reorganization.py new file mode 100644 index 00000000..331681b7 --- /dev/null +++ b/tests/upgrade/test_reorganization.py @@ -0,0 +1,19 @@ +import pytest + + +class TestReorganization: + def test_data(self): + with pytest.warns(DeprecationWarning) as warning_info: + from pinecone.data import Index + + assert Index is not None + assert len(warning_info) > 0 + assert "has moved to" in str(warning_info[0].message) + + def test_config(self): + with pytest.warns(DeprecationWarning) as warning_info: + from pinecone.config import PineconeConfig + + assert PineconeConfig is not None + assert len(warning_info) > 0 + assert "has moved to" in str(warning_info[0].message) diff --git a/tests/upgrade/test_v6_upgrade.py b/tests/upgrade/test_v6_upgrade.py new file mode 100644 index 00000000..6532f65f --- /dev/null +++ b/tests/upgrade/test_v6_upgrade.py @@ -0,0 +1,263 @@ +import pinecone +import logging + +logger = logging.getLogger(__name__) + + +class TestExpectedImports_UpgradeFromV6: + def test_mapped_data_imports(self): + data_imports = [ + "Vector", + "QueryRequest", + "FetchResponse", + "DeleteRequest", + "DescribeIndexStatsRequest", + "DescribeIndexStatsResponse", + "RpcStatus", + "ScoredVector", + "ServiceException", + "SingleQueryResults", + "QueryResponse", + "RerankModel", + "SearchQuery", + "SearchQueryVector", + "SearchRerank", + "UpsertResponse", + "UpdateRequest", + ] + + control_imports = [ + "CollectionDescription", + "CollectionList", + "ServerlessSpec", + "ServerlessSpecDefinition", + "PodSpec", + "PodSpecDefinition", + # 'ForbiddenException', + # 'ImportErrorMode', + # 'Index', + "IndexList", + "IndexModel", + # 'ListConversionException', + # 'MetadataDictionaryExpectedError', + # 'NotFoundException', + ] + + config_imports = [ + "Config", + "ConfigBuilder", + "PineconeConfig", + "PineconeConfigurationError", + "PineconeException", + "PineconeProtocolError", + "PineconeApiAttributeError", + "PineconeApiException", + ] + + exception_imports = [ + "PineconeConfigurationError", + "PineconeProtocolError", + "PineconeException", + "PineconeApiAttributeError", + "PineconeApiTypeError", + "PineconeApiValueError", + "PineconeApiKeyError", + "PineconeApiException", + "NotFoundException", + "UnauthorizedException", + "ForbiddenException", + "ServiceException", + "ListConversionException", + ] + mapped_imports = data_imports + control_imports + config_imports + exception_imports + + for import_name in mapped_imports: + assert hasattr(pinecone, import_name), f"Import {import_name} not found in pinecone" + + def test_v6_upgrade_root_imports(self): + v6_dir_items = [ + "CollectionDescription", + "CollectionList", + "Config", + "ConfigBuilder", + "DeleteRequest", + "DescribeIndexStatsRequest", + "DescribeIndexStatsResponse", + "FetchResponse", + "ForbiddenException", + "ImportErrorMode", + "Index", + "IndexList", + "IndexModel", + "ListConversionException", + "MetadataDictionaryExpectedError", + "NotFoundException", + "Pinecone", + "PineconeApiAttributeError", + "PineconeApiException", + "PineconeApiKeyError", + "PineconeApiTypeError", + "PineconeApiValueError", + "PineconeConfig", + "PineconeConfigurationError", + "PineconeException", + "PineconeProtocolError", + "PodSpec", + "PodSpecDefinition", + 
"QueryRequest", + "QueryResponse", + "RpcStatus", + "ScoredVector", + "ServerlessSpec", + "ServerlessSpecDefinition", + "ServiceException", + "SingleQueryResults", + "SparseValues", + "SparseValuesDictionaryExpectedError", + "SparseValuesMissingKeysError", + "SparseValuesTypeError", + "TqdmExperimentalWarning", + "UnauthorizedException", + "UpdateRequest", + "UpsertRequest", + "UpsertResponse", + "Vector", + "VectorDictionaryExcessKeysError", + "VectorDictionaryMissingKeysError", + "VectorTupleLengthError", + "__builtins__", + "__cached__", + "__doc__", + "__file__", + "__loader__", + "__name__", + "__package__", + "__path__", + "__spec__", + "__version__", + "config", + "configure_index", + "control", + "core", + "core_ea", + "create_collection", + "create_index", + "data", + "delete_collection", + "delete_index", + "deprecation_warnings", + "describe_collection", + "describe_index", + "errors", + "exceptions", + "features", + "index", + "index_host_store", + "init", + "install_repr_overrides", + "langchain_import_warnings", + "list_collections", + "list_indexes", + "logging", + "models", + "openapi", + "os", + "pinecone", + "pinecone_config", + "repr_overrides", + "scale_index", + "sparse_vector_factory", + "utils", + "vector_factory", + "warnings", + ] + + intentionally_removed_items = ["os"] + + expected_items = [item for item in v6_dir_items if item not in intentionally_removed_items] + + missing_items = [] + for item in expected_items: + if not hasattr(pinecone, item): + missing_items.append(item) + logger.debug(f"Exported: ❌ {item}") + else: + logger.debug(f"Exported: ✅ {item}") + + extra_items = [] + for item in intentionally_removed_items: + if hasattr(pinecone, item): + extra_items.append(item) + logger.debug(f"Removed: ❌ {item}") + else: + logger.debug(f"Removed: ✅ {item}") + + assert len(missing_items) == 0, f"Missing items: {missing_items}" + assert len(extra_items) == 0, f"Extra items: {extra_items}" + + # def test_v6_upgrade_data_imports(self): + # v6_data_dir_items = [ + # "DescribeIndexStatsResponse", + # "EmbedModel", + # "FetchResponse", + # "ImportErrorMode", + # "Index", + # "IndexClientInstantiationError", + # "Inference", + # "InferenceInstantiationError", + # "MetadataDictionaryExpectedError", + # "QueryResponse", + # "RerankModel", + # "SearchQuery", + # "SearchQueryVector", + # "SearchRerank", + # "SparseValues", + # "SparseValuesDictionaryExpectedError", + # "SparseValuesMissingKeysError", + # "SparseValuesTypeError", + # "UpsertResponse", + # "Vector", + # "VectorDictionaryExcessKeysError", + # "VectorDictionaryMissingKeysError", + # "VectorTupleLengthError", + # "_AsyncioInference", + # "_Index", + # "_IndexAsyncio", + # "_Inference", + # "__builtins__", + # "__cached__", + # "__doc__", + # "__file__", + # "__loader__", + # "__name__", + # "__package__", + # "__path__", + # "__spec__", + # "dataclasses", + # "errors", + # "features", + # "fetch_response", + # "import_error", + # "index", + # "index_asyncio", + # "index_asyncio_interface", + # "interfaces", + # "query_results_aggregator", + # "request_factory", + # "search_query", + # "search_query_vector", + # "search_rerank", + # "sparse_values", + # "sparse_values_factory", + # "types", + # "utils", + # "vector", + # "vector_factory", + # ] + + # missing_items = [] + # for item in v6_data_dir_items: + # if item not in dir(pinecone.db_data): + # missing_items.append(item) + + # assert len(missing_items) == 0, f"Missing items: {missing_items}"