diff --git a/.vscode/cspell.json b/.vscode/cspell.json index 404795164d2e..d1cdeda583a1 100644 --- a/.vscode/cspell.json +++ b/.vscode/cspell.json @@ -134,6 +134,7 @@ "eng/common/docgeneration/Generate-DocIndex.ps1", "eng/**/*.py", ".gitignore", + "pylintrc", "tools/azure-sdk-tools/ci_tools/github_tools.py", "tools/azure-sdk-tools/devtools_testutils/fake_credentials.py", "tools/azure-sdk-tools/packaging_tools/**", @@ -1360,6 +1361,13 @@ "azureopenai" ] }, + { + "filename": "sdk/ai/azure-ai-projects-onedp/**", + "words": [ + "aiservices", + "azureai", + ] + }, { "filename": "sdk/ai/azure-ai-inference/**", "words": [ diff --git a/eng/.docsettings.yml b/eng/.docsettings.yml index 679c12c2d4a8..1126ce0e1c6f 100644 --- a/eng/.docsettings.yml +++ b/eng/.docsettings.yml @@ -114,6 +114,8 @@ known_content_issues: - ['sdk/openai/azure-openai/README.md', '#4554'] - ['sdk/ai/azure-ai-generative/README.md', 'Deprecated package.'] - ['sdk/ai/azure-ai-resources/README.md', 'Deprecated package.'] + - ['sdk/ai/azure-ai-projects-onedp/tests/README.md', 'readme has at least one missing required section'] + # common. 
- ['sdk/appconfiguration/azure-appconfiguration/README.md', 'common'] diff --git a/pylintrc b/pylintrc index e58b01fd5c1b..a2b01532c656 100644 --- a/pylintrc +++ b/pylintrc @@ -8,6 +8,7 @@ ignore-paths= azure\\mixedreality\\remoterendering\\_api_version.py, azure/mixedreality/remoterendering/_api_version.py, (?:.*[/\\]|^)projects/(models/_models.py|_model_base.py|operations/_operations.py|aio/operations/_operations.py)$, + (?:.*[/\\]|^)projects/onedp/(models/_models.py|_model_base.py|operations/_operations.py|aio/operations/_operations.py)$, # Exclude any path that contains the following directory names (?:.*[/\\]|^)(?:_vendor|_generated|_restclient|samples|examples|test|tests|doc|\.tox)(?:[/\\]|$) diff --git a/sdk/ai/azure-ai-projects-onedp/CHANGELOG.md b/sdk/ai/azure-ai-projects-onedp/CHANGELOG.md new file mode 100644 index 000000000000..019b8481fc84 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/CHANGELOG.md @@ -0,0 +1,170 @@ +# Release History + +## 1.0.0b10 (Unreleased) + +Major changes happened in this version as the client library switched to using the new AI Foundry data-plane REST APIs. +(TODO: Add link). Please see updated samples. + +### Breaking changes + +* Endpoint URL is now needed to construct the `AIProjectClient`, instead of using the factory method +`.from_connection_string`. Find this endpoint URL in your AI Foundry project page. +* Agent operations that were previously part of the `azure.ai.projects` package have moved out to a separate new package +`azure-ai-assistants` with a client named `AssistantClient`. See INSERT URL HERE for more information. You can get the `AssistantClient` by calling `.assistant.get_client()` method on your `AIProjectClient`. +* Import `PromptTemplate` from `azure.ai.projects` instead of `azure.ai.projects.prompts`. +* Several changes to `.connections` operations. Please see new connection samples. +* TODO: `.evaluations` methods .. 
+ +### Features added + +* `.deployment` methods to enumerate the deployed AI models in your AI Foundry project. +* `.datasets` method to upload documents and reference them. These documents will be used to augment the capability +of your selected LLM (RAG pattern). +* `.indexes` methods to handle your AI search indexes and search queries, as part of RAG pattern. +* TODO: `.red_team` methods ... + +### Sample updates + +### Bugs Fixed + +## 1.0.0b9 (2025-04-16) + +### Features added + +* Utilities to load prompt template strings and Prompty file content +* Added BingCustomSearchTool class with sample +* Added list_threads API to agents namespace +* Added image input support for agents create_message + +### Sample updates + +* Added `project_client.agents.enable_auto_function_calls(toolset=toolset)` to all samples that has `toolcalls` executed by `azure-ai-project` SDK +* New BingCustomSearchTool sample +* New samples added for image input from url, file and base64 + +### Breaking Changes + +Redesigned automatic function calls because agents retrieved by `update_agent` and `get_agent` do not support them. With the new design, the toolset parameter in `create_agent` no longer executes toolcalls automatically during `create_and_process_run` or `create_stream`. To retain this behavior, call `enable_auto_function_calls` without additional changes. + +## 1.0.0b8 (2025-03-28) + +### Features added + +* New parameters added for Azure AI Search tool, with corresponding sample update. +* Fabric tool REST name updated, along with convenience code. + +### Sample updates + +* Sample update demonstrating new parameters added for Azure AI Search tool. +* Sample added using OpenAPI tool against authenticated TripAdvisor API spec. + +### Bugs Fixed + +* Fix for a bug in Agent tracing causing event handler return values to not be returned when tracing is enabled. +* Fix for a bug in Agent tracing causing tool calls not to be recorded in traces. 
+* Fix for a bug in Agent tracing causing function tool calls to not work properly when tracing is enabled. +* Fix for a bug in Agent streaming, where `agent_id` was not included in the response. This caused the SDK not to make function calls when the thread run status is `requires_action`. + +## 1.0.0b7 (2025-03-06) + +### Features added + +* Add support for parsing URL citations in Agent text messages. See new classes `MessageTextUrlCitationAnnotation` and `MessageDeltaTextUrlCitationAnnotation`. +* Add enum value `ConnectionType.API_KEY` to support enumeration of generic connections that uses API Key authentication. + +### Sample updates + +* Update sample `sample_agents_bing_grounding.py` with printout of URL citation. +* Add new samples `sample_agents_stream_eventhandler_with_bing_grounding.py` and `sample_agents_stream_iteration_with_bing_grounding.py` with printout of URL citation. + +### Bugs Fixed + +* Fix a bug in deserialization of `RunStepDeltaFileSearchToolCall` returned during Agent streaming (see [GitHub issue 48333](https://github.com/Azure/azure-sdk-for-net/issues/48333)). +* Fix for Exception raised while parsing Agent streaming response, in some rare cases, for multibyte UTF-8 languages like Chinese. + +### Breaking Changes + +* Rename input argument `assistant_id` to `agent_id` in all Agent methods to align with the "Agent" terminology. Similarly, rename all `assistant_id` properties on classes. + +## 1.0.0b6 (2025-02-14) + +### Features added + +* Added `trace_function` decorator for conveniently tracing function calls in Agents using OpenTelemetry. Please see the README.md for updated documentation. + +### Sample updates + +* Added AzureLogicAppTool utility and Logic App sample under `samples/agents`, folder to make Azure Logic App integration with Agents easier. +* Added better observability for Azure AI Search sample for Agents via improved run steps information from the service. 
+* Added sample to demonstrate how to add custom attributes to telemetry span. + +### Bugs Fixed + +* Lowered the logging level of "Toolset is not available in the client" from `warning` to `debug` to prevent unnecessary log entries in agent application runs. + +## 1.0.0b5 (2025-01-17) + +### Features added + +* Add method `.inference.get_image_embeddings_client` on `AIProjectClient` to get an authenticated +`ImageEmbeddingsClient` (from the package azure-ai-inference). You need to have azure-ai-inference package +version 1.0.0b7 or above installed for this method to work. + +### Bugs Fixed + +* Fix for events dropped in streamed Agent response (see [GitHub issue 39028](https://github.com/Azure/azure-sdk-for-python/issues/39028)). +* In Agents, incomplete status thread run event is now deserialized into a ThreadRun object, during stream iteration, and invokes the correct function `on_thread_run` (instead of the wrong function `on_unhandled_event`). +* Fix an error when calling the `to_evaluator_model_config` method of class `ConnectionProperties`. See new input +argument `include_credentials`. + +### Breaking Changes + +* `submit_tool_outputs_to_run` returns `None` instead of `ThreadRun` (see [GitHub issue 39028](https://github.com/Azure/azure-sdk-for-python/issues/39028)). 
+ +## 1.0.0b4 (2024-12-20) + +### Bugs Fixed + +* Fix for Agent streaming issue (see [GitHub issue 38918](https://github.com/Azure/azure-sdk-for-python/issues/38918)) +* Fix for Agent async function `send_email_async` is not called (see [GitHub issue 38898](https://github.com/Azure/azure-sdk-for-python/issues/38898)) +* Fix for Agent streaming with event handler fails with "AttributeError: 'MyEventHandler' object has no attribute 'buffer'" (see [GitHub issue 38897](https://github.com/Azure/azure-sdk-for-python/issues/38897)) + +### Features Added + +* Add optional input argument `connection_name` to methods `.inference.get_chat_completions_client`, + `.inference.get_embeddings_client` and `.inference.get_azure_openai_client`. + +## 1.0.0b3 (2024-12-13) + +### Features Added + +* Add support for Structured Outputs for Agents. +* Add option to include file contents, when index search is used for Agents. +* Added objects to inform Agents about Azure Functions. +* Redesigned streaming and event handlers for agents. +* Add `parallel_tool_calls` parameter to allow parallel tool execution for Agents. +* Added `BingGroundingTool` for Agents to use against a Bing API Key connection. +* Added `AzureAiSearchTool` for Agents to use against an Azure AI Search resource. +* Added `OpenApiTool` for Agents, which creates and executes a REST function defined by an OpenAPI spec. +* Added new helper properties in `OpenAIPageableListOfThreadMessage`, `MessageDeltaChunk`, and `ThreadMessage`. +* Rename "AI Studio" to "AI Foundry" in package documents and samples, following recent rebranding. + +### Breaking Changes + +* The method `.agents.get_messages` was removed. Please use `.agents.list_messages` instead. + +## 1.0.0b2 (2024-12-03) + +### Bugs Fixed + +* Fix a bug in the `.inference` operations when Entra ID authentication is used by the default connection. +* Fixed bugs occurring during streaming in function tool calls by asynchronous agents. 
+* Fixed bugs that were causing issues with tracing agent asynchronous functionality. +* Fix a bug causing warning about unclosed session, shown when using asynchronous credentials to create agent. +* Fix a bug that would cause agent function tool related function names and parameters to be included in traces even when content recording is not enabled. + +## 1.0.0b1 (2024-11-15) + +### Features Added + +First beta version diff --git a/sdk/ai/azure-ai-projects-onedp/LICENSE b/sdk/ai/azure-ai-projects-onedp/LICENSE new file mode 100644 index 000000000000..63447fd8bbbf --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/sdk/ai/azure-ai-projects-onedp/MANIFEST.in b/sdk/ai/azure-ai-projects-onedp/MANIFEST.in new file mode 100644 index 000000000000..532c97cd5f48 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/MANIFEST.in @@ -0,0 +1,8 @@ +include *.md +include LICENSE +include azure/ai/projects/onedp/py.typed +recursive-include tests *.py +recursive-include samples *.py *.md +include azure/__init__.py +include azure/ai/__init__.py +include azure/ai/projects/__init__.py diff --git a/sdk/ai/azure-ai-projects-onedp/README.md b/sdk/ai/azure-ai-projects-onedp/README.md new file mode 100644 index 000000000000..622a5b28864b --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/README.md @@ -0,0 +1,524 @@ +# Azure AI Projects client library for Python + +Use the AI Projects client library (in preview) to: + +* **Enumerate AI Models** deployed to your Azure AI Foundry project. +* **Enumerate connected Azure resources** and get their properties. +* **Upload documents and create Datasets** to reference them. +* **Create and enumerate search Indexes**. +* **Get an authenticated Assistant client**. +* **Get an authenticated Inference client** (Azure OpenAI or Azure AI Inference) for chat completions, text or image embeddings. +* **Read a Prompty file or string** and render messages for inference clients. +* **Run Evaluations** to assess the performance of generative AI applications. +* **Enable OpenTelemetry tracing**. 
+ +[Product documentation](https://aka.ms/azsdk/azure-ai-projects/product-doc) +| [Samples][samples] +| [API reference documentation](https://aka.ms/azsdk/azure-ai-projects/python/reference) +| [Package (PyPI)](https://aka.ms/azsdk/azure-ai-projects/python/package) +| [SDK source code](https://aka.ms/azsdk/azure-ai-projects/python/code) +| [AI Starter Template](https://aka.ms/azsdk/azure-ai-projects/python/ai-starter-template) + +## Reporting issues + +To report an issue with the client library, or request additional features, please open a GitHub issue [here](https://github.com/Azure/azure-sdk-for-python/issues). Mention the package name "azure-ai-projects" in the title or content. + +## Getting started + +### Prerequisite + +- Python 3.9 or later. +- An [Azure subscription][azure_sub]. +- A [project in Azure AI Foundry](https://learn.microsoft.com/azure/ai-studio/how-to/create-projects). +- The project endpoint URL, of the form `https://.services.ai.azure.com/api/projects/`. It can be found in your Azure AI Foundry project overview page, under "Project details". Below we will assume the environment variable `PROJECT_ENDPOINT` was defined to hold this value. +- An Entra ID token for authentication. Your application needs an object that implements the [TokenCredential](https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential) interface. Code samples here use [DefaultAzureCredential](https://learn.microsoft.com/python/api/azure-identity/azure.identity.defaultazurecredential). To get that working, you will need: + * An appropriate role assignment. see [Role-based access control in Azure AI Foundry portal](https://learn.microsoft.com/azure/ai-foundry/concepts/rbac-ai-foundry). Role assigned can be done via the "Access Control (IAM)" tab of your Azure AI Project resource in the Azure portal. + * [Azure CLI](https://learn.microsoft.com/cli/azure/install-azure-cli) installed. 
+ * You are logged into your Azure account by running `az login`. + * Note that if you have multiple Azure subscriptions, the subscription that contains your Azure AI Project resource must be your default subscription. Run `az account list --output table` to list all your subscriptions and see which one is the default. Run `az account set --subscription "Your Subscription ID or Name"` to change your default subscription. + +### Install the package + +```bash +pip install azure-ai-projects +``` + +## Key concepts + +### Create and authenticate the client with Entra ID + +To construct a synchronous client: + +```python +import os +from azure.ai.projects import AIProjectClient +from azure.identity import DefaultAzureCredential + +project_client = AIProjectClient( + credential=DefaultAzureCredential(), + endpoint=os.environ["PROJECT_ENDPOINT"], +) +``` + +To construct an asynchronous client, install the additional package [aiohttp](https://pypi.org/project/aiohttp/): + +```bash +pip install aiohttp +``` + +and update the code above to import `asyncio`, and import `AIProjectClient` from the `azure.ai.projects.aio` namespace: + +```python +import os +import asyncio +from azure.ai.projects.aio import AIProjectClient +from azure.identity import DefaultAzureCredential + +project_client = AIProjectClient( + credential=DefaultAzureCredential(), + endpoint=os.environ["PROJECT_ENDPOINT"], +) +``` + +## Examples + +### Getting an authenticated Assistant client + +Below is a code example of how to get an authenticated `AssistantsClient` from the `azure-ai-assistants` package. +Full samples can be found under the `assistants` folder in the [package samples][samples]. + + + +```python +with project_client.assistants.get_client() as client: + # TODO: Do something with the assistants client...
+ pass +``` + + + +### Get an authenticated ChatCompletionsClient + +Your Azure AI Foundry project may have one or more AI models deployed that support chat completions. These could be OpenAI models, Microsoft models, or models from other providers. Use the code below to get an authenticated [ChatCompletionsClient](https://learn.microsoft.com/python/api/azure-ai-inference/azure.ai.inference.chatcompletionsclient) from the [azure-ai-inference](https://pypi.org/project/azure-ai-inference/) package, and execute a chat completions call. + +First, install the package: + +```bash +pip install azure-ai-inference +``` + +Then run the code below. Here we assume `model_deployment_name` holds the model deployment name. + + + +```python +with project_client.inference.get_chat_completions_client() as client: + + response = client.complete( + model=model_deployment_name, messages=[UserMessage(content="How many feet are in a mile?")] + ) + + print(response.choices[0].message.content) +``` + + + +See the "inference" folder in the [package samples][samples] for additional samples, including getting an authenticated [EmbeddingsClient](https://learn.microsoft.com/python/api/azure-ai-inference/azure.ai.inference.embeddingsclient) and [ImageEmbeddingsClient](https://learn.microsoft.com/python/api/azure-ai-inference/azure.ai.inference.imageembeddingsclient). + + +### Get an authenticated AzureOpenAI client + +Your Azure AI Foundry project may have one or more OpenAI models deployed that support chat completions. Use the code below to get an authenticated [AzureOpenAI](https://github.com/openai/openai-python?tab=readme-ov-file#microsoft-azure-openai) from the [openai](https://pypi.org/project/openai/) package, and execute a chat completions call. + +First, install the package: + +```bash +pip install openai +``` + +Then run the code below. Here we assume `model_deployment_name` holds the model deployment name. 
Update the `api_version` +value with one found in the "Data plane - inference" row [in this table](https://learn.microsoft.com/azure/ai-services/openai/reference#api-specs). +You also have the option (not shown) to explicitly specify the Azure OpenAI connection name in your AI Foundry Project, which +the `get_azure_openai_client` method will use to get the inference endpoint and authentication credentials. +If not present the default Azure OpenAI connection will be used. + + + +```python +with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client: + + response = client.chat.completions.create( + model=model_deployment_name, + messages=[ + { + "role": "user", + "content": "How many feet are in a mile?", + }, + ], + ) + + print(response.choices[0].message.content) +``` + + + +See the "inference" folder in the [package samples][samples] for additional samples. + +### Deployments operations + +The code below shows some Deployments operations. Full samples can be found under the "deployment" +folder in the [package samples][samples]. + + + +```python +print("List all deployments:") +for deployment in project_client.deployments.list(): + print(deployment) + +print(f"List all deployments by the model publisher `{model_publisher}`:") +for deployment in project_client.deployments.list(model_publisher=model_publisher): + print(deployment) + +print(f"Get a single deployment named `{model_deployment_name}`:") +deployment = project_client.deployments.get(model_deployment_name) +print(deployment) +``` + + + +### Connections operations + +The code below shows some Connection operations. Full samples can be found under the "connections" +folder in the [package samples][samples].
+ + + +```python +print("List the properties of all connections:") +for connection in project_client.connections.list(): + print(connection) + +print("List the properties of all connections of a particular type (in this case, Azure OpenAI connections):") +for connection in project_client.connections.list( + connection_type=ConnectionType.AZURE_OPEN_AI, +): + print(connection) + +print(f"Get the properties of a connection named `{connection_name}`:") +connection = project_client.connections.get(connection_name) +print(connection) +``` + + + +### Dataset operations + +The code below shows some Dataset operations. Full samples can be found under the "datasets" +folder in the [package samples][samples]. + + + +```python +print( + "Upload a single file and create a new Dataset to reference the file. Here we explicitly specify the dataset version." +) +dataset: DatasetVersion = project_client.datasets.upload_file_and_create( + name=dataset_name, + version=dataset_version, + file="sample_folder/sample_file1.txt", +) +print(dataset) + +""" +print("Upload all files in a folder (including subfolders) to the existing Dataset to reference the folder. 
Here again we explicitly specify the a new dataset version") +dataset = project_client.datasets.upload_folder_and_create( + name=dataset_name, + version="2", + folder="sample_folder", +) +print(dataset) + +print("Upload a single file to the existing dataset, while letting the service increment the version") +dataset: DatasetVersion = project_client.datasets.upload_file_and_create( + name=dataset_name, + file="sample_folder/file2.txt", +) +print(dataset) + +print("Get an existing Dataset version `1`:") +dataset = project_client.datasets.get_version(name=dataset_name, version="1") +print(dataset) + +print(f"Listing all versions of the Dataset named `{dataset_name}`:") +for dataset in project_client.datasets.list_versions(name=dataset_name): + print(dataset) + +print("List latest versions of all Datasets:") +for dataset in project_client.datasets.list_latest(): + print(dataset) + +print("Delete all Dataset versions created above:") +project_client.datasets.delete_version(name=dataset_name, version="1") +project_client.datasets.delete_version(name=dataset_name, version="2") +project_client.datasets.delete_version(name=dataset_name, version="3") +""" +``` + + + +### Indexes operations + +The code below shows some Indexes operations. Full samples can be found under the "indexes" +folder in the [package samples][samples]. 
+ + + +```python +print(f"Create an Index named `{index_name}` referencing an existing AI Search resource:") +index = project_client.indexes.create_version( + name=index_name, + version=index_version, + body=AzureAISearchIndex(connection_name=ai_search_connection_name, index_name=ai_search_index_name), +) +print(index) + + +print(f"Get an existing Index named `{index_name}`, version `{index_version}`:") +index = project_client.indexes.get_version(name=index_name, version=index_version) +print(index) + +print(f"Listing all versions of the Index named `{index_name}`:") +for index in project_client.indexes.list_versions(name=index_name): + print(index) + +print("List latest versions of all Indexes:") +for index in project_client.indexes.list_latest(): + print(index) + +print("Delete the Index versions created above:") +project_client.indexes.delete_version(name=index_name, version="1") +project_client.indexes.delete_version(name=index_name, version="2") +``` + + + +### Evaluation + +Evaluation in Azure AI Project client library is designed to assess the performance of generative AI applications in the cloud. The output of Generative AI application is quantitatively measured with mathematical based metrics, AI-assisted quality and safety metrics. Metrics are defined as evaluators. Built-in or custom evaluators can provide comprehensive insights into the application's capabilities and limitations. + +#### Evaluator + +Evaluators are custom or prebuilt classes or functions that are designed to measure the quality of the outputs from language models or generative AI applications. + +Evaluators are made available via [azure-ai-evaluation][azure_ai_evaluation] SDK for local experience and also in [Evaluator Library][evaluator_library] in Azure AI Foundry for using them in the cloud. + +More details on built-in and custom evaluators can be found [here][evaluators].
+ +#### Run Evaluation in the cloud + +To run evaluation in the cloud the following are needed: + +- Evaluators +- Data to be evaluated +- [Optional] Azure Open AI model. + +##### Evaluators + +For running evaluator in the cloud, evaluator `ID` is needed. To get it via code you use [azure-ai-evaluation][azure_ai_evaluation] + +```python +# pip install azure-ai-evaluation + +from azure.ai.evaluation import RelevanceEvaluator + +evaluator_id = RelevanceEvaluator.id +``` + +##### Data to be evaluated + +Evaluation in the cloud supports data in the form of a `jsonl` file. Data can be uploaded via the helper method `upload_file` on the project client. + +```python +# Upload data for evaluation and get dataset id +data_id, _ = project_client.upload_file("") +``` + +##### [Optional] Azure OpenAI Model + +Azure AI Foundry project comes with a default Azure Open AI endpoint which can be easily accessed using the following code. This gives you the endpoint details for your Azure OpenAI endpoint. Some of the evaluators need a model that supports chat completion.
+ +```python +default_connection = project_client.connections.get_default(connection_type=ConnectionType.AZURE_OPEN_AI) +``` + +##### Example Remote Evaluation + +```python +import os +from azure.ai.projects import AIProjectClient +from azure.identity import DefaultAzureCredential +from azure.ai.projects.models import Evaluation, Dataset, EvaluatorConfiguration, ConnectionType +from azure.ai.evaluation import F1ScoreEvaluator, RelevanceEvaluator, ViolenceEvaluator + + +# Create project client +project_client = AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=os.environ["PROJECT_CONNECTION_STRING"], +) + +# Upload data for evaluation and get dataset id +data_id, _ = project_client.upload_file("") + +deployment_name = "" +api_version = "" + +# Create an evaluation +evaluation = Evaluation( + display_name="Remote Evaluation", + description="Evaluation of dataset", + data=Dataset(id=data_id), + evaluators={ + "f1_score": EvaluatorConfiguration( + id=F1ScoreEvaluator.id, + ), + "relevance": EvaluatorConfiguration( + id=RelevanceEvaluator.id, + init_params={ + "model_config": default_connection.to_evaluator_model_config( + deployment_name=deployment_name, api_version=api_version + ) + }, + ), + "violence": EvaluatorConfiguration( + id=ViolenceEvaluator.id, + init_params={"azure_ai_project": project_client.scope}, + ), + }, +) + + +evaluation_response = project_client.evaluations.create( + evaluation=evaluation, +) + +# Get evaluation +get_evaluation_response = project_client.evaluations.get(evaluation_response.id) + +print("----------------------------------------------------------------") +print("Created evaluation, evaluation ID: ", get_evaluation_response.id) +print("Evaluation status: ", get_evaluation_response.status) +if isinstance(get_evaluation_response.properties, dict): + print("AI Foundry URI: ", get_evaluation_response.properties["AiStudioEvaluationUri"])
+print("----------------------------------------------------------------") +``` + +NOTE: For running evaluators locally refer to [Evaluate with the Azure AI Evaluation SDK][evaluators]. + +## Troubleshooting + +### Exceptions + +Client methods that make service calls raise an [HttpResponseError](https://learn.microsoft.com/python/api/azure-core/azure.core.exceptions.httpresponseerror) exception for a non-success HTTP status code response from the service. The exception's `status_code` will hold the HTTP response status code (with `reason` showing the friendly name). The exception's `error.message` contains a detailed message that may be helpful in diagnosing the issue: + +```python +from azure.core.exceptions import HttpResponseError + +... + +try: + result = project_client.connections.list() +except HttpResponseError as e: + print(f"Status code: {e.status_code} ({e.reason})") + print(e.message) +``` + +For example, when you provide wrong credentials: + +```text +Status code: 401 (Unauthorized) +Operation returned an invalid status 'Unauthorized' +``` + +### Logging + +The client uses the standard [Python logging library](https://docs.python.org/3/library/logging.html). The SDK logs HTTP request and response details, which may be useful in troubleshooting. To log to stdout, add the following at the top of your Python script: + +```python +import sys +import logging + +# Acquire the logger for this client library. Use 'azure' to affect both +# 'azure.core` and `azure.ai.inference' libraries. +logger = logging.getLogger("azure") + +# Set the desired logging level. logging.INFO or logging.DEBUG are good options. +logger.setLevel(logging.DEBUG) + +# Direct logging output to stdout: +handler = logging.StreamHandler(stream=sys.stdout) +# Or direct logging output to a file: +# handler = logging.FileHandler(filename="sample.log") +logger.addHandler(handler) + +# Optional: change the default logging format. Here we add a timestamp. 
+#formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") +#handler.setFormatter(formatter) +``` + +By default logs redact the values of URL query strings, the values of some HTTP request and response headers (including `Authorization` which holds the key or token), and the request and response payloads. To create logs without redaction, add `logging_enable = True` to the client constructor: + +```python +project_client = AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=os.environ["PROJECT_CONNECTION_STRING"], + logging_enable = True +) +``` + +Note that the log level must be set to `logging.DEBUG` (see above code). Logs will be redacted with any other log level. + +Be sure to protect non redacted logs to avoid compromising security. + +For more information, see [Configure logging in the Azure libraries for Python](https://aka.ms/azsdk/python/logging) + +### Reporting issues + +To report an issue with the client library, or request additional features, please open a GitHub issue [here](https://github.com/Azure/azure-sdk-for-python/issues). Mention the package name "azure-ai-projects" in the title or content. + +## Next steps + +Have a look at the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-projects/samples) folder, containing fully runnable Python code for synchronous and asynchronous clients. + +Explore the [AI Starter Template](https://aka.ms/azsdk/azure-ai-projects/python/ai-starter-template). This template creates an Azure AI Foundry hub, project and connected resources including Azure OpenAI Service, AI Search and more. It also deploys a simple chat application to Azure Container Apps. + +## Contributing + +This project welcomes contributions and suggestions. Most contributions require +you to agree to a Contributor License Agreement (CLA) declaring that you have +the right to, and actually do, grant us the rights to use your contribution. 
+For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether +you need to provide a CLA and decorate the PR appropriately (e.g., label, +comment). Simply follow the instructions provided by the bot. You will only +need to do this once across all repos using our CLA. + +This project has adopted the +[Microsoft Open Source Code of Conduct][code_of_conduct]. For more information, +see the Code of Conduct FAQ or contact opencode@microsoft.com with any +additional questions or comments. + + +[samples]: https://aka.ms/azsdk/azure-ai-projects/python/samples/ +[code_of_conduct]: https://opensource.microsoft.com/codeofconduct/ +[entra_id]: https://learn.microsoft.com/azure/ai-services/authentication?tabs=powershell#authenticate-with-microsoft-entra-id +[azure_identity_credentials]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#credentials +[azure_identity_pip]: https://pypi.org/project/azure-identity/ +[default_azure_credential]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#defaultazurecredential +[pip]: https://pypi.org/project/pip/ +[azure_sub]: https://azure.microsoft.com/free/ +[evaluators]: https://learn.microsoft.com/azure/ai-studio/how-to/develop/evaluate-sdk +[azure_ai_evaluation]: https://learn.microsoft.com/python/api/overview/azure/ai-evaluation-readme +[evaluator_library]: https://learn.microsoft.com/azure/ai-studio/how-to/evaluate-generative-ai-app#view-and-manage-the-evaluators-in-the-evaluator-library \ No newline at end of file diff --git a/sdk/ai/azure-ai-projects-onedp/apiview-properties.json b/sdk/ai/azure-ai-projects-onedp/apiview-properties.json new file mode 100644 index 000000000000..14b0a819a529 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/apiview-properties.json @@ -0,0 +1,66 @@ +{ + "CrossLanguagePackageId": "Azure.AI.Projects", + "CrossLanguageDefinitionId": { + 
"azure.ai.projects.onedp.models.BaseCredentials": "Azure.AI.Projects.BaseCredentials", + "azure.ai.projects.onedp.models.ApiKeyCredentials": "Azure.AI.Projects.ApiKeyCredentials", + "azure.ai.projects.onedp.models.AssetCredentialResponse": "Azure.AI.Projects.AssetCredentialResponse", + "azure.ai.projects.onedp.models.Index": "Azure.AI.Projects.Index", + "azure.ai.projects.onedp.models.AzureAISearchIndex": "Azure.AI.Projects.AzureAISearchIndex", + "azure.ai.projects.onedp.models.BlobReferenceForConsumption": "Azure.AI.Projects.BlobReferenceForConsumption", + "azure.ai.projects.onedp.models.Connection": "Azure.AI.Projects.Connection", + "azure.ai.projects.onedp.models.CosmosDBIndex": "Azure.AI.Projects.CosmosDBIndex", + "azure.ai.projects.onedp.models.CustomCredential": "Azure.AI.Projects.CustomCredential", + "azure.ai.projects.onedp.models.DatasetVersion": "Azure.AI.Projects.DatasetVersion", + "azure.ai.projects.onedp.models.Deployment": "Azure.AI.Projects.Deployment", + "azure.ai.projects.onedp.models.EmbeddingConfiguration": "Azure.AI.Projects.EmbeddingConfiguration", + "azure.ai.projects.onedp.models.EntraIDCredentials": "Azure.AI.Projects.EntraIDCredentials", + "azure.ai.projects.onedp.models.Evaluation": "Azure.AI.Projects.Evaluation", + "azure.ai.projects.onedp.models.EvaluatorConfiguration": "Azure.AI.Projects.EvaluatorConfiguration", + "azure.ai.projects.onedp.models.FileDatasetVersion": "Azure.AI.Projects.FileDatasetVersion", + "azure.ai.projects.onedp.models.FolderDatasetVersion": "Azure.AI.Projects.FolderDatasetVersion", + "azure.ai.projects.onedp.models.InputData": "Azure.AI.Projects.InputData", + "azure.ai.projects.onedp.models.InputDataset": "Azure.AI.Projects.InputDataset", + "azure.ai.projects.onedp.models.ManagedAzureAISearchIndex": "Azure.AI.Projects.ManagedAzureAISearchIndex", + "azure.ai.projects.onedp.models.ModelDeployment": "Azure.AI.Projects.ModelDeployment", + "azure.ai.projects.onedp.models.NoAuthenticationCredentials": 
"Azure.AI.Projects.NoAuthenticationCredentials", + "azure.ai.projects.onedp.models.PendingUploadRequest": "Azure.AI.Projects.PendingUploadRequest", + "azure.ai.projects.onedp.models.PendingUploadResponse": "Azure.AI.Projects.PendingUploadResponse", + "azure.ai.projects.onedp.models.RedTeam": "Azure.AI.Projects.RedTeam", + "azure.ai.projects.onedp.models.SasCredential": "Azure.AI.Projects.SasCredential", + "azure.ai.projects.onedp.models.SASCredentials": "Azure.AI.Projects.SASCredentials", + "azure.ai.projects.onedp.models.Sku": "Azure.AI.Projects.Sku", + "azure.ai.projects.onedp.models.ConnectionType": "Azure.AI.Projects.ConnectionType", + "azure.ai.projects.onedp.models.CredentialType": "Azure.AI.Projects.CredentialType", + "azure.ai.projects.onedp.models.DatasetType": "Azure.AI.Projects.DatasetType", + "azure.ai.projects.onedp.models.ListViewType": "Azure.AI.Projects.ListViewType", + "azure.ai.projects.onedp.models.PendingUploadType": "Azure.AI.Projects.PendingUploadType", + "azure.ai.projects.onedp.models.IndexType": "Azure.AI.Projects.IndexType", + "azure.ai.projects.onedp.models.DeploymentType": "Azure.AI.Projects.DeploymentType", + "azure.ai.projects.onedp.models.AttackStrategy": "Azure.AI.Projects.AttackStrategy", + "azure.ai.projects.onedp.models.RiskCategory": "Azure.AI.Projects.RiskCategory", + "azure.ai.projects.onedp.AIProjectClient.connections.get": "Azure.AI.Projects.Connections.get", + "azure.ai.projects.onedp.AIProjectClient.connections.get_with_credentials": "Azure.AI.Projects.Connections.getWithCredentials", + "azure.ai.projects.onedp.AIProjectClient.connections.list": "Azure.AI.Projects.Connections.list", + "azure.ai.projects.onedp.AIProjectClient.connections.list_with_credentials": "Azure.AI.Projects.Connections.listWithCredentials", + "azure.ai.projects.onedp.AIProjectClient.evaluations.get": "Azure.AI.Projects.Evaluations.get", + "azure.ai.projects.onedp.AIProjectClient.evaluations.list": "Azure.AI.Projects.Evaluations.list", + 
"azure.ai.projects.onedp.AIProjectClient.evaluations.create_run": "Azure.AI.Projects.Evaluations.createRun", + "azure.ai.projects.onedp.AIProjectClient.datasets.list_versions": "Azure.AI.Projects.ServicePatterns.Datasets.listVersions", + "azure.ai.projects.onedp.AIProjectClient.datasets.list_latest": "Azure.AI.Projects.ServicePatterns.Datasets.listLatest", + "azure.ai.projects.onedp.AIProjectClient.datasets.get_version": "Azure.AI.Projects.ServicePatterns.Datasets.getVersion", + "azure.ai.projects.onedp.AIProjectClient.datasets.delete_version": "Azure.AI.Projects.ServicePatterns.Datasets.deleteVersion", + "azure.ai.projects.onedp.AIProjectClient.datasets.create_version": "Azure.AI.Projects.ServicePatterns.Datasets.createVersion", + "azure.ai.projects.onedp.AIProjectClient.datasets.start_pending_upload_version": "Azure.AI.Projects.Datasets.startPendingUploadVersion", + "azure.ai.projects.onedp.AIProjectClient.datasets.get_credentials": "Azure.AI.Projects.Datasets.getCredentials", + "azure.ai.projects.onedp.AIProjectClient.indexes.list_versions": "Azure.AI.Projects.ServicePatterns.Indexes.listVersions", + "azure.ai.projects.onedp.AIProjectClient.indexes.list_latest": "Azure.AI.Projects.ServicePatterns.Indexes.listLatest", + "azure.ai.projects.onedp.AIProjectClient.indexes.get_version": "Azure.AI.Projects.ServicePatterns.Indexes.getVersion", + "azure.ai.projects.onedp.AIProjectClient.indexes.delete_version": "Azure.AI.Projects.ServicePatterns.Indexes.deleteVersion", + "azure.ai.projects.onedp.AIProjectClient.indexes.create_version": "Azure.AI.Projects.ServicePatterns.Indexes.createVersion", + "azure.ai.projects.onedp.AIProjectClient.deployments.get": "Azure.AI.Projects.Deployments.get", + "azure.ai.projects.onedp.AIProjectClient.deployments.list": "Azure.AI.Projects.Deployments.list", + "azure.ai.projects.onedp.AIProjectClient.red_teams.get": "Azure.AI.Projects.RedTeams.get", + "azure.ai.projects.onedp.AIProjectClient.red_teams.list": 
"Azure.AI.Projects.RedTeams.list", + "azure.ai.projects.onedp.AIProjectClient.red_teams.create_run": "Azure.AI.Projects.RedTeams.createRun" + } +} \ No newline at end of file diff --git a/sdk/ai/azure-ai-projects-onedp/azure/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/__init__.py new file mode 100644 index 000000000000..743119593f69 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/__init__.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# --------------------------------------------------------------------------
# pylint: disable=wrong-import-position

from typing import TYPE_CHECKING

# Expose names re-exported by the handwritten customization module to static
# type checkers only; at runtime _patch is imported further below, after the
# generated client.
if TYPE_CHECKING:
    from ._patch import *  # pylint: disable=unused-wildcard-import

from ._client import AIProjectClient  # type: ignore
from ._version import VERSION

__version__ = VERSION

# Pull in handwritten customizations. The try/except tolerates a _patch
# module that defines no __all__ (note patch_sdk is imported unconditionally
# right after, so the module itself is expected to exist).
try:
    from ._patch import __all__ as _patch_all
    from ._patch import *
except ImportError:
    _patch_all = []
from ._patch import patch_sdk as _patch_sdk

__all__ = [
    "AIProjectClient",
]
# Re-export customization names, preserving order and skipping duplicates.
__all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore

# Hook for the customization module to adjust the generated surface at
# import time.
_patch_sdk()
class AIProjectClient:  # pylint: disable=too-many-instance-attributes
    """AIProjectClient.

    :ivar internal: InternalOperations operations
    :vartype internal: azure.ai.projects.onedp.operations.InternalOperations
    :ivar service_patterns: ServicePatternsOperations operations
    :vartype service_patterns: azure.ai.projects.onedp.operations.ServicePatternsOperations
    :ivar connections: ConnectionsOperations operations
    :vartype connections: azure.ai.projects.onedp.operations.ConnectionsOperations
    :ivar evaluations: EvaluationsOperations operations
    :vartype evaluations: azure.ai.projects.onedp.operations.EvaluationsOperations
    :ivar datasets: DatasetsOperations operations
    :vartype datasets: azure.ai.projects.onedp.operations.DatasetsOperations
    :ivar indexes: IndexesOperations operations
    :vartype indexes: azure.ai.projects.onedp.operations.IndexesOperations
    :ivar deployments: DeploymentsOperations operations
    :vartype deployments: azure.ai.projects.onedp.operations.DeploymentsOperations
    :ivar red_teams: RedTeamsOperations operations
    :vartype red_teams: azure.ai.projects.onedp.operations.RedTeamsOperations
    :param endpoint: Project endpoint. In the form
     "https://<account-name>.services.ai.azure.com/api/projects/_project"
     if your Foundry Hub has only one Project, or to use the default Project in your Hub. Or in the
     form
     "https://<account-name>.services.ai.azure.com/api/projects/<project-name>"
     if you want to explicitly
     specify the Foundry Project name. Required.
    :type endpoint: str
    :param credential: Credential used to authenticate requests to the service. Is either a key
     credential type or a token credential type. Required.
    :type credential: ~azure.core.credentials.AzureKeyCredential or
     ~azure.core.credentials.TokenCredential
    :keyword api_version: The API version to use for this operation. Default value is
     "2025-05-15-preview". Note that overriding this default value may result in unsupported
     behavior.
    :paramtype api_version: str
    """

    def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, "TokenCredential"], **kwargs: Any) -> None:
        # The base URL is kept as a template; the real endpoint is substituted
        # per-request via path_format_arguments (see send_request below).
        _endpoint = "{endpoint}"
        self._config = AIProjectClientConfiguration(endpoint=endpoint, credential=credential, **kwargs)
        # A caller-supplied "policies" list fully replaces the default pipeline.
        _policies = kwargs.pop("policies", None)
        if _policies is None:
            # List order is the pipeline order: request-id and header shaping
            # first, then decode/redirect/retry, then auth, hooks and logging.
            _policies = [
                policies.RequestIdPolicy(**kwargs),
                self._config.headers_policy,
                self._config.user_agent_policy,
                self._config.proxy_policy,
                policies.ContentDecodePolicy(**kwargs),
                self._config.redirect_policy,
                self._config.retry_policy,
                self._config.authentication_policy,
                self._config.custom_hook_policy,
                self._config.logging_policy,
                policies.DistributedTracingPolicy(**kwargs),
                # None when redirects are disabled; azure-core skips falsy
                # entries when building the pipeline.
                policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
                self._config.http_logging_policy,
            ]
        self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs)

        # Generated clients skip client-side validation; the service validates.
        self._serialize = Serializer()
        self._deserialize = Deserializer()
        self._serialize.client_side_validation = False
        self.internal = InternalOperations(self._client, self._config, self._serialize, self._deserialize)
        self.service_patterns = ServicePatternsOperations(
            self._client, self._config, self._serialize, self._deserialize
        )
        self.connections = ConnectionsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.evaluations = EvaluationsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.datasets = DatasetsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.indexes = IndexesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.deployments = DeploymentsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.red_teams = RedTeamsOperations(self._client, self._config, self._serialize, self._deserialize)

    def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse:
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")

        >>> response = client.send_request(request)

        For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.HttpResponse
        """

        # Copy so the caller's request object is never mutated.
        request_copy = deepcopy(request)
        path_format_arguments = {
            # skip_quote keeps the endpoint URL from being percent-encoded.
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }

        request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments)
        return self._client.send_request(request_copy, stream=stream, **kwargs)  # type: ignore

    def close(self) -> None:
        self._client.close()

    def __enter__(self) -> Self:
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details: Any) -> None:
        self._client.__exit__(*exc_details)
class AIProjectClientConfiguration:  # pylint: disable=too-many-instance-attributes
    """Configuration for AIProjectClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param endpoint: Project endpoint. In the form
     "https://<account-name>.services.ai.azure.com/api/projects/_project"
     if your Foundry Hub has only one Project, or to use the default Project in your Hub. Or in the
     form
     "https://<account-name>.services.ai.azure.com/api/projects/<project-name>"
     if you want to explicitly
     specify the Foundry Project name. Required.
    :type endpoint: str
    :param credential: Credential used to authenticate requests to the service. Is either a key
     credential type or a token credential type. Required.
    :type credential: ~azure.core.credentials.AzureKeyCredential or
     ~azure.core.credentials.TokenCredential
    :keyword api_version: The API version to use for this operation. Default value is
     "2025-05-15-preview". Note that overriding this default value may result in unsupported
     behavior.
    :paramtype api_version: str
    """

    def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, "TokenCredential"], **kwargs: Any) -> None:
        api_version: str = kwargs.pop("api_version", "2025-05-15-preview")

        if endpoint is None:
            raise ValueError("Parameter 'endpoint' must not be None.")
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")

        self.endpoint = endpoint
        self.credential = credential
        self.api_version = api_version
        # Default token scope for the service; callers may override.
        self.credential_scopes = kwargs.pop("credential_scopes", ["https://cognitiveservices.azure.com/.default"])
        # sdk_moniker feeds the User-Agent string built by UserAgentPolicy.
        kwargs.setdefault("sdk_moniker", "ai-projects-onedp/{}".format(VERSION))
        self.polling_interval = kwargs.get("polling_interval", 30)
        self._configure(**kwargs)

    def _infer_policy(self, **kwargs):
        # API keys are sent as an "Authorization: Bearer <key>" header.
        if isinstance(self.credential, AzureKeyCredential):
            return policies.AzureKeyCredentialPolicy(self.credential, "Authorization", prefix="Bearer", **kwargs)
        # Anything exposing get_token() is treated as a token credential.
        if hasattr(self.credential, "get_token"):
            return policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
        raise TypeError(f"Unsupported credential: {self.credential}")

    def _configure(self, **kwargs: Any) -> None:
        # Caller-supplied policies (passed as keyword arguments) take
        # precedence over the azure-core defaults constructed here.
        self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
        self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
        self.authentication_policy = kwargs.get("authentication_policy")
        # Infer an auth policy from the credential only when the caller did
        # not provide one explicitly.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = self._infer_policy(**kwargs)
+# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from collections.abc import MutableMapping +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S' + + Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython + + :param timedelta td: The timedelta to convert + :rtype: str + :return: ISO8601 version of this timedelta + """ + + # Split seconds to larger units + seconds = td.total_seconds() + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + days, hours = divmod(hours, 24) + + days, hours, minutes = list(map(int, (days, hours, minutes))) + seconds = round(seconds, 6) + + # Build date + date_str = "" + if days: + date_str = "%sD" % days + + if hours or minutes or seconds: + # Build time + time_str = "T" + + # Hours + bigger_exists = date_str or hours + if bigger_exists: + time_str += "{:02}H".format(hours) + + # Minutes + bigger_exists = bigger_exists or minutes + if bigger_exists: + time_str += "{:02}M".format(minutes) + + # Seconds + try: + if seconds.is_integer(): + seconds_string = "{:02}".format(int(seconds)) + else: + # 9 chars long w/ leading 0, 6 digits after decimal + seconds_string = "%09.6f" % seconds + # Remove trailing zeros + seconds_string = seconds_string.rstrip("0") + except AttributeError: # int.is_integer() raises + seconds_string = "{:02}".format(seconds) + + time_str += "{}S".format(seconds_string) + else: + time_str = "" + + return "P" + date_str + time_str + + +def _serialize_bytes(o, format: typing.Optional[str] = None) -> str: + encoded = base64.b64encode(o).decode() + if format == "base64url": + return encoded.strip("=").replace("+", "-").replace("/", "_") + return encoded + + +def _serialize_datetime(o, format: typing.Optional[str] = None): + if hasattr(o, "year") and hasattr(o, "hour"): + if format == "rfc7231": + return email.utils.format_datetime(o, usegmt=True) + if format == "unix-timestamp": + return int(calendar.timegm(o.utctimetuple())) + + # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set) + if not o.tzinfo: + iso_formatted = 
class SdkJSONEncoder(JSONEncoder):
    """A JSON encoder that's capable of serializing datetime objects and bytes."""

    def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs):
        super().__init__(*args, **kwargs)
        # When True, model properties whose visibility is ["read"] are
        # dropped from the output (they cannot be sent back to the service).
        self.exclude_readonly = exclude_readonly
        # Wire format hint for bytes/datetime values (e.g. "base64url",
        # "rfc7231", "unix-timestamp"); forwarded to the serializers below.
        self.format = format

    def default(self, o):  # pylint: disable=too-many-return-statements
        # Generated models serialize as their underlying dict.
        if _is_model(o):
            if self.exclude_readonly:
                readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
                return {k: v for k, v in o.items() if k not in readonly_props}
            return dict(o.items())
        try:
            # Let the base encoder handle anything it natively supports.
            return super(SdkJSONEncoder, self).default(o)
        except TypeError:
            # Fall through a chain of handlers, cheapest check first.
            if isinstance(o, _Null):
                return None
            if isinstance(o, decimal.Decimal):
                return float(o)
            if isinstance(o, (bytes, bytearray)):
                return _serialize_bytes(o, self.format)
            try:
                # First try datetime.datetime (duck-typed: AttributeError
                # means it was not a datetime/date/time).
                return _serialize_datetime(o, self.format)
            except AttributeError:
                pass
            # Last, try datetime.timedelta
            try:
                return _timedelta_as_isostr(o)
            except AttributeError:
                # This will be raised when it hits value.total_seconds in the method above
                pass
            # Nothing matched: defer to the base class again so the standard
            # TypeError for unserializable objects is raised.
            return super(SdkJSONEncoder, self).default(o)
def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
    """Deserialize ISO-8601 formatted string into Datetime object.

    :param str attr: response string to be deserialized.
    :rtype: ~datetime.datetime
    :returns: The datetime object from that input
    """
    if isinstance(attr, datetime):
        # i'm already deserialized
        return attr
    # Normalize lowercase designators ('z', 't') for the regex and parser.
    attr = attr.upper()
    match = _VALID_DATE.match(attr)
    if not match:
        raise ValueError("Invalid datetime string: " + attr)

    # Python datetimes carry at most microsecond precision, so truncate any
    # fractional-seconds run longer than 6 digits before parsing.
    check_decimal = attr.split(".")
    if len(check_decimal) > 1:
        decimal_str = ""
        for digit in check_decimal[1]:
            if digit.isdigit():
                decimal_str += digit
            else:
                break
        if len(decimal_str) > 6:
            # NOTE(review): str.replace rewrites the first occurrence of this
            # digit run anywhere in the string; assumes the run only appears
            # as the fraction -- confirm against possible inputs.
            attr = attr.replace(decimal_str, decimal_str[0:6])

    date_obj = isodate.parse_datetime(attr)
    # Guard the year range that utctimetuple()/downstream arithmetic accepts.
    test_utc = date_obj.utctimetuple()
    if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
        raise OverflowError("Hit max or min date")
    return date_obj
+ :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: 
class _MyMutableMapping(MutableMapping[str, typing.Any]):
    """Dict-backed MutableMapping: every operation delegates to the wrapped
    ``_data`` dict. Serves as the base storage for generated models.
    """

    def __init__(self, data: typing.Dict[str, typing.Any]) -> None:
        # The wrapped dict is held by reference, not copied.
        self._data = data

    def __contains__(self, key: typing.Any) -> bool:
        return key in self._data

    def __getitem__(self, key: str) -> typing.Any:
        return self._data.__getitem__(key)

    def __setitem__(self, key: str, value: typing.Any) -> None:
        self._data.__setitem__(key, value)

    def __delitem__(self, key: str) -> None:
        self._data.__delitem__(key)

    def __iter__(self) -> typing.Iterator[typing.Any]:
        return self._data.__iter__()

    def __len__(self) -> int:
        return self._data.__len__()

    def __ne__(self, other: typing.Any) -> bool:
        return not self.__eq__(other)

    def keys(self) -> typing.KeysView[str]:
        """
        :returns: a set-like object providing a view on D's keys
        :rtype: ~typing.KeysView
        """
        return self._data.keys()

    def values(self) -> typing.ValuesView[typing.Any]:
        """
        :returns: an object providing a view on D's values
        :rtype: ~typing.ValuesView
        """
        return self._data.values()

    def items(self) -> typing.ItemsView[str, typing.Any]:
        """
        :returns: set-like object providing a view on D's items
        :rtype: ~typing.ItemsView
        """
        return self._data.items()

    def get(self, key: str, default: typing.Any = None) -> typing.Any:
        """
        Get the value for key if key is in the dictionary, else default.
        :param str key: The key to look up.
        :param any default: The value to return if key is not in the dictionary. Defaults to None
        :returns: D[k] if k in D, else d.
        :rtype: any
        """
        try:
            return self[key]
        except KeyError:
            return default

    @typing.overload
    def pop(self, key: str) -> typing.Any: ...  # pylint: disable=arguments-differ

    @typing.overload
    def pop(self, key: str, default: _T) -> _T: ...  # pylint: disable=signature-differs

    @typing.overload
    def pop(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs

    def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
        """
        Removes specified key and return the corresponding value.
        :param str key: The key to pop.
        :param any default: The value to return if key is not in the dictionary
        :returns: The value corresponding to the key.
        :rtype: any
        :raises KeyError: If key is not found and default is not given.
        """
        # _UNSET sentinel distinguishes "no default supplied" (raise
        # KeyError) from an explicit default of None.
        if default is _UNSET:
            return self._data.pop(key)
        return self._data.pop(key, default)

    def popitem(self) -> typing.Tuple[str, typing.Any]:
        """
        Removes and returns some (key, value) pair
        :returns: The (key, value) pair.
        :rtype: tuple
        :raises KeyError: if D is empty.
        """
        return self._data.popitem()

    def clear(self) -> None:
        """
        Remove all items from D.
        """
        self._data.clear()

    def update(self, *args: typing.Any, **kwargs: typing.Any) -> None:  # pylint: disable=arguments-differ
        """
        Updates D from mapping/iterable E and F.
        :param any args: Either a mapping object or an iterable of key-value pairs.
        """
        self._data.update(*args, **kwargs)

    @typing.overload
    def setdefault(self, key: str, default: None = None) -> None: ...

    @typing.overload
    def setdefault(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs

    def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
        """
        Same as calling D.get(k, d), and setting D[k]=d if k not found
        :param str key: The key to look up.
        :param any default: The value to set if key is not in the dictionary
        :returns: D[k] if k in D, else d.
        :rtype: any
        """
        # Same _UNSET sentinel trick as pop(): None is a legal explicit default.
        if default is _UNSET:
            return self._data.setdefault(key)
        return self._data.setdefault(key, default)

    def __eq__(self, other: typing.Any) -> bool:
        # Coerce `other` through this class's constructor so a plain dict
        # compares equal to a model wrapping an equal dict; any construction
        # failure is treated as "not equal".
        try:
            other_model = self.__class__(other)
        except Exception:
            return False
        return self._data == other_model._data

    def __repr__(self) -> str:
        return str(self._data)
in the method above + pass + return o + + +def _get_rest_field( + attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str +) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: typing.Set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if 
prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. 
+ raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in 
exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: typing.Dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: 
_deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: typing.List[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? 
+ try: + if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? + if getattr(annotation, "__origin__", None) is typing.Union: + # initial ordering is we make `string` the last deserialization option, because it is often them most generic + deserializers = [ + _get_deserialize_callable_from_annotation(arg, module, rf) + for arg in _sorted_annotations(annotation.__args__) # pyright: ignore + ] + + return functools.partial(_deserialize_with_union, deserializers) + + try: + if annotation._name == "Dict": # pyright: ignore + value_deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[1], module, rf # pyright: ignore + ) + + return functools.partial( + _deserialize_dict, + value_deserializer, + module, + ) + except (AttributeError, IndexError): + pass + try: + if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore + if len(annotation.__args__) > 1: # pyright: ignore + entry_deserializers = [ + _get_deserialize_callable_from_annotation(dt, module, rf) + for dt in annotation.__args__ # pyright: ignore + ] + return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module) + deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[0], module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_sequence, 
deserializer, module) + except (TypeError, IndexError, AttributeError, SyntaxError): + pass + + def _deserialize_default( + deserializer, + obj, + ): + if obj is None: + return obj + try: + return _deserialize_with_callable(deserializer, obj) + except Exception: + pass + return obj + + if get_deserializer(annotation, rf): + return functools.partial(_deserialize_default, get_deserializer(annotation, rf)) + + return functools.partial(_deserialize_default, annotation) + + +def _deserialize_with_callable( + deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], + value: typing.Any, +): # pylint: disable=too-many-return-statements + try: + if value is None or isinstance(value, _Null): + return None + if isinstance(value, ET.Element): + if deserializer is str: + return value.text or "" + if deserializer is int: + return int(value.text) if value.text else None + if deserializer is float: + return float(value.text) if value.text else None + if deserializer is bool: + return value.text == "true" if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value) + except ValueError: + # for unknown value, return raw value + return value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + 
deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + +def _failsafe_deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, value, module, rf, format) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + value: typing.Any, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, value) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, 
obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, 
exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, typing.List[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + 
wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[typing.Dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + 
if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: typing.Dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: typing.List[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch.py new file mode 100644 index 000000000000..37c3044466c1 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch.py @@ -0,0 +1,70 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List, Any, Union, Optional +from azure.core.credentials import AzureKeyCredential, TokenCredential +from ._client import AIProjectClient as AIProjectClientGenerated +from .operations import TelemetryOperations, InferenceOperations, AssistantsOperations +from ._patch_prompts import PromptTemplate + + +class AIProjectClient(AIProjectClientGenerated): # pylint: disable=too-many-instance-attributes + """AIProjectClient. + + :ivar connections: ConnectionsOperations operations + :vartype connections: azure.ai.projects.onedp.operations.ConnectionsOperations + :ivar assistants: AssistantsOperations operations + :vartype assistants: azure.ai.projects.onedp.operations.AssistantsOperations + :ivar inference: InferenceOperations operations + :vartype inference: azure.ai.projects.onedp.operations.InferenceOperations + :ivar telemetry: TelemetryOperations operations + :vartype telemetry: azure.ai.projects.onedp.operations.TelemetryOperations + :ivar evaluations: EvaluationsOperations operations + :vartype evaluations: azure.ai.projects.onedp.operations.EvaluationsOperations + :ivar datasets: DatasetsOperations operations + :vartype datasets: azure.ai.projects.onedp.operations.DatasetsOperations + :ivar indexes: IndexesOperations operations + :vartype indexes: azure.ai.projects.onedp.operations.IndexesOperations + :ivar deployments: DeploymentsOperations operations + :vartype deployments: azure.ai.projects.onedp.operations.DeploymentsOperations + :ivar red_teams: RedTeamsOperations operations + :vartype red_teams: azure.ai.projects.onedp.operations.RedTeamsOperations + :param endpoint: Project endpoint in the form of: + https://.services.ai.azure.com/api/projects/. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Is either a key + credential type or a token credential type. Required. 
+ :type credential: ~azure.core.credentials.AzureKeyCredential or + ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2025-05-15-preview". Note that overriding this default value may result in unsupported + behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, TokenCredential], **kwargs: Any) -> None: + self._user_agent: Optional[str] = kwargs.get("user_agent", None) + super().__init__(endpoint=endpoint, credential=credential, **kwargs) + self.telemetry = TelemetryOperations(self) + self.inference = InferenceOperations(self) + self.assistants = AssistantsOperations(self) + + +__all__: List[str] = [ + "AIProjectClient", + "PromptTemplate", +] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py new file mode 100644 index 000000000000..36d66f1c7bf1 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py @@ -0,0 +1,187 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +# pylint: disable=line-too-long,R,no-member +"""Customize generated code here. 
"""Customize generated code here.

Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""

import sys
import traceback
from pathlib import Path
from typing import Any, Dict, List, Optional


class PromptTemplate:
    """Turn a Prompty file or a plain template string into chat-message dicts.

    The third-party ``prompty`` package does the actual parsing/rendering
    (``pip install prompty``); it is imported lazily so this module stays
    importable when the package is absent.
    """

    _MISSING_PROMPTY_PACKAGE_MESSAGE = (
        "The 'prompty' package is required in order to use the 'PromptTemplate' class. "
        "Please install it by running 'pip install prompty'."
    )

    @classmethod
    def from_prompty(cls, file_path: str) -> "PromptTemplate":
        """Initialize a PromptTemplate object from a prompty file.

        :param file_path: The path to the prompty file, resolved relative to the caller's source file.
        :type file_path: str
        :return: The PromptTemplate object.
        :rtype: PromptTemplate
        :raises ValueError: If ``file_path`` is empty.
        :raises ImportError: If the ``prompty`` package is not installed.
        """
        if not file_path:
            raise ValueError("Please provide file_path")

        try:
            from prompty import load
        except ImportError as exc:
            raise ImportError(cls._MISSING_PROMPTY_PACKAGE_MESSAGE) from exc

        # Resolve file_path relative to the *caller's* file: frame -2 of the
        # extracted stack is the function that invoked from_prompty (-1 is here).
        call_stack = traceback.extract_stack()
        caller_file = Path(call_stack[-2].filename)
        resolved_path = (caller_file.parent / Path(file_path)).resolve().absolute()

        prompty_obj = load(str(resolved_path))
        prompty_obj.template.type = "mustache"  # For Azure, default to mustache instead of Jinja2
        return cls(prompty=prompty_obj)

    @classmethod
    def from_string(cls, prompt_template: str, api: str = "chat", model_name: Optional[str] = None) -> "PromptTemplate":
        """Initialize a PromptTemplate object from a message template string.

        :param prompt_template: The prompt template string.
        :type prompt_template: str
        :param api: The API type, e.g. "chat" or "completion".
        :type api: str
        :param model_name: The model name, e.g. "gpt-4o-mini".
        :type model_name: str
        :return: The PromptTemplate object.
        :rtype: PromptTemplate
        :raises ImportError: If the ``prompty`` package is not installed.
        """
        try:
            from prompty import headless
        except ImportError as exc:
            raise ImportError(cls._MISSING_PROMPTY_PACKAGE_MESSAGE) from exc

        dedented = cls._remove_leading_empty_space(prompt_template)
        prompty_obj = headless(api=api, content=dedented)
        prompty_obj.template.type = "mustache"  # For Azure, default to mustache instead of Jinja2
        prompty_obj.template.parser = "prompty"
        return cls(
            api=api,
            model_name=model_name,
            prompty=prompty_obj,
        )

    @classmethod
    def _remove_leading_empty_space(cls, multiline_str: str) -> str:
        """Drop leading blank lines and dedent by the smallest common indent.

        Indentation width counts a tab remaining after ``lstrip`` (i.e. inside
        the content) as two spaces — this mirrors the original heuristic.

        :param multiline_str: The input multiline string.
        :type multiline_str: str
        :return: The processed multiline string.
        :rtype: str
        """
        lines = multiline_str.splitlines()

        # Skip blank lines at the very top.
        first = 0
        while first < len(lines) and not lines[first].strip():
            first += 1
        body = lines[first:]

        def _indent_width(line: str) -> int:
            stripped = line.lstrip()
            return (len(line) - len(stripped)) + stripped.count("\t") * 2

        # Smallest indent across non-blank lines; sys.maxsize when there are
        # none, which slices every (blank) line down to "".
        common = min(
            (_indent_width(line) for line in body if line.strip()),
            default=sys.maxsize,
        )

        return "\n".join(line[common:] for line in body)

    def __init__(
        self,
        *,
        api: str = "chat",
        prompty: Optional["Prompty"] = None,  # type: ignore[name-defined]
        prompt_template: Optional[str] = None,
        model_name: Optional[str] = None,
    ) -> None:
        """Create a PromptTemplate object.

        :keyword api: The API type.
        :paramtype api: str
        :keyword prompty: Optional Prompty object.
        :paramtype prompty: ~prompty.Prompty or None.
        :keyword prompt_template: Optional prompt template string.
        :paramtype prompt_template: str or None.
        :keyword model_name: Optional AI Model name.
        :paramtype model_name: str or None.
        :raises ValueError: If neither ``prompty`` nor ``prompt_template`` is given.
        """
        self.prompty = prompty
        if self.prompty is not None:
            configuration = self.prompty.model.configuration
            # Azure deployments carry the model under the "azure_deployment" key.
            self.model_name = configuration["azure_deployment"] if "azure_deployment" in configuration else None
            self.parameters = self.prompty.model.parameters
            # _config is a dict holding the internal configuration.
            self._config: Dict[str, Any] = {}
        elif prompt_template is not None:
            self.model_name = model_name
            self.parameters = {}
            self._config = {
                "api": api if api is not None else "chat",
                "prompt_template": prompt_template,
            }
        else:
            raise ValueError("Please pass valid arguments for PromptTemplate")

    def create_messages(self, data: Optional[Dict[str, Any]] = None, **kwargs) -> List[Dict[str, Any]]:
        """Render the prompt template with the given data.

        When ``data`` is None, the keyword arguments are used as the data.

        :param data: The data to render the prompt template with.
        :type data: Optional[Dict[str, Any]]
        :return: The rendered prompt template.
        :rtype: List[Dict[str, Any]]
        :raises ImportError: If the ``prompty`` package is not installed.
        :raises ValueError: If this template was not built from a Prompty object.
        """
        try:
            from prompty import prepare
        except ImportError as exc:
            raise ImportError(self._MISSING_PROMPTY_PACKAGE_MESSAGE) from exc

        template_data = kwargs if data is None else data

        if self.prompty is None:
            raise ValueError("Please provide valid prompt template")
        return prepare(self.prompty, template_data)  # type: ignore


def patch_sdk():
    """Do not remove from this file.

    `patch_sdk` is a last resort escape hatch that allows you to do customizations
    you can't accomplish using the techniques described in
    https://aka.ms/azsdk/python/dpcodegen/python/customize
    """
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_serialization.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_serialization.py new file mode 100644 index 000000000000..eb86ea23c965 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_serialization.py @@ -0,0 +1,2032 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + MutableMapping, + List, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore +from typing_extensions import Self + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... 
_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")


class RawDeserializer:

    # Accept "text" because we're open minded people...
    JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")

    # Name used in context
    CONTEXT_NAME = "deserialized_data"

    @classmethod
    def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
        """Decode data according to content-type.

        Accept a stream of data as well, but will be load at once in memory for now.

        If no content-type, will return the string version (not bytes, not stream)

        :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
        :type data: str or bytes or IO
        :param str content_type: The content type.
        :return: The deserialized data.
        :rtype: object
        """
        if hasattr(data, "read"):
            # Stream-like input: read it all into memory.
            data = cast(IO, data).read()

        if isinstance(data, bytes):
            text = data.decode(encoding="utf-8-sig")
        else:
            # Explain to mypy the correct type.
            text = cast(str, data)

        # Strip a leading byte-order mark if one survived decoding.
        text = text.lstrip(_BOM)

        if content_type is None:
            return data

        if cls.JSON_REGEXP.match(content_type):
            try:
                return json.loads(text)
            except ValueError as err:
                raise DeserializationError("JSON is invalid: {}".format(err), err) from err
        elif "xml" in (content_type or []):
            try:
                try:
                    # Python 2 compatibility: "fromstring" rejects unicode XML.
                    if isinstance(data, unicode):  # type: ignore
                        text = text.encode(encoding="utf-8")  # type: ignore
                except NameError:
                    pass
                return ET.fromstring(text)  # nosec
            except ET.ParseError as err:
                # The server may have returned JSON mislabelled as XML; attempt a
                # JSON load before surfacing the original XML parse failure.
                def _try_json(raw):
                    try:
                        return True, json.loads(raw)
                    except ValueError:
                        return False, None  # Don't care about this one

                ok, as_json = _try_json(data)
                if ok:
                    return as_json
                # Neither XML nor JSON: re-raise with the XML error as context.
                _LOGGER.critical("Wasn't XML not JSON, failing")
                raise DeserializationError("XML is invalid") from err
        elif content_type.startswith("text/"):
            return text
        raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))

    @classmethod
    def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
        """Deserialize from HTTP response.

        Uses only bytes and headers, so it stays independent of any particular
        HTTP transport (requests/aiohttp/...).

        :param bytes body_bytes: The body of the response.
        :param dict headers: The headers of the response.
        :returns: The deserialized data.
        :rtype: object
        """
        # Prefer the declared content-type; with none declared, assume JSON
        # (Autorest historically considered an empty body valid JSON).
        if "content-type" in headers:
            content_type = headers["content-type"].split(";")[0].strip().lower()
        else:
            content_type = "application/json"

        if body_bytes:
            return cls.deserialize_from_text(body_bytes, content_type)
        return None


_LOGGER = logging.getLogger(__name__)

try:
    _long_type = long  # type: ignore  # Python 2 ``long``; falls back to int on Python 3
except NameError:
    _long_type = int

TZ_UTC = datetime.timezone.utc

# NOTE(review): this regex was truncated in the patch view; reconstructed as
# msrest's canonical pattern that splits attribute keys on unescaped dots —
# confirm against the regenerated file.
_FLATTEN = re.compile(r"(?<!\\)\.")
None: + self.additional_properties: Optional[Dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. 
+ + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls, + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> Self: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. 
+ :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. 
Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, 
target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. + :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." 
+ raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec # pylint: disable=eval-used + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. + :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. 
+ + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. + :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + 
el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. 
+ + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. 
+ + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises TypeError: if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises SerializationError: if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." 
+ raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises SerializationError: if format invalid + :return: serialied unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(List[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. 
+ + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an 
attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer: + """Response object model deserializer. 
+ + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. 
+ :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
+ if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. 
If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. + :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. 
+ :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. + """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. 
+ :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises TypeError: if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. 
+ + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises TypeError: if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec # pylint: disable=eval-used + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. 
+ :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. 
+        :rtype: decimal.Decimal
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            return decimal.Decimal(str(attr))  # type: ignore
+        except decimal.DecimalException as err:
+            msg = "Invalid decimal {}".format(attr)
+            raise DeserializationError(msg) from err
+
+    @staticmethod
+    def deserialize_long(attr):
+        """Deserialize string into long (Py2) or int (Py3).
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized int
+        :rtype: long or int
+        :raises ValueError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        return _long_type(attr)  # type: ignore
+
+    @staticmethod
+    def deserialize_duration(attr):
+        """Deserialize ISO-8601 formatted string into TimeDelta object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized duration
+        :rtype: TimeDelta
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            duration = isodate.parse_duration(attr)
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize duration object."
+            raise DeserializationError(msg) from err
+        return duration
+
+    @staticmethod
+    def deserialize_date(attr):
+        """Deserialize ISO-8601 formatted string into Date object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized date
+        :rtype: Date
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        # Reject any alphabetic character up front: a date here must be digits and separators.
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+        # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
+        return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
+
+    @staticmethod
+    def deserialize_time(attr):
+        """Deserialize ISO-8601 formatted string into time object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized time
+        :rtype: datetime.time
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        # NOTE(review): the error message below says "Date" but this rejects
+        # invalid *time* strings — message copied from deserialize_date.
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+        return isodate.parse_time(attr)
+
+    @staticmethod
+    def deserialize_rfc(attr):
+        """Deserialize RFC-1123 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized RFC datetime
+        :rtype: Datetime
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            parsed_date = email.utils.parsedate_tz(attr)  # type: ignore
+            # parsed_date[9] is the UTC offset in seconds (or None); convert to
+            # minutes for timedelta to build the timezone.
+            date_obj = datetime.datetime(
+                *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+            )
+            if not date_obj.tzinfo:
+                date_obj = date_obj.astimezone(tz=TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to rfc datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
+
+    @staticmethod
+    def deserialize_iso(attr):
+        """Deserialize ISO-8601 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :return: Deserialized ISO datetime
+        :rtype: Datetime
+        :raises DeserializationError: if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            attr = attr.upper()  # type: ignore
+            match = Deserializer.valid_date.match(attr)
+            if not match:
+                raise ValueError("Invalid datetime string: " + attr)
+
+            # Truncate the fractional-second part to 6 digits (microsecond
+            # precision), the maximum datetime.datetime supports.
+            check_decimal = attr.split(".")
+            if len(check_decimal) > 1:
+                decimal_str = ""
+                for digit in check_decimal[1]:
+                    if digit.isdigit():
+                        decimal_str += digit
+                    else:
+                        break
+                if len(decimal_str) > 6:
+                    attr = attr.replace(decimal_str, decimal_str[0:6])
+
+            date_obj = isodate.parse_datetime(attr)
+            test_utc = date_obj.utctimetuple()
+            if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
+
+    @staticmethod
+    def deserialize_unix(attr):
+        """Deserialize a unix timestamp into a Datetime object.
+        The timestamp is represented as seconds since the epoch, in UTC.
+
+        :param int attr: unix timestamp to be deserialized.
+        :return: Deserialized datetime
+        :rtype: Datetime
+        :raises DeserializationError: if format invalid
+        """
+        if isinstance(attr, ET.Element):
+            attr = int(attr.text)  # type: ignore
+        try:
+            attr = int(attr)
+            date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to unix datetime object."
+            raise DeserializationError(msg) from err
+        return date_obj
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_validation.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_validation.py
new file mode 100644
index 000000000000..752b2822f9d3
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_validation.py
@@ -0,0 +1,50 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+
+
+def api_version_validation(**kwargs):
+    # Decorator factory that guards generated operations against being called
+    # with an older service api-version than the one they were added in.
+    # NOTE(review): api-versions are compared as plain strings (lexicographic);
+    # this relies on the date-based "YYYY-MM-DD[-suffix]" format sorting correctly.
+    params_added_on = kwargs.pop("params_added_on", {})
+    method_added_on = kwargs.pop("method_added_on", "")
+
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            try:
+                # this assumes the client has an _api_version attribute
+                client = args[0]
+                client_api_version = client._config.api_version  # pylint: disable=protected-access
+            except AttributeError:
+                # No configured api-version available: skip validation entirely.
+                return func(*args, **kwargs)
+
+            if method_added_on > client_api_version:
+                raise ValueError(
+                    f"'{func.__name__}' is not available in API version "
+                    f"{client_api_version}. Pass service API version {method_added_on} or newer to your client."
+                )
+
+            # Collect keyword arguments that were introduced after the client's api-version.
+            unsupported = {
+                parameter: api_version
+                for api_version, parameters in params_added_on.items()
+                for parameter in parameters
+                if parameter in kwargs and api_version > client_api_version
+            }
+            if unsupported:
+                raise ValueError(
+                    "".join(
+                        [
+                            f"'{param}' is not available in API version {client_api_version}. "
+                            f"Use service API version {version} or newer.\n"
+                            for param, version in unsupported.items()
+                        ]
+                    )
+                )
+            return func(*args, **kwargs)
+
+        return wrapper
+
+    return decorator
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_vendor.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_vendor.py
new file mode 100644
index 000000000000..e6f010934827
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_vendor.py
@@ -0,0 +1,50 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import json
+from typing import Any, Dict, IO, List, Mapping, Optional, Tuple, Union
+
+from ._model_base import Model, SdkJSONEncoder
+
+
+# file-like tuple could be `(filename, IO (or bytes))` or `(filename, IO (or bytes), content_type)`
+FileContent = Union[str, bytes, IO[str], IO[bytes]]
+
+FileType = Union[
+    # file (or bytes)
+    FileContent,
+    # (filename, file (or bytes))
+    Tuple[Optional[str], FileContent],
+    # (filename, file (or bytes), content_type)
+    Tuple[Optional[str], FileContent, Optional[str]],
+]
+
+
+def serialize_multipart_data_entry(data_entry: Any) -> Any:
+    # Structured values (lists/tuples/dicts and generated models) are encoded
+    # as JSON strings; everything else passes through unchanged.
+    if isinstance(data_entry, (list, tuple, dict, Model)):
+        return json.dumps(data_entry, cls=SdkJSONEncoder, exclude_readonly=True)
+    return data_entry
+
+
+def prepare_multipart_form_data(
+    body: Mapping[str, Any], multipart_fields: List[str], data_fields: List[str]
+) -> Tuple[List[FileType], Dict[str, Any]]:
+    # Split a request body into file parts and plain form-data parts for a
+    # multipart/form-data request.
+    # NOTE(review): present-but-falsy entries (e.g. empty bytes, 0) are skipped
+    # by the truthiness checks below — confirm this is intentional.
+    files: List[FileType] = []
+    data: Dict[str, Any] = {}
+    for multipart_field in multipart_fields:
+        multipart_entry = body.get(multipart_field)
+        if isinstance(multipart_entry, list):
+            files.extend([(multipart_field, e) for e in multipart_entry])
+        elif multipart_entry:
+            files.append((multipart_field, multipart_entry))
+
+    for data_field in data_fields:
+        data_entry = body.get(data_field)
+        if data_entry:
+            data[data_field] = serialize_multipart_data_entry(data_entry)
+
+    return files, data
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_version.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_version.py
new file mode 100644
index 000000000000..9ab0a006e0d0
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_version.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+VERSION = "1.0.0b10"
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/__init__.py
new file mode 100644
index 000000000000..d5beb6bf7f83
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/__init__.py
@@ -0,0 +1,29 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from ._patch import *  # pylint: disable=unused-wildcard-import
+
+from ._client import AIProjectClient  # type: ignore
+
+# Hand-written customizations in _patch.py (if any) override / extend the
+# generated exports below.
+try:
+    from ._patch import __all__ as _patch_all
+    from ._patch import *
+except ImportError:
+    _patch_all = []
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+    "AIProjectClient",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
+
+_patch_sdk()
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/_client.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/_client.py
new file mode 100644
index 000000000000..2e60e2ccf73f
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/_client.py
@@ -0,0 +1,146 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import Any, Awaitable, TYPE_CHECKING, Union
+from typing_extensions import Self
+
+from azure.core import AsyncPipelineClient
+from azure.core.credentials import AzureKeyCredential
+from azure.core.pipeline import policies
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+
+from .._serialization import Deserializer, Serializer
+from ._configuration import AIProjectClientConfiguration
+from .operations import (
+    ConnectionsOperations,
+    DatasetsOperations,
+    DeploymentsOperations,
+    EvaluationsOperations,
+    IndexesOperations,
+    InternalOperations,
+    RedTeamsOperations,
+    ServicePatternsOperations,
+)
+
+if TYPE_CHECKING:
+    from azure.core.credentials_async import AsyncTokenCredential
+
+
+class AIProjectClient:  # pylint: disable=too-many-instance-attributes
+    """AIProjectClient.
+
+    :ivar internal: InternalOperations operations
+    :vartype internal: azure.ai.projects.onedp.aio.operations.InternalOperations
+    :ivar service_patterns: ServicePatternsOperations operations
+    :vartype service_patterns: azure.ai.projects.onedp.aio.operations.ServicePatternsOperations
+    :ivar connections: ConnectionsOperations operations
+    :vartype connections: azure.ai.projects.onedp.aio.operations.ConnectionsOperations
+    :ivar evaluations: EvaluationsOperations operations
+    :vartype evaluations: azure.ai.projects.onedp.aio.operations.EvaluationsOperations
+    :ivar datasets: DatasetsOperations operations
+    :vartype datasets: azure.ai.projects.onedp.aio.operations.DatasetsOperations
+    :ivar indexes: IndexesOperations operations
+    :vartype indexes: azure.ai.projects.onedp.aio.operations.IndexesOperations
+    :ivar deployments: DeploymentsOperations operations
+    :vartype deployments: azure.ai.projects.onedp.aio.operations.DeploymentsOperations
+    :ivar red_teams: RedTeamsOperations operations
+    :vartype red_teams: azure.ai.projects.onedp.aio.operations.RedTeamsOperations
+    :param endpoint: Project endpoint. In the form
+     "https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/_project"
+     if your Foundry Hub has only one Project, or to use the default Project in your Hub. Or in the
+     form
+     "https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>"
+     if you want to explicitly
+     specify the Foundry Project name. Required.
+    :type endpoint: str
+    :param credential: Credential used to authenticate requests to the service. Is either a key
+     credential type or a token credential type. Required.
+    :type credential: ~azure.core.credentials.AzureKeyCredential or
+     ~azure.core.credentials_async.AsyncTokenCredential
+    :keyword api_version: The API version to use for this operation. Default value is
+     "2025-05-15-preview". Note that overriding this default value may result in unsupported
+     behavior.
+    :paramtype api_version: str
+    """
+
+    def __init__(
+        self, endpoint: str, credential: Union[AzureKeyCredential, "AsyncTokenCredential"], **kwargs: Any
+    ) -> None:
+        _endpoint = "{endpoint}"
+        self._config = AIProjectClientConfiguration(endpoint=endpoint, credential=credential, **kwargs)
+        # Default pipeline policy chain; a caller-supplied "policies" kwarg
+        # replaces this list wholesale.
+        _policies = kwargs.pop("policies", None)
+        if _policies is None:
+            _policies = [
+                policies.RequestIdPolicy(**kwargs),
+                self._config.headers_policy,
+                self._config.user_agent_policy,
+                self._config.proxy_policy,
+                policies.ContentDecodePolicy(**kwargs),
+                self._config.redirect_policy,
+                self._config.retry_policy,
+                self._config.authentication_policy,
+                self._config.custom_hook_policy,
+                self._config.logging_policy,
+                policies.DistributedTracingPolicy(**kwargs),
+                policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+                self._config.http_logging_policy,
+            ]
+        self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs)
+
+        self._serialize = Serializer()
+        self._deserialize = Deserializer()
+        self._serialize.client_side_validation = False
+        self.internal = InternalOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.service_patterns = ServicePatternsOperations(
+            self._client, self._config, self._serialize, self._deserialize
+        )
+        self.connections = ConnectionsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.evaluations = EvaluationsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.datasets = DatasetsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.indexes = IndexesOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.deployments = DeploymentsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.red_teams = RedTeamsOperations(self._client, self._config, self._serialize, self._deserialize)
+
+    def send_request(
+        self, request: HttpRequest, *, stream: bool = False, **kwargs: Any
+    ) -> Awaitable[AsyncHttpResponse]:
+        """Runs the network request through the client's chained policies.
+
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest("GET", "https://www.example.org/")
+        <HttpRequest [GET], url: 'https://www.example.org/'>
+        >>> response = await client.send_request(request)
+        <AsyncHttpResponse: 200 OK>
+
+        For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
+
+        :param request: The network request you want to make. Required.
+        :type request: ~azure.core.rest.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+        :return: The response of your network call. Does not do error handling on your response.
+        :rtype: ~azure.core.rest.AsyncHttpResponse
+        """
+
+        request_copy = deepcopy(request)
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+
+        request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments)
+        return self._client.send_request(request_copy, stream=stream, **kwargs)  # type: ignore
+
+    async def close(self) -> None:
+        await self._client.close()
+
+    async def __aenter__(self) -> Self:
+        await self._client.__aenter__()
+        return self
+
+    async def __aexit__(self, *exc_details: Any) -> None:
+        await self._client.__aexit__(*exc_details)
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/_configuration.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/_configuration.py
new file mode 100644
index 000000000000..48f17c285ebf
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/_configuration.py
@@ -0,0 +1,80 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING, Union
+
+from azure.core.credentials import AzureKeyCredential
+from azure.core.pipeline import policies
+
+from .._version import VERSION
+
+if TYPE_CHECKING:
+    from azure.core.credentials_async import AsyncTokenCredential
+
+
+class AIProjectClientConfiguration:  # pylint: disable=too-many-instance-attributes
+    """Configuration for AIProjectClient.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param endpoint: Project endpoint. In the form
+     "https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/_project"
+     if your Foundry Hub has only one Project, or to use the default Project in your Hub. Or in the
+     form
+     "https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>"
+     if you want to explicitly
+     specify the Foundry Project name. Required.
+    :type endpoint: str
+    :param credential: Credential used to authenticate requests to the service. Is either a key
+     credential type or a token credential type. Required.
+    :type credential: ~azure.core.credentials.AzureKeyCredential or
+     ~azure.core.credentials_async.AsyncTokenCredential
+    :keyword api_version: The API version to use for this operation. Default value is
+     "2025-05-15-preview". Note that overriding this default value may result in unsupported
+     behavior.
+    :paramtype api_version: str
+    """
+
+    def __init__(
+        self, endpoint: str, credential: Union[AzureKeyCredential, "AsyncTokenCredential"], **kwargs: Any
+    ) -> None:
+        api_version: str = kwargs.pop("api_version", "2025-05-15-preview")
+
+        if endpoint is None:
+            raise ValueError("Parameter 'endpoint' must not be None.")
+        if credential is None:
+            raise ValueError("Parameter 'credential' must not be None.")
+
+        self.endpoint = endpoint
+        self.credential = credential
+        self.api_version = api_version
+        self.credential_scopes = kwargs.pop("credential_scopes", ["https://cognitiveservices.azure.com/.default"])
+        kwargs.setdefault("sdk_moniker", "ai-projects-onedp/{}".format(VERSION))
+        self.polling_interval = kwargs.get("polling_interval", 30)
+        self._configure(**kwargs)
+
+    def _infer_policy(self, **kwargs):
+        # Key credentials are sent as "Authorization: Bearer <key>"; anything
+        # exposing get_token is treated as an AAD token credential.
+        if isinstance(self.credential, AzureKeyCredential):
+            return policies.AzureKeyCredentialPolicy(self.credential, "Authorization", prefix="Bearer", **kwargs)
+        if hasattr(self.credential, "get_token"):
+            return policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
+        raise TypeError(f"Unsupported credential: {self.credential}")
+
+    def _configure(self, **kwargs: Any) -> None:
+        self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs)
+        self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
+        self.authentication_policy = kwargs.get("authentication_policy")
+        if self.credential and not self.authentication_policy:
+            self.authentication_policy = self._infer_policy(**kwargs)
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/_patch.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/_patch.py
new file mode 100644
index 000000000000..c784bc4ee811
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/_patch.py
@@ -0,0 +1,69 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List, Optional, Union, Any
+from azure.core.credentials import AzureKeyCredential
+from azure.core.credentials_async import AsyncTokenCredential
+from ._client import AIProjectClient as AIProjectClientGenerated
+from .operations import InferenceOperations, AssistantsOperations, TelemetryOperations
+
+
+class AIProjectClient(AIProjectClientGenerated):  # pylint: disable=too-many-instance-attributes
+    """AIProjectClient.
+
+    :ivar connections: ConnectionsOperations operations
+    :vartype connections: azure.ai.projects.onedp.aio.operations.ConnectionsOperations
+    :ivar assistants: AssistantsOperations operations
+    :vartype assistants: azure.ai.projects.onedp.aio.operations.AssistantsOperations
+    :ivar inference: InferenceOperations operations
+    :vartype inference: azure.ai.projects.onedp.aio.operations.InferenceOperations
+    :ivar telemetry: TelemetryOperations operations
+    :vartype telemetry: azure.ai.projects.onedp.aio.operations.TelemetryOperations
+    :ivar evaluations: EvaluationsOperations operations
+    :vartype evaluations: azure.ai.projects.onedp.aio.operations.EvaluationsOperations
+    :ivar datasets: DatasetsOperations operations
+    :vartype datasets: azure.ai.projects.onedp.aio.operations.DatasetsOperations
+    :ivar indexes: IndexesOperations operations
+    :vartype indexes: azure.ai.projects.onedp.aio.operations.IndexesOperations
+    :ivar deployments: DeploymentsOperations operations
+    :vartype deployments: azure.ai.projects.onedp.aio.operations.DeploymentsOperations
+    :ivar red_teams: RedTeamsOperations operations
+    :vartype red_teams: azure.ai.projects.onedp.aio.operations.RedTeamsOperations
+    :param endpoint: Project endpoint in the form of:
+     https://<your-ai-services-account-name>.services.ai.azure.com/api/projects/<your-project-name>. Required.
+    :type endpoint: str
+    :param credential: Credential used to authenticate requests to the service. Is either a key
+     credential type or a token credential type. Required.
+    :type credential: ~azure.core.credentials.AzureKeyCredential or
+     ~azure.core.credentials_async.AsyncTokenCredential
+    :keyword api_version: The API version to use for this operation. Default value is
+     "2025-05-15-preview". Note that overriding this default value may result in unsupported
+     behavior.
+    :paramtype api_version: str
+    """
+
+    def __init__(
+        self, endpoint: str, credential: Union[AzureKeyCredential, AsyncTokenCredential], **kwargs: Any
+    ) -> None:
+        # Capture the caller's user_agent (kwargs.get, not pop, so the generated
+        # base class still receives it too).
+        self._user_agent: Optional[str] = kwargs.get("user_agent", None)
+        super().__init__(endpoint=endpoint, credential=credential, **kwargs)
+        # Hand-written operation groups layered on top of the generated client.
+        self.telemetry = TelemetryOperations(self)
+        self.inference = InferenceOperations(self)
+        self.assistants = AssistantsOperations(self)
+
+
+__all__: List[str] = ["AIProjectClient"]  # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+    """Do not remove from this file.
+
+    `patch_sdk` is a last resort escape hatch that allows you to do customizations
+    you can't accomplish using the techniques described in
+    https://aka.ms/azsdk/python/dpcodegen/python/customize
+    """
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/_vendor.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/_vendor.py
new file mode 100644
index 000000000000..cbaa624660e4
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/_vendor.py
@@ -0,0 +1,40 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Optional
+
+from azure.core import MatchConditions
+
+
+def quote_etag(etag: Optional[str]) -> Optional[str]:
+    # Wrap a raw ETag value in double quotes; weak ETags ("W/..."), the
+    # wildcard "*", empty values, and already-quoted values pass through unchanged.
+    if not etag or etag == "*":
+        return etag
+    if etag.startswith("W/"):
+        return etag
+    if etag.startswith('"') and etag.endswith('"'):
+        return etag
+    if etag.startswith("'") and etag.endswith("'"):
+        return etag
+    return '"' + etag + '"'
+
+
+def prep_if_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]:
+    # Build the If-Match header value for a conditional request, or None if
+    # no If-Match condition applies.
+    if match_condition == MatchConditions.IfNotModified:
+        if_match = quote_etag(etag) if etag else None
+        return if_match
+    if match_condition == MatchConditions.IfPresent:
+        return "*"
+    return None
+
+
+def prep_if_none_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]:
+    # Build the If-None-Match header value for a conditional request, or None
+    # if no If-None-Match condition applies.
+    if match_condition == MatchConditions.IfModified:
+        if_none_match = quote_etag(etag) if etag else None
+        return if_none_match
+    if match_condition == MatchConditions.IfMissing:
+        return "*"
+    return None
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/__init__.py
new file mode 100644
index 000000000000..2d5ade65e432
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/__init__.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from ._patch import *  # pylint: disable=unused-wildcard-import
+
+# Generated operation groups; extend or override via _patch.py, not here.
+from ._operations import InternalOperations  # type: ignore
+from ._operations import ServicePatternsOperations  # type: ignore
+from ._operations import ConnectionsOperations  # type: ignore
+from ._operations import EvaluationsOperations  # type: ignore
+from ._operations import DatasetsOperations  # type: ignore
+from ._operations import IndexesOperations  # type: ignore
+from ._operations import DeploymentsOperations  # type: ignore
+from ._operations import RedTeamsOperations  # type: ignore
+
+from ._patch import __all__ as _patch_all
+from ._patch import *
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+    "InternalOperations",
+    "ServicePatternsOperations",
+    "ConnectionsOperations",
+    "EvaluationsOperations",
+    "DatasetsOperations",
+    "IndexesOperations",
+    "DeploymentsOperations",
+    "RedTeamsOperations",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
+_patch_sdk()
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_operations.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_operations.py
new file mode 100644
index 000000000000..1736f8454e11
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_operations.py
@@ -0,0 +1,2485 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from collections.abc import MutableMapping
+from io import IOBase
+import json
+from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, overload
+import urllib.parse
+
+from azure.core import AsyncPipelineClient
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    StreamClosedError,
+    StreamConsumedError,
+    map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+
+from ... import models as _models
+from ..._model_base import SdkJSONEncoder, _deserialize
+from ..._serialization import Deserializer, Serializer
+from ..._validation import api_version_validation
+from ...operations._operations import (
+    build_connections_get_request,
+    build_connections_get_with_credentials_request,
+    build_connections_list_request,
+    build_connections_list_with_credentials_request,
+    build_datasets_create_version_request,
+    build_datasets_delete_version_request,
+    build_datasets_get_credentials_request,
+    build_datasets_get_version_request,
+    build_datasets_list_latest_request,
+    build_datasets_list_versions_request,
+    build_datasets_start_pending_upload_version_request,
+    build_deployments_get_request,
+    build_deployments_list_request,
+    build_evaluations_create_run_request,
+    build_evaluations_get_request,
+    build_evaluations_list_request,
+    build_indexes_create_version_request,
+    build_indexes_delete_version_request,
+    build_indexes_get_version_request,
+    build_indexes_list_latest_request,
+    build_indexes_list_versions_request,
+    build_red_teams_create_run_request,
+    build_red_teams_get_request,
+    build_red_teams_list_request,
+)
+from .._configuration import AIProjectClientConfiguration
+
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+JSON = MutableMapping[str, Any]
+
+
+class InternalOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+        Instead, you should access the following operations through
+        :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s
+        :attr:`internal` attribute.
+    """
+
+    def __init__(self, *args, **kwargs) -> None:
+        # Accepts (client, config, serializer, deserializer) positionally from
+        # the generated client, or the same values as keyword arguments.
+        input_args = list(args)
+        self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+
+class ServicePatternsOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+        Instead, you should access the following operations through
+        :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s
+        :attr:`service_patterns` attribute.
+    """
+
+    def __init__(self, *args, **kwargs) -> None:
+        # Same (client, config, serializer, deserializer) convention as above.
+        input_args = list(args)
+        self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+        # NOTE(review): ServicePatternsBuildingBlocksOperations is presumably
+        # defined later in this generated module — confirm it exists there.
+        self.building_blocks = ServicePatternsBuildingBlocksOperations(
+            self._client, self._config, self._serialize, self._deserialize
+        )
+
+
+class ConnectionsOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+        Instead, you should access the following operations through
+        :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s
+        :attr:`connections` attribute.
+    """
+
+    def __init__(self, *args, **kwargs) -> None:
+        # Same (client, config, serializer, deserializer) convention as above.
+        input_args = list(args)
+        self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+    @distributed_trace_async
+    async def get(self, name: str, **kwargs: Any) -> _models.Connection:
+        """Get a connection by name, without populating connection credentials.
+
+        :param name: The name of the resource. Required.
+        :type name: str
+        :return: Connection. The Connection is compatible with MutableMapping
+        :rtype: ~azure.ai.projects.onedp.models.Connection
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[_models.Connection] = kwargs.pop("cls", None)
+
+        _request = build_connections_get_request(
+            name=name,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = kwargs.pop("stream", False)
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
_request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Connection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: + """Get a connection by name, with its connection credentials. + + :param name: The name of the resource. Required. + :type name: str + :return: Connection. 
The Connection is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Connection] = kwargs.pop("cls", None) + + _request = build_connections_get_with_credentials_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Connection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, + *, + connection_type: Optional[Union[str, _models.ConnectionType]] = None, 
+ default_connection: Optional[bool] = None, + top: Optional[int] = None, + skip: Optional[int] = None, + **kwargs: Any + ) -> AsyncIterable["_models.Connection"]: + """List all connections in the project, without populating connection credentials. + + :keyword connection_type: List connections of this specific type. Known values are: + "AzureOpenAI", "AzureBlob", "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", + "AppConfig", "AppInsights", and "CustomKeys". Default value is None. + :paramtype connection_type: str or ~azure.ai.projects.onedp.models.ConnectionType + :keyword default_connection: List connections that are default connections. Default value is + None. + :paramtype default_connection: bool + :keyword top: The number of result items to return. Default value is None. + :paramtype top: int + :keyword skip: The number of result items to skip. Default value is None. + :paramtype skip: int + :return: An iterator like instance of Connection + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.onedp.models.Connection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.Connection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_connections_list_request( + connection_type=connection_type, + default_connection=default_connection, + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", 
skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.Connection], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_with_credentials( + self, + *, + connection_type: Optional[Union[str, _models.ConnectionType]] = None, + default_connection: Optional[bool] = None, + top: Optional[int] = None, + skip: Optional[int] = None, + **kwargs: Any + ) -> 
AsyncIterable["_models.Connection"]: + """List all connections in the project, with their connection credentials. + + :keyword connection_type: List connections of this specific type. Known values are: + "AzureOpenAI", "AzureBlob", "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", + "AppConfig", "AppInsights", and "CustomKeys". Default value is None. + :paramtype connection_type: str or ~azure.ai.projects.onedp.models.ConnectionType + :keyword default_connection: List connections that are default connections. Default value is + None. + :paramtype default_connection: bool + :keyword top: The number of result items to return. Default value is None. + :paramtype top: int + :keyword skip: The number of result items to skip. Default value is None. + :paramtype skip: int + :return: An iterator like instance of Connection + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.onedp.models.Connection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.Connection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_connections_list_with_credentials_request( + connection_type=connection_type, + default_connection=default_connection, + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call 
to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.Connection], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class EvaluationsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s + :attr:`evaluations` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "name", "client_request_id", "accept"]}, + ) + async def get(self, name: str, **kwargs: Any) -> _models.Evaluation: + """Get an evaluation run by name. + + :param name: Identifier of the evaluation. Required. + :type name: str + :return: Evaluation. The Evaluation is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Evaluation + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Evaluation] = kwargs.pop("cls", None) + + _request = build_evaluations_get_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = 
pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Evaluation, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={ + "2025-05-15-preview": ["api_version", "top", "skip", "maxpagesize", "client_request_id", "accept"] + }, + ) + def list( + self, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> AsyncIterable["_models.Evaluation"]: + """List evaluation runs. + + :keyword top: The number of result items to return. Default value is None. + :paramtype top: int + :keyword skip: The number of result items to skip. Default value is None. 
+ :paramtype skip: int + :return: An iterator like instance of Evaluation + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.onedp.models.Evaluation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.Evaluation]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_evaluations_list_request( + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
_deserialize(List[_models.Evaluation], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @overload + async def create_run( + self, evaluation: _models.Evaluation, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Evaluation: + """Creates an evaluation run. + + :param evaluation: Evaluation to be run. Required. + :type evaluation: ~azure.ai.projects.onedp.models.Evaluation + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Evaluation. The Evaluation is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Evaluation + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_run( + self, evaluation: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Evaluation: + """Creates an evaluation run. + + :param evaluation: Evaluation to be run. Required. + :type evaluation: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Evaluation. 
The Evaluation is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Evaluation + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_run( + self, evaluation: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Evaluation: + """Creates an evaluation run. + + :param evaluation: Evaluation to be run. Required. + :type evaluation: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: Evaluation. The Evaluation is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Evaluation + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "content_type", "accept"]}, + ) + async def create_run( + self, evaluation: Union[_models.Evaluation, JSON, IO[bytes]], **kwargs: Any + ) -> _models.Evaluation: + """Creates an evaluation run. + + :param evaluation: Evaluation to be run. Is one of the following types: Evaluation, JSON, + IO[bytes] Required. + :type evaluation: ~azure.ai.projects.onedp.models.Evaluation or JSON or IO[bytes] + :return: Evaluation. 
The Evaluation is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Evaluation + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Evaluation] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(evaluation, (IOBase, bytes)): + _content = evaluation + else: + _content = json.dumps(evaluation, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_evaluations_create_run_request( + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Evaluation, response.json()) + + if cls: + 
return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class DatasetsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s + :attr:`datasets` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_versions( + self, + name: str, + *, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.DatasetVersion"]: + """List all versions of the given DatasetVersion. + + :param name: The name of the resource. Required. + :type name: str + :keyword top: Top count of results, top count cannot be greater than the page size. If topCount + > page size, results with be default page size count will be returned. Default value is None. + :paramtype top: int + :keyword skip: Continuation token for pagination. Default value is None. + :paramtype skip: str + :keyword tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :paramtype tags: str + :keyword list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, ListViewType.All] + View type for including/excluding (for example) archived entities. Known values are: + "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :paramtype list_view_type: str or ~azure.ai.projects.onedp.models.ListViewType + :return: An iterator like instance of DatasetVersion + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.onedp.models.DatasetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.DatasetVersion]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_datasets_list_versions_request( + name=name, + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.DatasetVersion], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_latest( + self, + *, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.DatasetVersion"]: + """List the latest version of each DatasetVersion. + + :keyword top: Top count of results, top count cannot be greater than the page size. If topCount + > page size, results with be default page size count will be returned. Default value is None. + :paramtype top: int + :keyword skip: Continuation token for pagination. Default value is None. + :paramtype skip: str + :keyword tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :paramtype tags: str + :keyword list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, ListViewType.All] + View type for including/excluding (for example) archived entities. Known values are: + "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :paramtype list_view_type: str or ~azure.ai.projects.onedp.models.ListViewType + :return: An iterator like instance of DatasetVersion + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.onedp.models.DatasetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.DatasetVersion]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_datasets_list_latest_request( + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() 
+ list_of_elem = _deserialize(List[_models.DatasetVersion], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.DatasetVersion: + """Get the specific version of the DatasetVersion. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to retrieve. Required. + :type version: str + :return: DatasetVersion. 
The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) + + _request = build_datasets_get_version_request( + name=name, + version=version, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.DatasetVersion, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete_version(self, name: str, version: str, **kwargs: Any) -> None: + """Delete the specific version of the DatasetVersion. + + :param name: The name of the resource. Required. 
+ :type name: str + :param version: The version of the DatasetVersion to delete. Required. + :type version: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_datasets_delete_version_request( + name=name, + version=version, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @overload + async def create_version( + self, + name: str, + version: str, + body: _models.DatasetVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or replace an existing DatasetVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to create or replace. Required. + :type version: str + :param body: The definition of the DatasetVersion to create. Required. 
+ :type body: ~azure.ai.projects.onedp.models.DatasetVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_version( + self, name: str, version: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or replace an existing DatasetVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to create or replace. Required. + :type version: str + :param body: The definition of the DatasetVersion to create. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_version( + self, name: str, version: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or replace an existing DatasetVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to create or replace. Required. + :type version: str + :param body: The definition of the DatasetVersion to create. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_version( + self, name: str, version: str, body: Union[_models.DatasetVersion, JSON, IO[bytes]], **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or replace an existing DatasetVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to create or replace. Required. + :type version: str + :param body: The definition of the DatasetVersion to create. Is one of the following types: + DatasetVersion, JSON, IO[bytes] Required. + :type body: ~azure.ai.projects.onedp.models.DatasetVersion or JSON or IO[bytes] + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_datasets_create_version_request( + name=name, + version=version, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + 
headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.DatasetVersion, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def start_pending_upload_version( + self, + name: str, + version: str, + body: _models.PendingUploadRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param body: Parameters for the action. Required. + :type body: ~azure.ai.projects.onedp.models.PendingUploadRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: PendingUploadResponse. 
The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def start_pending_upload_version( + self, name: str, version: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param body: Parameters for the action. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def start_pending_upload_version( + self, name: str, version: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param body: Parameters for the action. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: PendingUploadResponse. 
The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def start_pending_upload_version( + self, name: str, version: str, body: Union[_models.PendingUploadRequest, JSON, IO[bytes]], **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param body: Parameters for the action. Is one of the following types: PendingUploadRequest, + JSON, IO[bytes] Required. + :type body: ~azure.ai.projects.onedp.models.PendingUploadRequest or JSON or IO[bytes] + :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PendingUploadResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_datasets_start_pending_upload_version_request( + name=name, + version=version, + content_type=content_type, + api_version=self._config.api_version, + 
content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.PendingUploadResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_credentials( + self, name: str, version: str, body: Any, **kwargs: Any + ) -> _models.AssetCredentialResponse: + """Get download sas for dataset version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param body: Parameters for the action. Required. + :type body: any + :return: AssetCredentialResponse. 
The AssetCredentialResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.AssetCredentialResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[_models.AssetCredentialResponse] = kwargs.pop("cls", None) + + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_datasets_get_credentials_request( + name=name, + version=version, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.AssetCredentialResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + 
+        return deserialized  # type: ignore
+
+
+class IndexesOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+        Instead, you should access the following operations through
+        :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s
+        :attr:`indexes` attribute.
+    """
+
+    def __init__(self, *args, **kwargs) -> None:
+        input_args = list(args)
+        self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+    @distributed_trace
+    def list_versions(
+        self,
+        name: str,
+        *,
+        top: Optional[int] = None,
+        skip: Optional[str] = None,
+        tags: Optional[str] = None,
+        list_view_type: Optional[Union[str, _models.ListViewType]] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.Index"]:
+        """List all versions of the given Index.
+
+        :param name: The name of the resource. Required.
+        :type name: str
+        :keyword top: Top count of results, top count cannot be greater than the page size. If topCount
+         > page size, results with default page size count will be returned. Default value is None.
+        :paramtype top: int
+        :keyword skip: Continuation token for pagination. Default value is None.
+        :paramtype skip: str
+        :keyword tags: Comma-separated list of tag names (and optionally values). Example:
+         tag1,tag2=value2. Default value is None.
+        :paramtype tags: str
+        :keyword list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, ListViewType.All]
+         View type for including/excluding (for example) archived entities. Known values are:
+         "ActiveOnly", "ArchivedOnly", and "All". Default value is None.
+ :paramtype list_view_type: str or ~azure.ai.projects.onedp.models.ListViewType + :return: An iterator like instance of Index + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.onedp.models.Index] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Index]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_indexes_list_versions_request( + name=name, + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + 
list_of_elem = _deserialize(List[_models.Index], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_latest( + self, + *, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.Index"]: + """List the latest version of each Index. + + :keyword top: Top count of results, top count cannot be greater than the page size. If topCount + > page size, results with be default page size count will be returned. Default value is None. + :paramtype top: int + :keyword skip: Continuation token for pagination. Default value is None. + :paramtype skip: str + :keyword tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :paramtype tags: str + :keyword list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, ListViewType.All] + View type for including/excluding (for example) archived entities. Known values are: + "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :paramtype list_view_type: str or ~azure.ai.projects.onedp.models.ListViewType + :return: An iterator like instance of Index + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.onedp.models.Index] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Index]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_indexes_list_latest_request( + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
_deserialize(List[_models.Index], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.Index: + """Get the specific version of the Index. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to retrieve. Required. + :type version: str + :return: Index. 
The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Index] = kwargs.pop("cls", None) + + _request = build_indexes_get_version_request( + name=name, + version=version, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Index, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete_version(self, name: str, version: str, **kwargs: Any) -> None: + """Delete the specific version of the Index. + + :param name: The name of the resource. Required. + :type name: str + :param version: The version of the Index to delete. Required. 
+ :type version: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_indexes_delete_version_request( + name=name, + version=version, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @overload + async def create_version( + self, name: str, version: str, body: _models.Index, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Index: + """Create a new or replace an existing Index with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or replace. Required. + :type version: str + :param body: The definition of the Index to create. Required. + :type body: ~azure.ai.projects.onedp.models.Index + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_version( + self, name: str, version: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Index: + """Create a new or replace an existing Index with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or replace. Required. + :type version: str + :param body: The definition of the Index to create. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_version( + self, name: str, version: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Index: + """Create a new or replace an existing Index with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or replace. Required. + :type version: str + :param body: The definition of the Index to create. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: Index. 
The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_version( + self, name: str, version: str, body: Union[_models.Index, JSON, IO[bytes]], **kwargs: Any + ) -> _models.Index: + """Create a new or replace an existing Index with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or replace. Required. + :type version: str + :param body: The definition of the Index to create. Is one of the following types: Index, JSON, + IO[bytes] Required. + :type body: ~azure.ai.projects.onedp.models.Index or JSON or IO[bytes] + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Index] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_indexes_create_version_request( + name=name, + version=version, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + 
_request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Index, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class DeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s + :attr:`deployments` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, name: str, **kwargs: Any) -> _models.Deployment: + """Get a deployed model. + + :param name: Name of the deployment. Required. + :type name: str + :return: Deployment. 
The Deployment is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Deployment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Deployment] = kwargs.pop("cls", None) + + _request = build_deployments_get_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Deployment, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, + *, + model_publisher: Optional[str] = None, + model_name: Optional[str] = None, + top: 
Optional[int] = None, + skip: Optional[int] = None, + **kwargs: Any + ) -> AsyncIterable["_models.Deployment"]: + """List all deployed models in the project. + + :keyword model_publisher: Model publisher to filter models by. Default value is None. + :paramtype model_publisher: str + :keyword model_name: Model name (the publisher specific name) to filter models by. Default + value is None. + :paramtype model_name: str + :keyword top: The number of result items to return. Default value is None. + :paramtype top: int + :keyword skip: The number of result items to skip. Default value is None. + :paramtype skip: int + :return: An iterator like instance of Deployment + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.onedp.models.Deployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.Deployment]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_deployments_list_request( + model_publisher=model_publisher, + model_name=model_name, + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for 
key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.Deployment], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class RedTeamsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s + :attr:`red_teams` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "name", "client_request_id", "accept"]}, + ) + async def get(self, name: str, **kwargs: Any) -> _models.RedTeam: + """Get a redteam by name. + + :param name: Identifier of the red team. Required. + :type name: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.RedTeam] = kwargs.pop("cls", None) + + _request = build_red_teams_get_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.RedTeam, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={ + "2025-05-15-preview": ["api_version", "top", "skip", "maxpagesize", "client_request_id", "accept"] + }, + ) + def list( + self, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> AsyncIterable["_models.RedTeam"]: + """List a redteam by name. + + :keyword top: The number of result items to return. Default value is None. + :paramtype top: int + :keyword skip: The number of result items to skip. Default value is None. 
+ :paramtype skip: int + :return: An iterator like instance of RedTeam + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.projects.onedp.models.RedTeam] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.RedTeam]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_red_teams_list_request( + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.RedTeam], 
deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @overload + async def create_run( + self, red_team: _models.RedTeam, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Required. + :type red_team: ~azure.ai.projects.onedp.models.RedTeam + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_run( + self, red_team: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Required. + :type red_team: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: RedTeam. 
The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_run( + self, red_team: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Required. + :type red_team: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "content_type", "accept"]}, + ) + async def create_run(self, red_team: Union[_models.RedTeam, JSON, IO[bytes]], **kwargs: Any) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Is one of the following types: RedTeam, JSON, IO[bytes] + Required. + :type red_team: ~azure.ai.projects.onedp.models.RedTeam or JSON or IO[bytes] + :return: RedTeam. 
The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.RedTeam] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(red_team, (IOBase, bytes)): + _content = red_team + else: + _content = json.dumps(red_team, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_red_teams_create_run_request( + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.RedTeam, response.json()) + + if cls: + return 
cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class ServicePatternsBuildingBlocksOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s + :attr:`building_blocks` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch.py new file mode 100644 index 000000000000..b7836fdb225f --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch.py @@ -0,0 +1,30 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List +from ._patch_assistants_async import AssistantsOperations +from ._patch_datasets_async import DatasetsOperations +from ._patch_inference_async import InferenceOperations +from ._patch_telemetry_async import TelemetryOperations + +__all__: List[str] = [ + "InferenceOperations", + "DatasetsOperations", + "AssistantsOperations", + "TelemetryOperations", +] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch_assistants_async.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch_assistants_async.py new file mode 100644 index 000000000000..971410bc6cb4 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch_assistants_async.py @@ -0,0 +1,70 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from azure.core.tracing.decorator import distributed_trace + + +class AssistantsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s + :attr:`assistants` attribute. + """ + + # TODO: Merge all code related to handling user-agent, into a single place. 
+ def __init__(self, outer_instance: "azure.ai.projects.onedp.aio.AIProjectClient") -> None: # type: ignore[name-defined]
+
+ # All returned inference clients will have this application id set on their user-agent.
+ # For more info on user-agent HTTP header, see:
+ # https://azure.github.io/azure-sdk/general_azurecore.html#telemetry-policy
+ USER_AGENT_APP_ID = "AIProjectClient"
+
+ if hasattr(outer_instance, "_user_agent") and outer_instance._user_agent:
+ # If the calling application has set "user_agent" when constructing the AIProjectClient,
+ # take that value and prepend it to USER_AGENT_APP_ID.
+ self._user_agent = f"{outer_instance._user_agent}-{USER_AGENT_APP_ID}"
+ else:
+ self._user_agent = USER_AGENT_APP_ID
+
+ self._outer_instance = outer_instance
+
+ @distributed_trace
+ def get_client(self, **kwargs) -> "AssistantsClient": # type: ignore[name-defined]
+ """Get an authenticated asynchronous AssistantsClient (from the package azure-ai-assistants) to use with
+ your AI Foundry Project. Keyword arguments are passed to the constructor of
+ AssistantsClient.
+
+ .. note:: The package `azure-ai-assistants` must be installed prior to calling this method.
+
+ :return: An authenticated Assistant Client.
+ :rtype: ~azure.ai.assistants.AssistantsClient
+
+ :raises ~azure.core.exceptions.ModuleNotFoundError: if the `azure-ai-assistants` package
+ is not installed.
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ try:
+ from azure.ai.assistants.aio import AssistantsClient
+ except ModuleNotFoundError as e:
+ raise ModuleNotFoundError(
+ "Azure AI Assistant SDK is not installed. 
Please install it using 'pip install azure-ai-assistants'" + ) from e + + client = AssistantsClient( + endpoint=self._outer_instance._config.endpoint, # pylint: disable=protected-access + credential=self._outer_instance._config.credential, # pylint: disable=protected-access + user_agent=kwargs.pop("user_agent", self._user_agent), + **kwargs, + ) + + return client diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch_datasets_async.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch_datasets_async.py new file mode 100644 index 000000000000..1f3679926c6b --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch_datasets_async.py @@ -0,0 +1,217 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +import logging +from typing import Any, Tuple +from pathlib import Path +from azure.storage.blob.aio import ContainerClient +from azure.core.tracing.decorator_async import distributed_trace_async + +from ._operations import DatasetsOperations as DatasetsOperationsGenerated +from ...models._models import ( + DatasetVersion, + PendingUploadRequest, + PendingUploadType, + PendingUploadResponse, +) +from ...models._enums import DatasetType, CredentialType + +logger = logging.getLogger(__name__) + + +class DatasetsOperations(DatasetsOperationsGenerated): + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s + :attr:`datasets` attribute. 
+ """ + + # Internal helper method to create a new dataset and return a ContainerClient from azure-storage-blob package, + # to the dataset's blob storage. + async def _create_dataset_and_get_its_container_client( + self, + name: str, + input_version: str, + ) -> Tuple[ContainerClient, str]: + + pending_upload_response: PendingUploadResponse = await self.start_pending_upload_version( + name=name, + version=input_version, + body=PendingUploadRequest(pending_upload_type=PendingUploadType.TEMPORARY_BLOB_REFERENCE), + ) + output_version: str = input_version + + if not pending_upload_response.blob_reference_for_consumption: + raise ValueError("Blob reference for consumption is not present") + if not pending_upload_response.blob_reference_for_consumption.credential.type: + raise ValueError("Credential type is not present") + if pending_upload_response.blob_reference_for_consumption.credential.type != CredentialType.SAS: + raise ValueError("Credential type is not SAS") + if not pending_upload_response.blob_reference_for_consumption.blob_uri: + raise ValueError("Blob URI is not present or empty") + + if logger.getEffectiveLevel() == logging.DEBUG: + logger.debug( + "[_create_dataset_and_get_its_container_client] pending_upload_response.pending_upload_id = %s.", + pending_upload_response.pending_upload_id, + ) + logger.debug( + "[_create_dataset_and_get_its_container_client] pending_upload_response.pending_upload_type = %s.", + pending_upload_response.pending_upload_type, + ) # == PendingUploadType.TEMPORARY_BLOB_REFERENCE + logger.debug( + "[_create_dataset_and_get_its_container_client] pending_upload_response.blob_reference_for_consumption.blob_uri = %s.", + pending_upload_response.blob_reference_for_consumption.blob_uri, + ) # Hosted on behalf of (HOBO) not visible to the user. If the form of: "https://.blob.core.windows.net/?" 
+ logger.debug(
+ "[_create_dataset_and_get_its_container_client] pending_upload_response.blob_reference_for_consumption.storage_account_arm_id = %s.",
+ pending_upload_response.blob_reference_for_consumption.storage_account_arm_id,
+ ) # /subscriptions/<>/resourceGroups/<>/Microsoft.Storage/accounts/<>
+ logger.debug(
+ "[_create_dataset_and_get_its_container_client] pending_upload_response.blob_reference_for_consumption.credential.sas_uri = %s.",
+ pending_upload_response.blob_reference_for_consumption.credential.sas_uri,
+ )
+ logger.debug(
+ "[_create_dataset_and_get_its_container_client] pending_upload_response.blob_reference_for_consumption.credential.type = %s.",
+ pending_upload_response.blob_reference_for_consumption.credential.type,
+ ) # == CredentialType.SAS
+
+ # For overview on Blob storage SDK in Python see:
+ # https://learn.microsoft.com/azure/storage/blobs/storage-quickstart-blobs-python
+ # https://learn.microsoft.com/azure/storage/blobs/storage-blob-upload-python
+
+ # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.containerclient?view=azure-python#azure-storage-blob-containerclient-from-container-url
+ return (
+ # NOTE: `from_container_url` is a synchronous classmethod (also in azure.storage.blob.aio);
+ # it must not be awaited — awaiting its ContainerClient return value raises TypeError.
+ ContainerClient.from_container_url(
+ container_url=pending_upload_response.blob_reference_for_consumption.blob_uri, # Of the form: "https://.blob.core.windows.net/?"
+ ),
+ output_version,
+ )
+
+ @distributed_trace_async
+ async def upload_file_and_create(self, *, name: str, version: str, file: str, **kwargs: Any) -> DatasetVersion:
+ """Upload file to a blob storage, and create a dataset that references this file.
+ This method uses the `ContainerClient.upload_blob` method from the azure-storage-blob package
+ to upload the file. Any keyword arguments provided will be passed to the `upload_blob` method.
+
+ :keyword name: The name of the dataset. Required.
+ :paramtype name: str
+ :keyword version: The version identifier for the dataset. Required. 
+ :paramtype version: str + :keyword file: The file name (including optional path) to be uploaded. Required. + :paramtype file: str + :return: The created dataset version. + :rtype: ~azure.ai.projects.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: If an error occurs during the HTTP request. + """ + + path_file = Path(file) + if not path_file.exists(): + raise ValueError("The provided file does not exist.") + if path_file.is_dir(): + raise ValueError("The provided file is actually a folder. Use method `create_and_upload_folder` instead") + + container_client, output_version = await self._create_dataset_and_get_its_container_client( + name=name, input_version=version + ) + + async with container_client: + + with open(file=file, mode="rb") as data: # TODO: What is the best async options for file reading? + + blob_name = path_file.name # Extract the file name from the path. + logger.debug( + "[upload_file_and_create] Start uploading file `%s` as blob `%s`.", + file, + blob_name, + ) + + # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.containerclient?view=azure-python#azure-storage-blob-containerclient-upload-blob + with await container_client.upload_blob(name=blob_name, data=data, **kwargs) as blob_client: + + logger.debug("[upload_file_and_create] Done uploading") + + dataset_version = await self.create_version( + name=name, + version=output_version, + body=DatasetVersion( + # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.blobclient?view=azure-python#azure-storage-blob-blobclient-url + # Per above doc the ".url" contains SAS token... should this be stripped away? 
+                            dataset_uri=blob_client.url,  # ".blob.windows.core.net//"
+                            type=DatasetType.URI_FILE,
+                        ),
+                    )
+
+        return dataset_version
+
+    @distributed_trace_async
+    async def upload_folder_and_create(self, *, name: str, version: str, folder: str, **kwargs: Any) -> DatasetVersion:
+        """Upload all files in a folder and its sub folders to a blob storage, while maintaining
+        relative paths, and create a dataset that references this folder.
+        This method uses the `ContainerClient.upload_blob` method from the azure-storage-blob package
+        to upload each file. Any keyword arguments provided will be passed to the `upload_blob` method.
+
+        :keyword name: The name of the dataset. Required.
+        :paramtype name: str
+        :keyword version: The version identifier for the dataset. Required.
+        :paramtype version: str
+        :keyword folder: The folder name (including optional path) to be uploaded. Required.
+        :paramtype folder: str
+        :return: The created dataset version.
+        :rtype: ~azure.ai.projects.models.DatasetVersion
+        :raises ~azure.core.exceptions.HttpResponseError: If an error occurs during the HTTP request.
+        """
+        path_folder = Path(folder)
+        if not Path(path_folder).exists():
+            raise ValueError("The provided folder does not exist.")
+        if Path(path_folder).is_file():
+            raise ValueError("The provided folder is actually a file. Use method `upload_file_and_create` instead.")
+
+        container_client, output_version = await self._create_dataset_and_get_its_container_client(
+            name=name, input_version=version
+        )
+
+        async with container_client:
+
+            # Recursively traverse all files in the folder
+            files_uploaded: bool = False
+            for file_path in path_folder.rglob("*"):  # `rglob` matches all files and folders recursively
+                if file_path.is_file():  # Check if the path is a file. Skip folders.
+                    blob_name = file_path.relative_to(path_folder)  # Blob name relative to the folder
+                    logger.debug(
+                        "[upload_folder_and_create] Start uploading file `%s` as blob `%s`.",
+                        file_path,
+                        blob_name,
+                    )
+                    with file_path.open(
+                        "rb"
+                    ) as data:  # Open the file for reading in binary mode # TODO: async version?
+                        # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.containerclient?view=azure-python#azure-storage-blob-containerclient-upload-blob
+                        await container_client.upload_blob(name=str(blob_name), data=data, **kwargs)
+                        logger.debug("[upload_folder_and_create] Done uploading.")
+                        files_uploaded = True
+
+            if not files_uploaded:
+                raise ValueError("The provided folder is empty.")
+
+            dataset_version = await self.create_version(
+                name=name,
+                version=output_version,
+                body=DatasetVersion(
+                    # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.blobclient?view=azure-python#azure-storage-blob-blobclient-url
+                    # Per above doc the ".url" contains SAS token... should this be stripped away?
+                    dataset_uri=container_client.url,  # ".blob.windows.core.net/ ?"
+                    type=DatasetType.URI_FOLDER,
+                ),
+            )
+
+        return dataset_version
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch_inference_async.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch_inference_async.py
new file mode 100644
index 000000000000..6cb28b2e346b
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch_inference_async.py
@@ -0,0 +1,297 @@
+# pylint: disable=line-too-long,useless-suppression
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +import logging +from typing import Optional, AsyncIterable +from urllib.parse import urlparse +from azure.core.exceptions import ResourceNotFoundError +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.tracing.decorator import distributed_trace + +from ...models._models import ( + Connection, + ApiKeyCredentials, + EntraIDCredentials, +) +from ...models._enums import CredentialType, ConnectionType + +logger = logging.getLogger(__name__) + + +class InferenceOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s + :attr:`inference` attribute. + """ + + def __init__(self, outer_instance: "azure.ai.projects.onedp.aio.AIProjectClient") -> None: # type: ignore[name-defined] + + # TODO: Put the user agent initialization code below in a common place used by both sync and async operations. + + # All returned inference clients will have this application id set on their user-agent. + # For more info on user-agent HTTP header, see: + # https://azure.github.io/azure-sdk/general_azurecore.html#telemetry-policy + USER_AGENT_APP_ID = "AIProjectClient" + + if hasattr(outer_instance, "_user_agent") and outer_instance._user_agent: + # If the calling application has set "user_agent" when constructing the AIProjectClient, + # take that value and prepend it to USER_AGENT_APP_ID. 
+ self._user_agent = f"{outer_instance._user_agent}-{USER_AGENT_APP_ID}" + else: + self._user_agent = USER_AGENT_APP_ID + + self._outer_instance = outer_instance + + # TODO: Use a common method for both the sync and async operations + @classmethod + def _get_inference_url(cls, input_url: str) -> str: + """ + Converts an input URL in the format: + https:/// + to: + https:///api/models + + :param input_url: The input endpoint URL used to construct AIProjectClient. + :type input_url: str + + :return: The endpoint URL required to construct inference clients from the azure-ai-inference package. + :rtype: str + """ + parsed = urlparse(input_url) + if parsed.scheme != "https" or not parsed.netloc: + raise ValueError("Invalid endpoint URL format. Must be an https URL with a host.") + new_url = f"https://{parsed.netloc}/api/models" + return new_url + + @distributed_trace + def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient": # type: ignore[name-defined] + """Get an authenticated asynchronous ChatCompletionsClient (from the package azure-ai-inference) to use with + AI models deployed to your AI Foundry Project. Keyword arguments are passed to the constructor of + ChatCompletionsClient. + + At least one AI model that supports chat completions must be deployed. + + .. note:: The package `azure-ai-inference` and `aiohttp` must be installed prior to calling this method. + + :return: An authenticated chat completions client. + :rtype: ~azure.ai.inference.aio.ChatCompletionsClient + + :raises ~azure.core.exceptions.ModuleNotFoundError: if the `azure-ai-inference` package + is not installed. + :raises ~azure.core.exceptions.HttpResponseError: + """ + + try: + from azure.ai.inference.aio import ChatCompletionsClient + except ModuleNotFoundError as e: + raise ModuleNotFoundError( + "Azure AI Inference SDK is not installed. 
Please install it using 'pip install azure-ai-inference'" + ) from e + + endpoint = self._get_inference_url(self._outer_instance._config.endpoint) # pylint: disable=protected-access + # Older Inference SDK versions use ml.azure.com as the scope. Make sure to set the correct value here. This + # is only relevent of course if EntraID auth is used. + credential_scopes = ["https://cognitiveservices.azure.com/.default"] + + client = ChatCompletionsClient( + endpoint=endpoint, + credential=self._outer_instance._config.credential, # pylint: disable=protected-access + credential_scopes=credential_scopes, + user_agent=kwargs.pop("user_agent", self._user_agent), + **kwargs, + ) + + return client + + @distributed_trace + def get_embeddings_client(self, **kwargs) -> "EmbeddingsClient": # type: ignore[name-defined] + """Get an authenticated asynchronous EmbeddingsClient (from the package azure-ai-inference) to use with + AI models deployed to your AI Foundry Project. Keyword arguments are passed to the constructor of + ChatCompletionsClient. + + At least one AI model that supports text embeddings must be deployed. + + .. note:: The package `azure-ai-inference` and `aiohttp` must be installed prior to calling this method. + + :return: An authenticated Embeddings client. + :rtype: ~azure.ai.inference.aio.EmbeddingsClient + + :raises ~azure.core.exceptions.ModuleNotFoundError: if the `azure-ai-inference` package + is not installed. + :raises ~azure.core.exceptions.HttpResponseError: + """ + + try: + from azure.ai.inference.aio import EmbeddingsClient + except ModuleNotFoundError as e: + raise ModuleNotFoundError( + "Azure AI Inference SDK is not installed. Please install it using 'pip install azure-ai-inference'" + ) from e + + endpoint = self._get_inference_url(self._outer_instance._config.endpoint) # pylint: disable=protected-access + # Older Inference SDK versions use ml.azure.com as the scope. Make sure to set the correct value here. 
This + # is only relevent of course if EntraID auth is used. + credential_scopes = ["https://cognitiveservices.azure.com/.default"] + + client = EmbeddingsClient( + endpoint=endpoint, + credential=self._outer_instance._config.credential, # pylint: disable=protected-access + credential_scopes=credential_scopes, + user_agent=kwargs.pop("user_agent", self._user_agent), + **kwargs, + ) + + return client + + @distributed_trace + def get_image_embeddings_client(self, **kwargs) -> "ImageEmbeddingsClient": # type: ignore[name-defined] + """Get an authenticated asynchronous ImageEmbeddingsClient (from the package azure-ai-inference) to use with + AI models deployed to your AI Foundry Project. Keyword arguments are passed to the constructor of + ChatCompletionsClient. + + At least one AI model that supports image embeddings must be deployed. + + .. note:: The package `azure-ai-inference` and `aiohttp` must be installed prior to calling this method. + + :return: An authenticated Image Embeddings client. + :rtype: ~azure.ai.inference.aio.ImageEmbeddingsClient + + :raises ~azure.core.exceptions.ModuleNotFoundError: if the `azure-ai-inference` package + is not installed. + :raises ~azure.core.exceptions.HttpResponseError: + """ + + try: + from azure.ai.inference.aio import ImageEmbeddingsClient + except ModuleNotFoundError as e: + raise ModuleNotFoundError( + "Azure AI Inference SDK is not installed. Please install it using 'pip install azure-ai-inference'" + ) from e + + endpoint = self._get_inference_url(self._outer_instance._config.endpoint) # pylint: disable=protected-access + # Older Inference SDK versions use ml.azure.com as the scope. Make sure to set the correct value here. This + # is only relevent of course if EntraID auth is used. 
+ credential_scopes = ["https://cognitiveservices.azure.com/.default"] + + client = ImageEmbeddingsClient( + endpoint=endpoint, + credential=self._outer_instance._config.credential, # pylint: disable=protected-access + credential_scopes=credential_scopes, + user_agent=kwargs.pop("user_agent", self._user_agent), + **kwargs, + ) + + return client + + @distributed_trace_async + async def get_azure_openai_client( + self, *, api_version: Optional[str] = None, connection_name: Optional[str] = None, **kwargs + ) -> "AsyncAzureOpenAI": # type: ignore[name-defined] + """Get an authenticated AsyncAzureOpenAI client (from the `openai` package) for the default + Azure OpenAI connection (if `connection_name` is not specificed), or from the Azure OpenAI + resource given by its connection name. + + .. note:: The package `openai` must be installed prior to calling this method. + + :keyword api_version: The Azure OpenAI api-version to use when creating the client. Optional. + See "Data plane - Inference" row in the table at + https://learn.microsoft.com/azure/ai-services/openai/reference#api-specs. If this keyword + is not specified, you must set the environment variable `OPENAI_API_VERSION` instead. + :paramtype api_version: str + :keyword connection_name: The name of a connection to an Azure OpenAI resource in your AI Foundry project. + resource. Optional. If not provided, the default Azure OpenAI connection will be used. + :type connection_name: str + + :return: An authenticated AsyncAzureOpenAI client + :rtype: ~openai.AzureAsyncAzureOpenAIOpenAI + + :raises ~azure.core.exceptions.ResourceNotFoundError: if an Azure OpenAI connection + does not exist. + :raises ~azure.core.exceptions.ModuleNotFoundError: if the `openai` package + is not installed. + :raises ValueError: if the connection name is an empty string. 
+ :raises ~azure.core.exceptions.HttpResponseError: + """ + if connection_name is not None and not connection_name: + raise ValueError("Connection name cannot be empty") + + try: + from openai import AsyncAzureOpenAI + except ModuleNotFoundError as e: + raise ModuleNotFoundError( + "OpenAI SDK is not installed. Please install it using 'pip install openai'" + ) from e + + connection = Connection() + if connection_name: + connection = await self._outer_instance.connections.get(name=connection_name, **kwargs) + if connection.type != ConnectionType.AZURE_OPEN_AI: + raise ValueError(f"Connection `{connection_name}` is not of type Azure OpenAI.") + else: + # If connection name was not specified, try to get the default Azure OpenAI connection. + connections: AsyncIterable[Connection] = self._outer_instance.connections.list( + connection_type=ConnectionType.AZURE_OPEN_AI, default_connection=True, **kwargs + ) + try: + connection = await connections.__aiter__().__anext__() + except StopAsyncIteration as exc: + raise ResourceNotFoundError("No default Azure OpenAI connection found.") from exc + + # TODO: if there isn't a default openai connection, we would have to by convention + # use https://{resource-name}.openai.azure.com where {resource-name} is the same as the + # foundry API endpoint (https://{resource-name}.services.ai.azure.com) + + # If the connection uses API key authentication, we need to make another service call to get + # the connection with API key populated. 
+        if connection.credentials.auth_type == CredentialType.API_KEY:
+            connection = await self._outer_instance.connections.get_with_credentials(name=connection.name, **kwargs)
+
+        logger.debug("[InferenceOperations.get_azure_openai_client] connection = %s", str(connection))
+
+        azure_endpoint = connection.target[:-1] if connection.target.endswith("/") else connection.target
+
+        if isinstance(connection.credentials, ApiKeyCredentials):
+
+            logger.debug(
+                "[InferenceOperations.get_azure_openai_client] Creating AzureOpenAI using API key authentication"
+            )
+            api_key = connection.credentials.api_key
+            client = AsyncAzureOpenAI(api_key=api_key, azure_endpoint=azure_endpoint, api_version=api_version)
+
+        elif isinstance(connection.credentials, EntraIDCredentials):
+
+            logger.debug(
+                "[InferenceOperations.get_azure_openai_client] Creating AzureOpenAI using Entra ID authentication"
+            )
+
+            try:
+                from azure.identity.aio import get_bearer_token_provider
+            except ModuleNotFoundError as e:
+                raise ModuleNotFoundError(
+                    "azure.identity package not installed. Please install it using 'pip install azure.identity'"
+                ) from e
+
+            client = AsyncAzureOpenAI(
+                # See https://learn.microsoft.com/python/api/azure-identity/azure.identity?view=azure-python#azure-identity-get-bearer-token-provider  # pylint: disable=line-too-long
+                azure_ad_token_provider=get_bearer_token_provider(
+                    self._outer_instance._config.credential,  # pylint: disable=protected-access
+                    "https://cognitiveservices.azure.com/.default",  # pylint: disable=protected-access
+                ),
+                azure_endpoint=azure_endpoint,
+                api_version=api_version,
+            )
+
+        else:
+            raise ValueError(f"Unsupported authentication type {connection.credentials.auth_type}")
+
+        return client
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch_telemetry_async.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch_telemetry_async.py
new file mode 100644
index 000000000000..860c49bf9b00
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/aio/operations/_patch_telemetry_async.py
@@ -0,0 +1,70 @@
+# pylint: disable=line-too-long,useless-suppression
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import Optional, AsyncIterable
+from azure.core.exceptions import ResourceNotFoundError
+from azure.core.tracing.decorator_async import distributed_trace_async
+
+from ...models._models import (
+    Connection,
+    ApiKeyCredentials,
+)
+from ...models._enums import ConnectionType
+
+
+class TelemetryOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+        Instead, you should access the following operations through
+        :class:`~azure.ai.projects.onedp.aio.AIProjectClient`'s
+        :attr:`telemetry` attribute.
+ """ + + _connection_string: Optional[str] = None + + def __init__(self, outer_instance: "azure.ai.projects.onedp.aio.AIProjectClient") -> None: # type: ignore[name-defined] + self._outer_instance = outer_instance + + @distributed_trace_async + async def get_connection_string(self) -> str: + """Get the Application Insights connection string associated with the Project's Application Insights resource. + + :return: The Application Insights connection string if a the resource was enabled for the Project. + :rtype: str + :raises ~azure.core.exceptions.ResourceNotFoundError: An Application Insights connection does not + exist for this Foundry project. + """ + if not self._connection_string: + + # TODO: Two REST APIs calls can be replaced by one if we have had REST API for get_with_credentials(connection_type=ConnectionType.APPLICATION_INSIGHTS) + # Returns an empty Iterable if no connections exits. + connections: AsyncIterable[Connection] = self._outer_instance.connections.list( + connection_type=ConnectionType.APPLICATION_INSIGHTS, + default_connection=True, + ) + + connection_name: Optional[str] = None + async for connection in connections: + connection_name = connection.name + break + if not connection_name: + raise ResourceNotFoundError("No Application Insights connection found.") + + connection = await self._outer_instance.connections.get_with_credentials(name=connection_name) + + if isinstance(connection.credentials, ApiKeyCredentials): + if not connection.credentials.api_key: + raise ValueError("Application Insights connection does not have a connection string.") + self._connection_string = connection.credentials.api_key + else: + raise ValueError("Application Insights connection does not use API Key credentials.") + + return self._connection_string diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/__init__.py new file mode 100644 index 000000000000..d7d4ccea33e0 
--- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/__init__.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + ApiKeyCredentials, + AssetCredentialResponse, + AzureAISearchIndex, + BaseCredentials, + BlobReferenceForConsumption, + Connection, + CosmosDBIndex, + CustomCredential, + DatasetVersion, + Deployment, + EmbeddingConfiguration, + EntraIDCredentials, + Evaluation, + EvaluatorConfiguration, + FileDatasetVersion, + FolderDatasetVersion, + Index, + InputData, + InputDataset, + ManagedAzureAISearchIndex, + ModelDeployment, + NoAuthenticationCredentials, + PendingUploadRequest, + PendingUploadResponse, + RedTeam, + SASCredentials, + SasCredential, + Sku, +) + +from ._enums import ( # type: ignore + AttackStrategy, + ConnectionType, + CredentialType, + DatasetType, + DeploymentType, + IndexType, + ListViewType, + PendingUploadType, + RiskCategory, +) +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "ApiKeyCredentials", + "AssetCredentialResponse", + "AzureAISearchIndex", + "BaseCredentials", + "BlobReferenceForConsumption", + "Connection", + "CosmosDBIndex", + "CustomCredential", + "DatasetVersion", + "Deployment", + "EmbeddingConfiguration", + "EntraIDCredentials", + "Evaluation", + "EvaluatorConfiguration", + 
"FileDatasetVersion", + "FolderDatasetVersion", + "Index", + "InputData", + "InputDataset", + "ManagedAzureAISearchIndex", + "ModelDeployment", + "NoAuthenticationCredentials", + "PendingUploadRequest", + "PendingUploadResponse", + "RedTeam", + "SASCredentials", + "SasCredential", + "Sku", + "AttackStrategy", + "ConnectionType", + "CredentialType", + "DatasetType", + "DeploymentType", + "IndexType", + "ListViewType", + "PendingUploadType", + "RiskCategory", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/_enums.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/_enums.py new file mode 100644 index 000000000000..a81e68e9cc9d --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/_enums.py @@ -0,0 +1,138 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class AttackStrategy(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Strategies for attacks.""" + + EASY = "easy" + """Represents a default set of easy complexity attacks. 
Easy complexity attack strategies are + defined as attacks that do not require any Large Language Model to convert or orchestrate.""" + ASCII_ART = "ascii_art" + """Represents ASCII art, a graphic design technique that uses printable characters.""" + ASCII_SMUGGLER = "ascii_smuggler" + """Represents ASCII smuggling, a technique for encoding or hiding data.""" + ATBASH = "atbash" + """Represents the Atbash cipher, a substitution cipher that reverses the alphabet.""" + BASE64 = "base64" + """Represents Base64 encoding, a method for encoding binary data as text.""" + BINARY = "binary" + """Represents binary encoding, a representation of data in binary format.""" + CAESAR = "caesar" + """Represents the Caesar cipher, a substitution cipher that shifts characters.""" + CHARACTER_SPACE = "character_space" + """Represents character space manipulation, a technique involving spacing between characters.""" + JAILBREAK = "jailbreak" + """Represents character swapping, a technique for rearranging characters in text.""" + + +class ConnectionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The Type (or category) of the connection.""" + + AZURE_OPEN_AI = "AzureOpenAI" + """Azure OpenAI Service""" + AZURE_BLOB_STORAGE = "AzureBlob" + """Azure Blob Storage, with specified container""" + AZURE_STORAGE_ACCOUNT = "AzureStorageAccount" + """Azure Blob Storage, with container not specified (used by Assistants)""" + AZURE_AI_SEARCH = "CognitiveSearch" + """Azure AI Search""" + COSMOS_DB = "CosmosDB" + """CosmosDB""" + API_KEY = "ApiKey" + """Generic connection that uses API Key authentication""" + APPLICATION_CONFIGURATION = "AppConfig" + """Application Configuration""" + APPLICATION_INSIGHTS = "AppInsights" + """Application Insights""" + CUSTOM = "CustomKeys" + """Custom Keys""" + + +class CredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The credential type used by the connection.""" + + API_KEY = "ApiKey" + """API Key credential""" + ENTRA_ID = "AAD" + """Entra 
ID credential (formerly known as AAD)""" + SAS = "SAS" + """Shared Access Signature (SAS) credential""" + CUSTOM = "CustomKeys" + """Custom credential""" + NONE = "None" + """No credential""" + + +class DatasetType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the type of data.""" + + URI_FILE = "uri_file" + """URI file.""" + URI_FOLDER = "uri_folder" + """URI folder.""" + + +class DeploymentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of DeploymentType.""" + + MODEL_DEPLOYMENT = "ModelDeployment" + """Model deployment""" + + +class IndexType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of IndexType.""" + + AZURE_SEARCH = "AzureSearch" + """Azure search""" + COSMOS_DB = "CosmosDBNoSqlVectorStore" + """CosmosDB""" + MANAGED_AZURE_SEARCH = "ManagedAzureSearch" + """Managed Azure Search""" + + +class ListViewType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """List View Type Definition.""" + + ACTIVE_ONLY = "ActiveOnly" + """List only active items.""" + ARCHIVED_ONLY = "ArchivedOnly" + """List only archived items.""" + ALL = "All" + """List all items.""" + + +class PendingUploadType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of pending upload.""" + + NONE = "None" + """No pending upload.""" + TEMPORARY_BLOB_REFERENCE = "TemporaryBlobReference" + """Temporary Blob Reference is the only supported type.""" + + +class RiskCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Risk category for the attack objective.""" + + HATE_UNFAIRNESS = "HateUnfairness" + """Represents content related to hate or unfairness.""" + VIOLENCE = "Violence" + """Represents content related to violence.""" + SEXUAL = "Sexual" + """Represents content of a sexual nature.""" + SELF_HARM = "SelfHarm" + """Represents content related to self-harm.""" + PROTECTED_MATERIAL = "ProtectedMaterial" + """Represents content involving protected material.""" + CODE_VULNERABILITY = "CodeVulnerability" + """Represents content 
related to code vulnerabilities.""" + UNGROUNDED_ATTRIBUTES = "UngroundedAttributes" + """Represents content with ungrounded attributes.""" diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/_models.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/_models.py new file mode 100644 index 000000000000..c2294f75df76 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/_models.py @@ -0,0 +1,1276 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation + +from typing import Any, Dict, List, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload + +from .. import _model_base +from .._model_base import rest_discriminator, rest_field +from ._enums import CredentialType, DatasetType, DeploymentType, IndexType, PendingUploadType + +if TYPE_CHECKING: + from .. import models as _models + + +class BaseCredentials(_model_base.Model): + """A base class for connection credentials. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + EntraIDCredentials, ApiKeyCredentials, CustomCredential, NoAuthenticationCredentials, + SASCredentials + + :ivar auth_type: The type of credential used by the connection. Required. Known values are: + "ApiKey", "AAD", "SAS", "CustomKeys", and "None". 
+ :vartype auth_type: str or ~azure.ai.projects.onedp.models.CredentialType + """ + + __mapping__: Dict[str, _model_base.Model] = {} + auth_type: str = rest_discriminator(name="authType", visibility=["read"]) + """The type of credential used by the connection. Required. Known values are: \"ApiKey\", \"AAD\", + \"SAS\", \"CustomKeys\", and \"None\".""" + + @overload + def __init__( + self, + *, + auth_type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ApiKeyCredentials(BaseCredentials, discriminator="ApiKey"): + """API Key Credential definition. + + :ivar auth_type: The credentail type. Required. API Key credential + :vartype auth_type: str or ~azure.ai.projects.onedp.models.API_KEY + :ivar api_key: API Key. + :vartype api_key: str + """ + + auth_type: Literal[CredentialType.API_KEY] = rest_discriminator(name="authType", visibility=["read"]) # type: ignore + """The credentail type. Required. API Key credential""" + api_key: Optional[str] = rest_field(name="apiKey", visibility=["read"]) + """API Key.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, auth_type=CredentialType.API_KEY, **kwargs) + + +class AssetCredentialResponse(_model_base.Model): + """Represents a reference to a blob for consumption. + + :ivar blob_reference_for_consumption: Credential info to access the storage account. Required. 
+ :vartype blob_reference_for_consumption: + ~azure.ai.projects.onedp.models.BlobReferenceForConsumption + """ + + blob_reference_for_consumption: "_models.BlobReferenceForConsumption" = rest_field( + name="blobReferenceForConsumption", visibility=["read", "create", "update", "delete", "query"] + ) + """Credential info to access the storage account. Required.""" + + @overload + def __init__( + self, + *, + blob_reference_for_consumption: "_models.BlobReferenceForConsumption", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Index(_model_base.Model): + """Index resource Definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureAISearchIndex, CosmosDBIndex, ManagedAzureAISearchIndex + + :ivar type: Type of index. Required. Known values are: "AzureSearch", + "CosmosDBNoSqlVectorStore", and "ManagedAzureSearch". + :vartype type: str or ~azure.ai.projects.onedp.models.IndexType + :ivar stage: Asset stage. + :vartype stage: str + :ivar id: A unique identifier for the asset, assetId probably?. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + """ + + __mapping__: Dict[str, _model_base.Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of index. Required. 
Known values are: \"AzureSearch\", \"CosmosDBNoSqlVectorStore\", and + \"ManagedAzureSearch\".""" + stage: Optional[str] = rest_field(visibility=["read", "create", "update"]) + """Asset stage.""" + id: Optional[str] = rest_field(visibility=["read"]) + """A unique identifier for the asset, assetId probably?.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The asset description text.""" + tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Tag dictionary. Tags can be added, removed, and updated.""" + + @overload + def __init__( + self, + *, + type: str, + stage: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AzureAISearchIndex(Index, discriminator="AzureSearch"): + """Azure AI Search Index Definition. + + :ivar stage: Asset stage. + :vartype stage: str + :ivar id: A unique identifier for the asset, assetId probably?. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Type of index. Required. 
Azure search + :vartype type: str or ~azure.ai.projects.onedp.models.AZURE_SEARCH + :ivar connection_name: Name of connection to Azure AI Search. Required. + :vartype connection_name: str + :ivar index_name: Name of index in Azure AI Search resource to attach. Required. + :vartype index_name: str + """ + + type: Literal[IndexType.AZURE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of index. Required. Azure search""" + connection_name: str = rest_field(name="connectionName", visibility=["read", "create", "update", "delete", "query"]) + """Name of connection to Azure AI Search. Required.""" + index_name: str = rest_field(name="indexName", visibility=["read", "create", "update", "delete", "query"]) + """Name of index in Azure AI Search resource to attach. Required.""" + + @overload + def __init__( + self, + *, + connection_name: str, + index_name: str, + stage: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=IndexType.AZURE_SEARCH, **kwargs) + + +class BlobReferenceForConsumption(_model_base.Model): + """Represents a reference to a blob for consumption. + + :ivar blob_uri: Blob URI path for client to upload data. Example: + ``https://blob.windows.core.net/Container/Path ``. + Required. + :vartype blob_uri: str + :ivar storage_account_arm_id: ARM ID of the storage account to use. Required. + :vartype storage_account_arm_id: str + :ivar credential: Credential info to access the storage account. Required. 
+ :vartype credential: ~azure.ai.projects.onedp.models.SasCredential + """ + + blob_uri: str = rest_field(name="blobUri", visibility=["read", "create", "update", "delete", "query"]) + """Blob URI path for client to upload data. Example: ``https://blob.windows.core.net/Container/Path``. Required.""" + storage_account_arm_id: str = rest_field( + name="storageAccountArmId", visibility=["read", "create", "update", "delete", "query"] + ) + """ARM ID of the storage account to use. Required.""" + credential: "_models.SasCredential" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Credential info to access the storage account. Required.""" + + @overload + def __init__( + self, + *, + blob_uri: str, + storage_account_arm_id: str, + credential: "_models.SasCredential", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Connection(_model_base.Model): + """Response from the list and get connections operations. + + :ivar auth_type: Discriminator property for Connection. Required. + :vartype auth_type: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar type: Category of the connection. Required. Known values are: "AzureOpenAI", "AzureBlob", + "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", "AppConfig", "AppInsights", and + "CustomKeys". + :vartype type: str or ~azure.ai.projects.onedp.models.ConnectionType + :ivar target: The connection URL to be used for this service. Required. + :vartype target: str + :ivar is_default: Whether the connection is tagged as the default connection of its type. + Required. + :vartype is_default: bool + :ivar credentials: The credentials used by the connection. Required. 
+ :vartype credentials: ~azure.ai.projects.onedp.models.BaseCredentials + :ivar metadata: Metadata of the connection. Required. + :vartype metadata: dict[str, str] + """ + + auth_type: str = rest_discriminator(name="authType") + """Discriminator property for Connection. Required.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + type: Union[str, "_models.ConnectionType"] = rest_field(visibility=["read"]) + """Category of the connection. Required. Known values are: \"AzureOpenAI\", \"AzureBlob\", + \"AzureStorageAccount\", \"CognitiveSearch\", \"CosmosDB\", \"ApiKey\", \"AppConfig\", + \"AppInsights\", and \"CustomKeys\".""" + target: str = rest_field(visibility=["read"]) + """The connection URL to be used for this service. Required.""" + is_default: bool = rest_field(name="isDefault", visibility=["read"]) + """Whether the connection is tagged as the default connection of its type. Required.""" + credentials: "_models.BaseCredentials" = rest_field(visibility=["read"]) + """The credentials used by the connection. Required.""" + metadata: Dict[str, str] = rest_field(visibility=["read"]) + """Metadata of the connection. Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CosmosDBIndex(Index, discriminator="CosmosDBNoSqlVectorStore"): + """CosmosDB Vector Store Index Definition. + + :ivar stage: Asset stage. + :vartype stage: str + :ivar id: A unique identifier for the asset, assetId probably?. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. 
+ :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Type of index. Required. CosmosDB + :vartype type: str or ~azure.ai.projects.onedp.models.COSMOS_DB + :ivar connection_name: Name of connection to CosmosDB. Required. + :vartype connection_name: str + :ivar database_name: Name of the CosmosDB Database. Required. + :vartype database_name: str + :ivar container_name: Name of CosmosDB Container. Required. + :vartype container_name: str + :ivar embedding_configuration: Embedding model configuration. Required. + :vartype embedding_configuration: ~azure.ai.projects.onedp.models.EmbeddingConfiguration + """ + + type: Literal[IndexType.COSMOS_DB] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of index. Required. CosmosDB""" + connection_name: str = rest_field(name="connectionName", visibility=["read", "create", "update", "delete", "query"]) + """Name of connection to CosmosDB. Required.""" + database_name: str = rest_field(name="databaseName", visibility=["read", "create", "update", "delete", "query"]) + """Name of the CosmosDB Database. Required.""" + container_name: str = rest_field(name="containerName", visibility=["read", "create", "update", "delete", "query"]) + """Name of CosmosDB Container. Required.""" + embedding_configuration: "_models.EmbeddingConfiguration" = rest_field( + name="embeddingConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """Embedding model configuration. Required.""" + + @overload + def __init__( + self, + *, + connection_name: str, + database_name: str, + container_name: str, + embedding_configuration: "_models.EmbeddingConfiguration", + stage: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=IndexType.COSMOS_DB, **kwargs) + + +class CustomCredential(BaseCredentials, discriminator="CustomKeys"): + """Custom credential defintion. + + :ivar auth_type: The credential type. Required. Custom credential + :vartype auth_type: str or ~azure.ai.projects.onedp.models.CUSTOM + """ + + auth_type: Literal[CredentialType.CUSTOM] = rest_discriminator(name="authType", visibility=["read"]) # type: ignore + """The credential type. Required. Custom credential""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, auth_type=CredentialType.CUSTOM, **kwargs) + + +class DatasetVersion(_model_base.Model): + """DatasetVersion Definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FileDatasetVersion, FolderDatasetVersion + + :ivar dataset_uri: [Required] Uri of the data. Example: + ``https://go.microsoft.com/fwlink/?linkid=2202330``. Required. + :vartype dataset_uri: str + :ivar type: Dataset type. Required. Known values are: "uri_file" and "uri_folder". + :vartype type: str or ~azure.ai.projects.onedp.models.DatasetType + :ivar is_reference: Indicates if dataset is reference only or managed by dataset service. If + true, the underlying data will be deleted when the dataset version is deleted. + :vartype is_reference: bool + :ivar stage: Asset stage. + :vartype stage: str + :ivar id: A unique identifier for the asset, assetId probably?. + :vartype id: str + :ivar name: The name of the resource. Required. 
+ :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + """ + + __mapping__: Dict[str, _model_base.Model] = {} + dataset_uri: str = rest_field(name="datasetUri", visibility=["read", "create"]) + """[Required] Uri of the data. Example: ``https://go.microsoft.com/fwlink/?linkid=2202330``. Required.""" + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Dataset type. Required. Known values are: \"uri_file\" and \"uri_folder\".""" + is_reference: Optional[bool] = rest_field(name="isReference", visibility=["read"]) + """Indicates if dataset is reference only or managed by dataset service. If true, the underlying + data will be deleted when the dataset version is deleted.""" + stage: Optional[str] = rest_field(visibility=["read", "create", "update"]) + """Asset stage.""" + id: Optional[str] = rest_field(visibility=["read"]) + """A unique identifier for the asset, assetId probably?.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The asset description text.""" + tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Tag dictionary. Tags can be added, removed, and updated.""" + + @overload + def __init__( + self, + *, + dataset_uri: str, + type: str, + stage: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Deployment(_model_base.Model): + """Model Deployment Definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ModelDeployment + + :ivar type: The type of the deployment. Required. "ModelDeployment" + :vartype type: str or ~azure.ai.projects.onedp.models.DeploymentType + :ivar name: Name of the deployment. Required. + :vartype name: str + """ + + __mapping__: Dict[str, _model_base.Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of the deployment. Required. \"ModelDeployment\"""" + name: str = rest_field(visibility=["read"]) + """Name of the deployment. Required.""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EmbeddingConfiguration(_model_base.Model): + """Embedding configuration class. + + :ivar model_deployment_name: Deployment name of embedding model. It can point to a model + deployment either in the parent AIServices or a connection. Required. + :vartype model_deployment_name: str + :ivar embedding_field: Embedding field. Required. + :vartype embedding_field: str + """ + + model_deployment_name: str = rest_field( + name="modelDeploymentName", visibility=["read", "create", "update", "delete", "query"] + ) + """Deployment name of embedding model. It can point to a model deployment either in the parent + AIServices or a connection. 
Required.""" + embedding_field: str = rest_field(name="embeddingField", visibility=["read", "create", "update", "delete", "query"]) + """Embedding field. Required.""" + + @overload + def __init__( + self, + *, + model_deployment_name: str, + embedding_field: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EntraIDCredentials(BaseCredentials, discriminator="AAD"): + """Entra ID credential definition. + + :ivar auth_type: The credential type. Required. Entra ID credential (formerly known as AAD) + :vartype auth_type: str or ~azure.ai.projects.onedp.models.ENTRA_ID + """ + + auth_type: Literal[CredentialType.ENTRA_ID] = rest_discriminator(name="authType", visibility=["read"]) # type: ignore + """The credential type. Required. Entra ID credential (formerly known as AAD)""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, auth_type=CredentialType.ENTRA_ID, **kwargs) + + +class Evaluation(_model_base.Model): + """Evaluation Definition. + + :ivar id: Identifier of the evaluation. Required. + :vartype id: str + :ivar data: Data for evaluation. Required. + :vartype data: ~azure.ai.projects.onedp.models.InputData + :ivar display_name: Display Name for evaluation. It helps to find the evaluation easily in AI + Foundry. It does not need to be unique. + :vartype display_name: str + :ivar description: Description of the evaluation. It can be used to store additional + information about the evaluation and is mutable. 
+ :vartype description: str + :ivar status: Status of the evaluation. It is set by service and is read-only. + :vartype status: str + :ivar tags: Evaluation's tags. Unlike properties, tags are fully mutable. + :vartype tags: dict[str, str] + :ivar properties: Evaluation's properties. Unlike tags, properties are add-only. Once added, a + property cannot be removed. + :vartype properties: dict[str, str] + :ivar evaluators: Evaluators to be used for the evaluation. Required. + :vartype evaluators: dict[str, ~azure.ai.projects.onedp.models.EvaluatorConfiguration] + """ + + id: str = rest_field(visibility=["read"]) + """Identifier of the evaluation. Required.""" + data: "_models.InputData" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Data for evaluation. Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Display Name for evaluation. It helps to find the evaluation easily in AI Foundry. It does not + need to be unique.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description of the evaluation. It can be used to store additional information about the + evaluation and is mutable.""" + status: Optional[str] = rest_field(visibility=["read"]) + """Status of the evaluation. It is set by service and is read-only.""" + tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Evaluation's tags. Unlike properties, tags are fully mutable.""" + properties: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Evaluation's properties. Unlike tags, properties are add-only. 
Once added, a property cannot be + removed.""" + evaluators: Dict[str, "_models.EvaluatorConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Evaluators to be used for the evaluation. Required.""" + + @overload + def __init__( + self, + *, + data: "_models.InputData", + evaluators: Dict[str, "_models.EvaluatorConfiguration"], + display_name: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluatorConfiguration(_model_base.Model): + """Evaluator Configuration. + + :ivar id: Identifier of the evaluator. Required. + :vartype id: str + :ivar init_params: Initialization parameters of the evaluator. + :vartype init_params: dict[str, any] + :ivar data_mapping: Data parameters of the evaluator. + :vartype data_mapping: dict[str, str] + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Identifier of the evaluator. Required.""" + init_params: Optional[Dict[str, Any]] = rest_field( + name="initParams", visibility=["read", "create", "update", "delete", "query"] + ) + """Initialization parameters of the evaluator.""" + data_mapping: Optional[Dict[str, str]] = rest_field( + name="dataMapping", visibility=["read", "create", "update", "delete", "query"] + ) + """Data parameters of the evaluator.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + init_params: Optional[Dict[str, Any]] = None, + data_mapping: Optional[Dict[str, str]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileDatasetVersion(DatasetVersion, discriminator="uri_file"): + """FileDatasetVersion Definition. + + :ivar dataset_uri: [Required] Uri of the data. Example: + ``https://go.microsoft.com/fwlink/?linkid=2202330``. Required. + :vartype dataset_uri: str + :ivar is_reference: Indicates if dataset is reference only or managed by dataset service. If + true, the underlying data will be deleted when the dataset version is deleted. + :vartype is_reference: bool + :ivar stage: Asset stage. + :vartype stage: str + :ivar id: A unique identifier for the asset, assetId probably?. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Dataset type. Required. URI file. + :vartype type: str or ~azure.ai.projects.onedp.models.URI_FILE + :ivar open_ai_purpose: Indicates OpenAI Purpose. FileDatasets created with this field will be + compatible with OpenAI-specific features. Required. + :vartype open_ai_purpose: str + """ + + type: Literal[DatasetType.URI_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Dataset type. Required. URI file.""" + open_ai_purpose: str = rest_field(name="openAIPurpose", visibility=["read", "create", "update", "delete", "query"]) + """Indicates OpenAI Purpose. FileDatasets created with this field will be compatible with + OpenAI-specific features. 
Required.""" + + @overload + def __init__( + self, + *, + dataset_uri: str, + open_ai_purpose: str, + stage: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=DatasetType.URI_FILE, **kwargs) + + +class FolderDatasetVersion(DatasetVersion, discriminator="uri_folder"): + """FileDatasetVersion Definition. + + :ivar dataset_uri: [Required] Uri of the data. Example: + ``https://go.microsoft.com/fwlink/?linkid=2202330``. Required. + :vartype dataset_uri: str + :ivar is_reference: Indicates if dataset is reference only or managed by dataset service. If + true, the underlying data will be deleted when the dataset version is deleted. + :vartype is_reference: bool + :ivar stage: Asset stage. + :vartype stage: str + :ivar id: A unique identifier for the asset, assetId probably?. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Dataset type. Required. URI folder. + :vartype type: str or ~azure.ai.projects.onedp.models.URI_FOLDER + """ + + type: Literal[DatasetType.URI_FOLDER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Dataset type. Required. URI folder.""" + + @overload + def __init__( + self, + *, + dataset_uri: str, + stage: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=DatasetType.URI_FOLDER, **kwargs) + + +class InputData(_model_base.Model): + """Abstract data class. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + InputDataset + + :ivar type: Type of the data. Required. Default value is None. + :vartype type: str + """ + + __mapping__: Dict[str, _model_base.Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of the data. Required. Default value is None.""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class InputDataset(InputData, discriminator="dataset"): + """Dataset as source for evaluation. + + :ivar type: Required. Default value is "dataset". + :vartype type: str + :ivar id: Evaluation input data. Required. + :vartype id: str + """ + + type: Literal["dataset"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Default value is \"dataset\".""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Evaluation input data. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type="dataset", **kwargs) + + +class ManagedAzureAISearchIndex(Index, discriminator="ManagedAzureSearch"): + """Managed Azure AI Search Index Definition. + + :ivar stage: Asset stage. + :vartype stage: str + :ivar id: A unique identifier for the asset, assetId probably?. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Type of index. Required. Managed Azure Search + :vartype type: str or ~azure.ai.projects.onedp.models.MANAGED_AZURE_SEARCH + :ivar vector_store_id: Vector store id of managed index. Required. + :vartype vector_store_id: str + """ + + type: Literal[IndexType.MANAGED_AZURE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of index. Required. Managed Azure Search""" + vector_store_id: str = rest_field(name="vectorStoreId", visibility=["read", "create", "update", "delete", "query"]) + """Vector store id of managed index. Required.""" + + @overload + def __init__( + self, + *, + vector_store_id: str, + stage: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=IndexType.MANAGED_AZURE_SEARCH, **kwargs) + + +class ModelDeployment(Deployment, discriminator="ModelDeployment"): + """Model Deployment Definition. 
+ + :ivar name: Name of the deployment. Required. + :vartype name: str + :ivar type: The type of the deployment. Required. Model deployment + :vartype type: str or ~azure.ai.projects.onedp.models.MODEL_DEPLOYMENT + :ivar model_name: Publisher-specific name of the deployed model. Required. + :vartype model_name: str + :ivar model_version: Publisher-specific version of the deployed model. Required. + :vartype model_version: str + :ivar model_publisher: Name of the deployed model's publisher. Required. + :vartype model_publisher: str + :ivar capabilities: Capabilities of deployed model. Required. + :vartype capabilities: dict[str, str] + :ivar sku: Sku of the model deployment. Required. + :vartype sku: ~azure.ai.projects.onedp.models.Sku + :ivar connection_name: Name of the connection the deployment comes from. + :vartype connection_name: str + """ + + type: Literal[DeploymentType.MODEL_DEPLOYMENT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the deployment. Required. Model deployment""" + model_name: str = rest_field(name="modelName", visibility=["read"]) + """Publisher-specific name of the deployed model. Required.""" + model_version: str = rest_field(name="modelVersion", visibility=["read"]) + """Publisher-specific version of the deployed model. Required.""" + model_publisher: str = rest_field(name="modelPublisher", visibility=["read"]) + """Name of the deployed model's publisher. Required.""" + capabilities: Dict[str, str] = rest_field(visibility=["read"]) + """Capabilities of deployed model. Required.""" + sku: "_models.Sku" = rest_field(visibility=["read"]) + """Sku of the model deployment. Required.""" + connection_name: Optional[str] = rest_field(name="connectionName", visibility=["read"]) + """Name of the connection the deployment comes from.""" + + @overload + def __init__( + self, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=DeploymentType.MODEL_DEPLOYMENT, **kwargs) + + +class NoAuthenticationCredentials(BaseCredentials, discriminator="None"): + """Credentials that do not require authentication. + + :ivar auth_type: The credential type. Required. No credential + :vartype auth_type: str or ~azure.ai.projects.onedp.models.NONE + """ + + auth_type: Literal[CredentialType.NONE] = rest_discriminator(name="authType", visibility=["read"]) # type: ignore + """The credential type. Required. No credential""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, auth_type=CredentialType.NONE, **kwargs) + + +class PendingUploadRequest(_model_base.Model): + """Represents a request for a pending upload. + + :ivar pending_upload_id: If PendingUploadId is not provided, a random GUID will be used. + :vartype pending_upload_id: str + :ivar connection_name: Name of Azure blob storage connection to use for generating temporary + SAS token. + :vartype connection_name: str + :ivar pending_upload_type: TemporaryBlobReference is the only supported type. Required. + Temporary Blob Reference is the only supported type. 
+ :vartype pending_upload_type: str or ~azure.ai.projects.onedp.models.TEMPORARY_BLOB_REFERENCE + """ + + pending_upload_id: Optional[str] = rest_field( + name="pendingUploadId", visibility=["read", "create", "update", "delete", "query"] + ) + """If PendingUploadId is not provided, a random GUID will be used.""" + connection_name: Optional[str] = rest_field( + name="connectionName", visibility=["read", "create", "update", "delete", "query"] + ) + """Name of Azure blob storage connection to use for generating temporary SAS token.""" + pending_upload_type: Literal[PendingUploadType.TEMPORARY_BLOB_REFERENCE] = rest_field( + name="pendingUploadType", visibility=["read", "create", "update", "delete", "query"] + ) + """TemporaryBlobReference is the only supported type. Required. Temporary Blob Reference is the + only supported type.""" + + @overload + def __init__( + self, + *, + pending_upload_type: Literal[PendingUploadType.TEMPORARY_BLOB_REFERENCE], + pending_upload_id: Optional[str] = None, + connection_name: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PendingUploadResponse(_model_base.Model): + """Represents the response for a pending upload request. + + :ivar blob_reference_for_consumption: Container-level read, write, list SAS. Required. + :vartype blob_reference_for_consumption: + ~azure.ai.projects.onedp.models.BlobReferenceForConsumption + :ivar pending_upload_id: ID for this upload request. Required. + :vartype pending_upload_id: str + :ivar dataset_version: Version of dataset to be created if user did not specify version when + initially creating upload. + :vartype dataset_version: str + :ivar pending_upload_type: TemporaryBlobReference is the only supported type. Required. 
+ Temporary Blob Reference is the only supported type. + :vartype pending_upload_type: str or ~azure.ai.projects.onedp.models.TEMPORARY_BLOB_REFERENCE + """ + + blob_reference_for_consumption: "_models.BlobReferenceForConsumption" = rest_field( + name="blobReferenceForConsumption", visibility=["read", "create", "update", "delete", "query"] + ) + """Container-level read, write, list SAS. Required.""" + pending_upload_id: str = rest_field( + name="pendingUploadId", visibility=["read", "create", "update", "delete", "query"] + ) + """ID for this upload request. Required.""" + dataset_version: Optional[str] = rest_field( + name="datasetVersion", visibility=["read", "create", "update", "delete", "query"] + ) + """Version of dataset to be created if user did not specify version when initially creating + upload.""" + pending_upload_type: Literal[PendingUploadType.TEMPORARY_BLOB_REFERENCE] = rest_field( + name="pendingUploadType", visibility=["read", "create", "update", "delete", "query"] + ) + """TemporaryBlobReference is the only supported type. Required. Temporary Blob Reference is the + only supported type.""" + + @overload + def __init__( + self, + *, + blob_reference_for_consumption: "_models.BlobReferenceForConsumption", + pending_upload_id: str, + pending_upload_type: Literal[PendingUploadType.TEMPORARY_BLOB_REFERENCE], + dataset_version: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class RedTeam(_model_base.Model): + """Red team details. + + :ivar id: Identifier of the red team. Required. + :vartype id: str + :ivar scan_name: Name of the red-team scan. Required. + :vartype scan_name: str + :ivar num_turns: Number of simulation rounds. Required. 
+ :vartype num_turns: int + :ivar attack_strategy: List of attack strategies or nested lists of attack strategies. + Required. + :vartype attack_strategy: list[str or ~azure.ai.projects.onedp.models.AttackStrategy] + :ivar simulation_only: Simulation-only or Simulation + Evaluation. Default false, if true the + scan outputs conversation not evaluation result. Required. + :vartype simulation_only: bool + :ivar risk_categories: List of risk categories to generate attack objectives for. Required. + :vartype risk_categories: list[str or ~azure.ai.projects.onedp.models.RiskCategory] + :ivar application_scenario: Application scenario for the red team operation, to generate + scenario specific attacks. + :vartype application_scenario: str + :ivar tags: Red team's tags. Unlike properties, tags are fully mutable. + :vartype tags: dict[str, str] + :ivar properties: Red team's properties. Unlike tags, properties are add-only. Once added, a + property cannot be removed. + :vartype properties: dict[str, str] + :ivar status: Status of the red-team. It is set by service and is read-only. + :vartype status: str + """ + + id: str = rest_field(visibility=["read"]) + """Identifier of the red team. Required.""" + scan_name: str = rest_field(name="scanName", visibility=["read", "create", "update", "delete", "query"]) + """Name of the red-team scan. Required.""" + num_turns: int = rest_field(name="numTurns", visibility=["read", "create", "update", "delete", "query"]) + """Number of simulation rounds. Required.""" + attack_strategy: List[Union[str, "_models.AttackStrategy"]] = rest_field( + name="attackStrategy", visibility=["read", "create", "update", "delete", "query"] + ) + """List of attack strategies or nested lists of attack strategies. Required.""" + simulation_only: bool = rest_field( + name="simulationOnly", visibility=["read", "create", "update", "delete", "query"] + ) + """Simulation-only or Simulation + Evaluation. 
Default false, if true the scan outputs + conversation not evaluation result. Required.""" + risk_categories: List[Union[str, "_models.RiskCategory"]] = rest_field( + name="riskCategories", visibility=["read", "create", "update", "delete", "query"] + ) + """List of risk categories to generate attack objectives for. Required.""" + application_scenario: Optional[str] = rest_field( + name="applicationScenario", visibility=["read", "create", "update", "delete", "query"] + ) + """Application scenario for the red team operation, to generate scenario specific attacks.""" + tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Red team's tags. Unlike properties, tags are fully mutable.""" + properties: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Red team's properties. Unlike tags, properties are add-only. Once added, a property cannot be + removed.""" + status: Optional[str] = rest_field(visibility=["read"]) + """Status of the red-team. It is set by service and is read-only.""" + + @overload + def __init__( + self, + *, + scan_name: str, + num_turns: int, + attack_strategy: List[Union[str, "_models.AttackStrategy"]], + simulation_only: bool, + risk_categories: List[Union[str, "_models.RiskCategory"]], + application_scenario: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SasCredential(_model_base.Model): + """SAS Credential definition. + + :ivar sas_uri: SAS uri. Required. + :vartype sas_uri: str + :ivar type: Type of credential. Required. Default value is "SAS". 
+ :vartype type: str + """ + + sas_uri: str = rest_field(name="sasUri", visibility=["read"]) + """SAS uri. Required.""" + type: Literal["SAS"] = rest_field(visibility=["read"]) + """Type of credential. Required. Default value is \"SAS\".""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type: Literal["SAS"] = "SAS" + + +class SASCredentials(BaseCredentials, discriminator="SAS"): + """Shared Access Signature (SAS) credential definition. + + :ivar auth_type: The credential type. Required. Shared Access Signature (SAS) credential + :vartype auth_type: str or ~azure.ai.projects.onedp.models.SAS + :ivar sas_token: SAS token. + :vartype sas_token: str + """ + + auth_type: Literal[CredentialType.SAS] = rest_discriminator(name="authType", visibility=["read"]) # type: ignore + """The credential type. Required. Shared Access Signature (SAS) credential""" + sas_token: Optional[str] = rest_field(name="sasToken", visibility=["read"]) + """SAS token.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, auth_type=CredentialType.SAS, **kwargs) + + +class Sku(_model_base.Model): + """Sku information. + + :ivar capacity: Sku capacity. Required. + :vartype capacity: int + :ivar family: Sku family. Required. + :vartype family: str + :ivar name: Sku name. Required. + :vartype name: str + :ivar size: Sku size. Required. + :vartype size: str + :ivar tier: Sku tier. Required. + :vartype tier: str + """ + + capacity: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku capacity. Required.""" + family: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku family. 
Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku name. Required.""" + size: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku size. Required.""" + tier: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku tier. Required.""" + + @overload + def __init__( + self, + *, + capacity: int, + family: str, + name: str, + size: str, + tier: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/_patch.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/_patch.py new file mode 100644 index 000000000000..03a12558c643 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/_patch.py @@ -0,0 +1,23 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List +from ._patch_evaluations import EvaluationMetrics + +__all__: List[str] = [ + "EvaluationMetrics", +] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/_patch_evaluations.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/_patch_evaluations.py new file mode 100644 index 000000000000..df5067c9a3ac --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/models/_patch_evaluations.py @@ -0,0 +1,20 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from enum import Enum + +from azure.core import CaseInsensitiveEnumMeta + + +class EvaluationMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + RELEVANCE = "relevance" + HATE_UNFAIRNESS = "hate_unfairness" + VIOLENCE = "violence" + GROUNDEDNESS = "groundedness" + GROUNDEDNESS_PRO = "groundedness_pro" diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/__init__.py new file mode 100644 index 000000000000..2d5ade65e432 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/__init__.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import InternalOperations # type: ignore +from ._operations import ServicePatternsOperations # type: ignore +from ._operations import ConnectionsOperations # type: ignore +from ._operations import EvaluationsOperations # type: ignore +from ._operations import DatasetsOperations # type: ignore +from ._operations import IndexesOperations # type: ignore +from ._operations import DeploymentsOperations # type: ignore +from ._operations import RedTeamsOperations # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "InternalOperations", + "ServicePatternsOperations", + "ConnectionsOperations", + "EvaluationsOperations", + "DatasetsOperations", + "IndexesOperations", + "DeploymentsOperations", + "RedTeamsOperations", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_operations.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_operations.py new file mode 100644 index 000000000000..4fba0872b67e --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_operations.py @@ -0,0 +1,3151 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from io import IOBase +import json +from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, overload +import urllib.parse + +from azure.core import PipelineClient +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict + +from .. import models as _models +from .._configuration import AIProjectClientConfiguration +from .._model_base import SdkJSONEncoder, _deserialize +from .._serialization import Deserializer, Serializer +from .._validation import api_version_validation + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +JSON = MutableMapping[str, Any] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_connections_get_request(name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/connections/{name}" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] 
= _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_connections_get_with_credentials_request( # pylint: disable=name-too-long + name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/connections/{name}/withCredentials" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_connections_list_request( + *, + connection_type: Optional[Union[str, _models.ConnectionType]] = None, + default_connection: Optional[bool] = None, + top: Optional[int] = None, + skip: Optional[int] = None, + maxpagesize: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/connections" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if connection_type is not None: + _params["connectionType"] = _SERIALIZER.query("connection_type", connection_type, "str") + if default_connection is not None: + _params["defaultConnection"] = 
_SERIALIZER.query("default_connection", default_connection, "bool") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_connections_list_with_credentials_request( # pylint: disable=name-too-long + *, + connection_type: Optional[Union[str, _models.ConnectionType]] = None, + default_connection: Optional[bool] = None, + top: Optional[int] = None, + skip: Optional[int] = None, + maxpagesize: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/connections/withCredentials" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if connection_type is not None: + _params["connectionType"] = _SERIALIZER.query("connection_type", connection_type, "str") + if default_connection is not None: + _params["defaultConnection"] = _SERIALIZER.query("default_connection", default_connection, "bool") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, 
params=_params, headers=_headers, **kwargs) + + +def build_evaluations_get_request(name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/evaluations/runs/{name}" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_evaluations_list_request( + *, top: Optional[int] = None, skip: Optional[int] = None, maxpagesize: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/evaluations/runs" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_evaluations_create_run_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/evaluations/runs:run" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_datasets_list_versions_request( + name: str, + *, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/datasets/{name}/versions" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", 
tags, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_datasets_list_latest_request( + *, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/datasets" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_datasets_get_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/datasets/{name}/versions/{version}" + 
path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_datasets_delete_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/datasets/{name}/versions/{version}" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_datasets_create_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/datasets/{name}/versions/{version}" + 
path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_datasets_start_pending_upload_version_request( # pylint: disable=name-too-long + name: str, version: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/datasets/{name}/versions/{version}/startPendingUpload" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_datasets_get_credentials_request(name: str, version: str, **kwargs: Any) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/datasets/{name}/versions/{version}/credentials" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_indexes_list_versions_request( + name: str, + *, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/indexes/{name}/versions" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "str") + 
if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_indexes_list_latest_request( + *, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/indexes" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_indexes_get_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # 
Construct URL + _url = "/indexes/{name}/versions/{version}" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_indexes_delete_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/indexes/{name}/versions/{version}" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_indexes_create_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # 
Construct URL + _url = "/indexes/{name}/versions/{version}" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_deployments_get_request(name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/deployments/{name}" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_deployments_list_request( + *, + model_publisher: Optional[str] = None, + model_name: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[int] = None, + maxpagesize: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/deployments" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if model_publisher is not None: + _params["modelPublisher"] = _SERIALIZER.query("model_publisher", model_publisher, "str") + if model_name is not None: + _params["modelName"] = _SERIALIZER.query("model_name", model_name, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_red_teams_get_request(name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/redTeams/runs/{name}" + path_format_arguments = { + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_red_teams_list_request( + *, top: Optional[int] = None, skip: Optional[int] = None, maxpagesize: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or 
{}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/redTeams/runs" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_red_teams_create_run_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/redTeams/runs:run" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class InternalOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.AIProjectClient`'s + :attr:`internal` attribute. 
+ """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + +class ServicePatternsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.AIProjectClient`'s + :attr:`service_patterns` attribute. + """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + self.building_blocks = ServicePatternsBuildingBlocksOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + +class ConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.AIProjectClient`'s + :attr:`connections` attribute. 
+ """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, name: str, **kwargs: Any) -> _models.Connection: + """Get a connection by name, without populating connection credentials. + + :param name: The name of the resource. Required. + :type name: str + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Connection] = kwargs.pop("cls", None) + + _request = build_connections_get_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, 
StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Connection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: + """Get a connection by name, with its connection credentials. + + :param name: The name of the resource. Required. + :type name: str + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Connection] = kwargs.pop("cls", None) + + _request = build_connections_get_with_credentials_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code 
not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Connection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, + *, + connection_type: Optional[Union[str, _models.ConnectionType]] = None, + default_connection: Optional[bool] = None, + top: Optional[int] = None, + skip: Optional[int] = None, + **kwargs: Any + ) -> Iterable["_models.Connection"]: + """List all connections in the project, without populating connection credentials. + + :keyword connection_type: List connections of this specific type. Known values are: + "AzureOpenAI", "AzureBlob", "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", + "AppConfig", "AppInsights", and "CustomKeys". Default value is None. + :paramtype connection_type: str or ~azure.ai.projects.onedp.models.ConnectionType + :keyword default_connection: List connections that are default connections. Default value is + None. + :paramtype default_connection: bool + :keyword top: The number of result items to return. Default value is None. + :paramtype top: int + :keyword skip: The number of result items to skip. Default value is None. 
+ :paramtype skip: int + :return: An iterator like instance of Connection + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.onedp.models.Connection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.Connection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_connections_list_request( + connection_type=connection_type, + default_connection=default_connection, + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.Connection], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_with_credentials( + self, + *, + connection_type: Optional[Union[str, _models.ConnectionType]] = None, + default_connection: Optional[bool] = None, + top: Optional[int] = None, + skip: Optional[int] = None, + **kwargs: Any + ) -> Iterable["_models.Connection"]: + """List all connections in the project, with their connection credentials. + + :keyword connection_type: List connections of this specific type. Known values are: + "AzureOpenAI", "AzureBlob", "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", + "AppConfig", "AppInsights", and "CustomKeys". Default value is None. + :paramtype connection_type: str or ~azure.ai.projects.onedp.models.ConnectionType + :keyword default_connection: List connections that are default connections. Default value is + None. + :paramtype default_connection: bool + :keyword top: The number of result items to return. Default value is None. + :paramtype top: int + :keyword skip: The number of result items to skip. Default value is None. 
+ :paramtype skip: int + :return: An iterator like instance of Connection + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.onedp.models.Connection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.Connection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_connections_list_with_credentials_request( + connection_type=connection_type, + default_connection=default_connection, + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.Connection], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class EvaluationsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.AIProjectClient`'s + :attr:`evaluations` attribute. + """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "name", "client_request_id", "accept"]}, + ) + def get(self, name: str, **kwargs: Any) -> _models.Evaluation: + """Get an evaluation run by name. + + :param name: Identifier of the evaluation. Required. + :type name: str + :return: Evaluation. 
The Evaluation is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Evaluation + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Evaluation] = kwargs.pop("cls", None) + + _request = build_evaluations_get_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Evaluation, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={ + "2025-05-15-preview": ["api_version", 
"top", "skip", "maxpagesize", "client_request_id", "accept"] + }, + ) + def list( + self, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> Iterable["_models.Evaluation"]: + """List evaluation runs. + + :keyword top: The number of result items to return. Default value is None. + :paramtype top: int + :keyword skip: The number of result items to skip. Default value is None. + :paramtype skip: int + :return: An iterator like instance of Evaluation + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.onedp.models.Evaluation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.Evaluation]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_evaluations_list_request( + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) 
+ path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.Evaluation], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @overload + def create_run( + self, evaluation: _models.Evaluation, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Evaluation: + """Creates an evaluation run. + + :param evaluation: Evaluation to be run. Required. + :type evaluation: ~azure.ai.projects.onedp.models.Evaluation + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Evaluation. The Evaluation is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Evaluation + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_run( + self, evaluation: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Evaluation: + """Creates an evaluation run. + + :param evaluation: Evaluation to be run. Required. 
+ :type evaluation: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Evaluation. The Evaluation is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Evaluation + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_run( + self, evaluation: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Evaluation: + """Creates an evaluation run. + + :param evaluation: Evaluation to be run. Required. + :type evaluation: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: Evaluation. The Evaluation is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Evaluation + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "content_type", "accept"]}, + ) + def create_run(self, evaluation: Union[_models.Evaluation, JSON, IO[bytes]], **kwargs: Any) -> _models.Evaluation: + """Creates an evaluation run. + + :param evaluation: Evaluation to be run. Is one of the following types: Evaluation, JSON, + IO[bytes] Required. + :type evaluation: ~azure.ai.projects.onedp.models.Evaluation or JSON or IO[bytes] + :return: Evaluation. 
The Evaluation is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Evaluation + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Evaluation] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(evaluation, (IOBase, bytes)): + _content = evaluation + else: + _content = json.dumps(evaluation, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_evaluations_create_run_request( + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Evaluation, response.json()) + + if cls: + return 
cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class DatasetsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.AIProjectClient`'s + :attr:`datasets` attribute. + """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_versions( + self, + name: str, + *, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.DatasetVersion"]: + """List all versions of the given DatasetVersion. + + :param name: The name of the resource. Required. + :type name: str + :keyword top: Top count of results, top count cannot be greater than the page size. If topCount + > page size, results with be default page size count will be returned. Default value is None. + :paramtype top: int + :keyword skip: Continuation token for pagination. Default value is None. + :paramtype skip: str + :keyword tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :paramtype tags: str + :keyword list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, ListViewType.All] + View type for including/excluding (for example) archived entities. Known values are: + "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :paramtype list_view_type: str or ~azure.ai.projects.onedp.models.ListViewType + :return: An iterator like instance of DatasetVersion + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.onedp.models.DatasetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.DatasetVersion]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_datasets_list_versions_request( + name=name, + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + 
list_of_elem = _deserialize(List[_models.DatasetVersion], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_latest( + self, + *, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.DatasetVersion"]: + """List the latest version of each DatasetVersion. + + :keyword top: Top count of results, top count cannot be greater than the page size. If topCount + > page size, results with be default page size count will be returned. Default value is None. + :paramtype top: int + :keyword skip: Continuation token for pagination. Default value is None. + :paramtype skip: str + :keyword tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :paramtype tags: str + :keyword list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, ListViewType.All] + View type for including/excluding (for example) archived entities. Known values are: + "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :paramtype list_view_type: str or ~azure.ai.projects.onedp.models.ListViewType + :return: An iterator like instance of DatasetVersion + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.onedp.models.DatasetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.DatasetVersion]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_datasets_list_latest_request( + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
_deserialize(List[_models.DatasetVersion], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_version(self, name: str, version: str, **kwargs: Any) -> _models.DatasetVersion: + """Get the specific version of the DatasetVersion. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to retrieve. Required. + :type version: str + :return: DatasetVersion. 
The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) + + _request = build_datasets_get_version_request( + name=name, + version=version, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.DatasetVersion, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete_version( # pylint: disable=inconsistent-return-statements + self, name: str, version: str, **kwargs: Any + ) -> None: + """Delete the specific version of the DatasetVersion. + + :param name: The name of the resource. Required. 
+ :type name: str + :param version: The version of the DatasetVersion to delete. Required. + :type version: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_datasets_delete_version_request( + name=name, + version=version, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @overload + def create_version( + self, + name: str, + version: str, + body: _models.DatasetVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or replace an existing DatasetVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to create or replace. Required. + :type version: str + :param body: The definition of the DatasetVersion to create. Required. 
+ :type body: ~azure.ai.projects.onedp.models.DatasetVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_version( + self, name: str, version: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or replace an existing DatasetVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to create or replace. Required. + :type version: str + :param body: The definition of the DatasetVersion to create. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_version( + self, name: str, version: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or replace an existing DatasetVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to create or replace. Required. + :type version: str + :param body: The definition of the DatasetVersion to create. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_version( + self, name: str, version: str, body: Union[_models.DatasetVersion, JSON, IO[bytes]], **kwargs: Any + ) -> _models.DatasetVersion: + """Create a new or replace an existing DatasetVersion with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to create or replace. Required. + :type version: str + :param body: The definition of the DatasetVersion to create. Is one of the following types: + DatasetVersion, JSON, IO[bytes] Required. + :type body: ~azure.ai.projects.onedp.models.DatasetVersion or JSON or IO[bytes] + :return: DatasetVersion. The DatasetVersion is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.DatasetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_datasets_create_version_request( + name=name, + version=version, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, 
+ params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.DatasetVersion, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def start_pending_upload_version( + self, + name: str, + version: str, + body: _models.PendingUploadRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param body: Parameters for the action. Required. + :type body: ~azure.ai.projects.onedp.models.PendingUploadRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: PendingUploadResponse. 
The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def start_pending_upload_version( + self, name: str, version: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param body: Parameters for the action. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def start_pending_upload_version( + self, name: str, version: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param body: Parameters for the action. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: PendingUploadResponse. 
The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def start_pending_upload_version( + self, name: str, version: str, body: Union[_models.PendingUploadRequest, JSON, IO[bytes]], **kwargs: Any + ) -> _models.PendingUploadResponse: + """Start a new or get an existing pending upload of a dataset for a specific version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param body: Parameters for the action. Is one of the following types: PendingUploadRequest, + JSON, IO[bytes] Required. + :type body: ~azure.ai.projects.onedp.models.PendingUploadRequest or JSON or IO[bytes] + :return: PendingUploadResponse. The PendingUploadResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.PendingUploadResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PendingUploadResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_datasets_start_pending_upload_version_request( + name=name, + version=version, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + 
headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.PendingUploadResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_credentials(self, name: str, version: str, body: Any, **kwargs: Any) -> _models.AssetCredentialResponse: + """Get download sas for dataset version. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the DatasetVersion to operate on. Required. + :type version: str + :param body: Parameters for the action. Required. + :type body: any + :return: AssetCredentialResponse. 
The AssetCredentialResponse is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.AssetCredentialResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[_models.AssetCredentialResponse] = kwargs.pop("cls", None) + + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_datasets_get_credentials_request( + name=name, + version=version, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.AssetCredentialResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return 
deserialized # type: ignore + + +class IndexesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.AIProjectClient`'s + :attr:`indexes` attribute. + """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_versions( + self, + name: str, + *, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.Index"]: + """List all versions of the given Index. + + :param name: The name of the resource. Required. + :type name: str + :keyword top: Top count of results, top count cannot be greater than the page size. If topCount + > page size, results with be default page size count will be returned. Default value is None. + :paramtype top: int + :keyword skip: Continuation token for pagination. Default value is None. + :paramtype skip: str + :keyword tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :paramtype tags: str + :keyword list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, ListViewType.All] + View type for including/excluding (for example) archived entities. Known values are: + "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :paramtype list_view_type: str or ~azure.ai.projects.onedp.models.ListViewType + :return: An iterator like instance of Index + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.onedp.models.Index] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Index]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_indexes_list_versions_request( + name=name, + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
_deserialize(List[_models.Index], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_latest( + self, + *, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.Index"]: + """List the latest version of each Index. + + :keyword top: Top count of results, top count cannot be greater than the page size. If topCount + > page size, results with be default page size count will be returned. Default value is None. + :paramtype top: int + :keyword skip: Continuation token for pagination. Default value is None. + :paramtype skip: str + :keyword tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :paramtype tags: str + :keyword list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, ListViewType.All] + View type for including/excluding (for example) archived entities. Known values are: + "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :paramtype list_view_type: str or ~azure.ai.projects.onedp.models.ListViewType + :return: An iterator like instance of Index + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.onedp.models.Index] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Index]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_indexes_list_latest_request( + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
_deserialize(List[_models.Index], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_version(self, name: str, version: str, **kwargs: Any) -> _models.Index: + """Get the specific version of the Index. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to retrieve. Required. + :type version: str + :return: Index. 
The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Index] = kwargs.pop("cls", None) + + _request = build_indexes_get_version_request( + name=name, + version=version, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Index, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete_version( # pylint: disable=inconsistent-return-statements + self, name: str, version: str, **kwargs: Any + ) -> None: + """Delete the specific version of the Index. + + :param name: The name of the resource. Required. 
+ :type name: str + :param version: The version of the Index to delete. Required. + :type version: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_indexes_delete_version_request( + name=name, + version=version, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @overload + def create_version( + self, name: str, version: str, body: _models.Index, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Index: + """Create a new or replace an existing Index with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or replace. Required. + :type version: str + :param body: The definition of the Index to create. Required. + :type body: ~azure.ai.projects.onedp.models.Index + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_version( + self, name: str, version: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Index: + """Create a new or replace an existing Index with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or replace. Required. + :type version: str + :param body: The definition of the Index to create. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_version( + self, name: str, version: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.Index: + """Create a new or replace an existing Index with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or replace. Required. + :type version: str + :param body: The definition of the Index to create. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: Index. 
The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_version( + self, name: str, version: str, body: Union[_models.Index, JSON, IO[bytes]], **kwargs: Any + ) -> _models.Index: + """Create a new or replace an existing Index with the given version id. + + :param name: The name of the resource. Required. + :type name: str + :param version: The specific version id of the Index to create or replace. Required. + :type version: str + :param body: The definition of the Index to create. Is one of the following types: Index, JSON, + IO[bytes] Required. + :type body: ~azure.ai.projects.onedp.models.Index or JSON or IO[bytes] + :return: Index. The Index is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Index + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Index] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_indexes_create_version_request( + name=name, + version=version, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url 
= self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Index, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class DeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.AIProjectClient`'s + :attr:`deployments` attribute. + """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, name: str, **kwargs: Any) -> _models.Deployment: + """Get a deployed model. + + :param name: Name of the deployment. Required. + :type name: str + :return: Deployment. 
The Deployment is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.Deployment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Deployment] = kwargs.pop("cls", None) + + _request = build_deployments_get_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.Deployment, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, + *, + model_publisher: Optional[str] = None, + model_name: Optional[str] = None, + top: Optional[int] = 
None, + skip: Optional[int] = None, + **kwargs: Any + ) -> Iterable["_models.Deployment"]: + """List all deployed models in the project. + + :keyword model_publisher: Model publisher to filter models by. Default value is None. + :paramtype model_publisher: str + :keyword model_name: Model name (the publisher specific name) to filter models by. Default + value is None. + :paramtype model_name: str + :keyword top: The number of result items to return. Default value is None. + :paramtype top: int + :keyword skip: The number of result items to skip. Default value is None. + :paramtype skip: int + :return: An iterator like instance of Deployment + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.onedp.models.Deployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.Deployment]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_deployments_list_request( + model_publisher=model_publisher, + model_name=model_name, + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.Deployment], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class RedTeamsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.AIProjectClient`'s + :attr:`red_teams` attribute. 
+ """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "name", "client_request_id", "accept"]}, + ) + def get(self, name: str, **kwargs: Any) -> _models.RedTeam: + """Get a redteam by name. + + :param name: Identifier of the red team. Required. + :type name: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.RedTeam] = kwargs.pop("cls", None) + + _request = build_red_teams_get_request( + name=name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if 
_stream:
+                try:
+                    response.read()  # Load the body in memory and close the socket
+                except (StreamConsumedError, StreamClosedError):
+                    pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        response_headers = {}
+        response_headers["x-ms-client-request-id"] = self._deserialize(
+            "str", response.headers.get("x-ms-client-request-id")
+        )
+
+        if _stream:
+            deserialized = response.iter_bytes()
+        else:
+            deserialized = _deserialize(_models.RedTeam, response.json())
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @distributed_trace
+    @api_version_validation(
+        method_added_on="2025-05-15-preview",
+        params_added_on={
+            "2025-05-15-preview": ["api_version", "top", "skip", "maxpagesize", "client_request_id", "accept"]
+        },
+    )
+    def list(
+        self, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any
+    ) -> Iterable["_models.RedTeam"]:
+        """List redteam runs.
+
+        :keyword top: The number of result items to return. Default value is None.
+        :paramtype top: int
+        :keyword skip: The number of result items to skip. Default value is None.
+ :paramtype skip: int + :return: An iterator like instance of RedTeam + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.projects.onedp.models.RedTeam] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_models.RedTeam]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_red_teams_list_request( + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.RedTeam], 
deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @overload + def create_run( + self, red_team: _models.RedTeam, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Required. + :type red_team: ~azure.ai.projects.onedp.models.RedTeam + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_run(self, red_team: JSON, *, content_type: str = "application/json", **kwargs: Any) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Required. + :type red_team: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: RedTeam. 
The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_run( + self, red_team: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Required. + :type red_team: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: RedTeam. The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "content_type", "accept"]}, + ) + def create_run(self, red_team: Union[_models.RedTeam, JSON, IO[bytes]], **kwargs: Any) -> _models.RedTeam: + """Creates a redteam run. + + :param red_team: Redteam to be run. Is one of the following types: RedTeam, JSON, IO[bytes] + Required. + :type red_team: ~azure.ai.projects.onedp.models.RedTeam or JSON or IO[bytes] + :return: RedTeam. 
The RedTeam is compatible with MutableMapping + :rtype: ~azure.ai.projects.onedp.models.RedTeam + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.RedTeam] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(red_team, (IOBase, bytes)): + _content = red_team + else: + _content = json.dumps(red_team, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_red_teams_create_run_request( + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.RedTeam, response.json()) + + if cls: + return cls(pipeline_response, 
deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class ServicePatternsBuildingBlocksOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.onedp.AIProjectClient`'s + :attr:`building_blocks` attribute. + """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_patch.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_patch.py new file mode 100644 index 000000000000..6a5a64cfd8d1 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_patch.py @@ -0,0 +1,31 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List +from ._patch_assistants import AssistantsOperations +from ._patch_datasets import DatasetsOperations +from ._patch_inference import InferenceOperations +from ._patch_telemetry import TelemetryOperations + + +__all__: List[str] = [ + "InferenceOperations", + "TelemetryOperations", + "DatasetsOperations", + "AssistantsOperations", +] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
class AssistantsOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.ai.projects.onedp.AIProjectClient`'s
    :attr:`assistants` attribute.
    """

    # TODO: Merge all code related to handling user-agent, into a single place.
    def __init__(self, outer_instance: "azure.ai.projects.onedp.AIProjectClient") -> None:  # type: ignore[name-defined]

        # All returned clients will have this application id set on their user-agent.
        # For more info on user-agent HTTP header, see:
        # https://azure.github.io/azure-sdk/general_azurecore.html#telemetry-policy
        USER_AGENT_APP_ID = "AIProjectClient"

        if hasattr(outer_instance, "_user_agent") and outer_instance._user_agent:
            # If the calling application has set "user_agent" when constructing the AIProjectClient,
            # take that value and prepend it to USER_AGENT_APP_ID.
            self._user_agent = f"{outer_instance._user_agent}-{USER_AGENT_APP_ID}"
        else:
            self._user_agent = USER_AGENT_APP_ID

        self._outer_instance = outer_instance

    @distributed_trace
    def get_client(self, **kwargs) -> "AssistantsClient":  # type: ignore[name-defined]
        """Get an authenticated AssistantsClient (from the package azure-ai-assistants) to use with
        your AI Foundry Project. Keyword arguments are passed to the constructor of
        AssistantsClient.

        .. note:: The package `azure-ai-assistants` must be installed prior to calling this method.

        :return: An authenticated Assistant Client.
        :rtype: ~azure.ai.assistants.AssistantsClient

        :raises ModuleNotFoundError: if the `azure-ai-assistants` package is not installed.
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        try:
            from azure.ai.assistants import AssistantsClient
        except ModuleNotFoundError as e:
            raise ModuleNotFoundError(
                "Azure AI Assistant SDK is not installed. Please install it using 'pip install azure-ai-assistants'"
            ) from e

        # Reuse the project endpoint and credential; the caller may override the
        # user-agent via kwargs, otherwise the composed one from __init__ is used.
        client = AssistantsClient(
            endpoint=self._outer_instance._config.endpoint,  # pylint: disable=protected-access
            credential=self._outer_instance._config.credential,  # pylint: disable=protected-access
            user_agent=kwargs.pop("user_agent", self._user_agent),
            **kwargs,
        )

        return client
class DatasetsOperations(DatasetsOperationsGenerated):
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.ai.projects.onedp.AIProjectClient`'s
    :attr:`datasets` attribute.
    """

    def _create_dataset_and_get_its_container_client(
        self,
        name: str,
        input_version: str,
    ) -> Tuple[ContainerClient, str]:
        """Create a new pending-upload dataset version and return an authenticated
        ContainerClient (from the azure-storage-blob package) to the dataset's blob storage.

        :param name: The name of the dataset.
        :type name: str
        :param input_version: The version identifier for the dataset.
        :type input_version: str
        :return: A tuple of (container client, dataset version used).
        :rtype: Tuple[~azure.storage.blob.ContainerClient, str]
        :raises ValueError: If the service response is missing the blob reference,
            the blob URI, or does not carry SAS credentials.
        """
        pending_upload_response: PendingUploadResponse = self.start_pending_upload_version(
            name=name,
            version=input_version,
            body=PendingUploadRequest(pending_upload_type=PendingUploadType.TEMPORARY_BLOB_REFERENCE),
        )
        output_version: str = input_version

        # Validate the service response before handing it to azure-storage-blob.
        if not pending_upload_response.blob_reference_for_consumption:
            raise ValueError("Blob reference for consumption is not present")
        if not pending_upload_response.blob_reference_for_consumption.credential.type:
            raise ValueError("Credential type is not present")
        if pending_upload_response.blob_reference_for_consumption.credential.type != CredentialType.SAS:
            raise ValueError("Credential type is not SAS")
        if not pending_upload_response.blob_reference_for_consumption.blob_uri:
            raise ValueError("Blob URI is not present or empty")

        if logger.getEffectiveLevel() == logging.DEBUG:
            logger.debug(
                "[_create_dataset_and_get_its_container_client] pending_upload_response.pending_upload_id = %s.",
                pending_upload_response.pending_upload_id,
            )
            logger.debug(
                "[_create_dataset_and_get_its_container_client] pending_upload_response.pending_upload_type = %s.",
                pending_upload_response.pending_upload_type,
            )  # == PendingUploadType.TEMPORARY_BLOB_REFERENCE
            logger.debug(
                "[_create_dataset_and_get_its_container_client] pending_upload_response.blob_reference_for_consumption.blob_uri = %s.",
                pending_upload_response.blob_reference_for_consumption.blob_uri,
            )  # Hosted on behalf of (HOBO), not visible to the user.
            logger.debug(
                "[_create_dataset_and_get_its_container_client] pending_upload_response.blob_reference_for_consumption.storage_account_arm_id = %s.",
                pending_upload_response.blob_reference_for_consumption.storage_account_arm_id,
            )  # /subscriptions/<>/resourceGroups/<>/Microsoft.Storage/accounts/<>
            logger.debug(
                "[_create_dataset_and_get_its_container_client] pending_upload_response.blob_reference_for_consumption.credential.sas_uri = %s.",
                pending_upload_response.blob_reference_for_consumption.credential.sas_uri,
            )
            logger.debug(
                "[_create_dataset_and_get_its_container_client] pending_upload_response.blob_reference_for_consumption.credential.type = %s.",
                pending_upload_response.blob_reference_for_consumption.credential.type,
            )  # == CredentialType.SAS

        # For an overview of the Blob storage SDK in Python see:
        # https://learn.microsoft.com/azure/storage/blobs/storage-quickstart-blobs-python
        # https://learn.microsoft.com/azure/storage/blobs/storage-blob-upload-python
        # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.containerclient?view=azure-python#azure-storage-blob-containerclient-from-container-url
        return (
            ContainerClient.from_container_url(
                container_url=pending_upload_response.blob_reference_for_consumption.blob_uri,
            ),
            output_version,
        )

    @distributed_trace
    def upload_file_and_create(self, *, name: str, version: str, file: str, **kwargs: Any) -> DatasetVersion:
        """Upload file to a blob storage, and create a dataset that references this file.

        This method uses the `ContainerClient.upload_blob` method from the azure-storage-blob package
        to upload the file. Any keyword arguments provided will be passed to the `upload_blob` method.

        :keyword name: The name of the dataset. Required.
        :paramtype name: str
        :keyword version: The version identifier for the dataset. Required.
        :paramtype version: str
        :keyword file: The file name (including optional path) to be uploaded. Required.
        :paramtype file: str
        :return: The created dataset version.
        :rtype: ~azure.ai.projects.onedp.models.DatasetVersion
        :raises ValueError: If the given path does not exist, or is a folder.
        :raises ~azure.core.exceptions.HttpResponseError: If an error occurs during the HTTP request.
        """
        path_file = Path(file)
        if not path_file.exists():
            raise ValueError("The provided file does not exist.")
        if path_file.is_dir():
            # Fixed: the error message previously referred to a nonexistent method name.
            raise ValueError("The provided file is actually a folder. Use method `upload_folder_and_create` instead")

        container_client, output_version = self._create_dataset_and_get_its_container_client(
            name=name, input_version=version
        )

        with container_client:

            with open(file=file, mode="rb") as data:

                blob_name = path_file.name  # Extract the file name from the path.
                logger.debug(
                    "[upload_file_and_create] Start uploading file `%s` as blob `%s`.",
                    file,
                    blob_name,
                )

                # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.containerclient?view=azure-python#azure-storage-blob-containerclient-upload-blob
                with container_client.upload_blob(name=blob_name, data=data, **kwargs) as blob_client:

                    logger.debug("[upload_file_and_create] Done uploading")

                    dataset_version = self.create_version(
                        name=name,
                        version=output_version,
                        body=DatasetVersion(
                            # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.blobclient?view=azure-python#azure-storage-blob-blobclient-url
                            # NOTE(review): per the doc above, ".url" may contain the SAS token —
                            # confirm whether it should be stripped before persisting.
                            dataset_uri=blob_client.url,
                            type=DatasetType.URI_FILE,
                        ),
                    )

        return dataset_version

    @distributed_trace
    def upload_folder_and_create(self, *, name: str, version: str, folder: str, **kwargs: Any) -> DatasetVersion:
        """Upload all files in a folder and its sub folders to a blob storage, while maintaining
        relative paths, and create a dataset that references this folder.

        This method uses the `ContainerClient.upload_blob` method from the azure-storage-blob package
        to upload each file. Any keyword arguments provided will be passed to the `upload_blob` method.

        :keyword name: The name of the dataset. Required.
        :paramtype name: str
        :keyword version: The version identifier for the dataset. Required.
        :paramtype version: str
        :keyword folder: The folder name (including optional path) to be uploaded. Required.
        :paramtype folder: str
        :return: The created dataset version.
        :rtype: ~azure.ai.projects.onedp.models.DatasetVersion
        :raises ValueError: If the given path does not exist, is a file, or is an empty folder.
        :raises ~azure.core.exceptions.HttpResponseError: If an error occurs during the HTTP request.
        """
        path_folder = Path(folder)
        if not path_folder.exists():
            raise ValueError("The provided folder does not exist.")
        if path_folder.is_file():
            # Fixed: the error message previously referred to a nonexistent method name.
            raise ValueError("The provided folder is actually a file. Use method `upload_file_and_create` instead.")

        container_client, output_version = self._create_dataset_and_get_its_container_client(
            name=name, input_version=version
        )

        with container_client:

            # Recursively traverse all files in the folder.
            files_uploaded: bool = False
            for file_path in path_folder.rglob("*"):  # `rglob` matches all files and folders recursively
                if file_path.is_file():  # Check if the path is a file. Skip folders.
                    blob_name = file_path.relative_to(path_folder)  # Blob name relative to the folder
                    logger.debug(
                        "[upload_folder_and_create] Start uploading file `%s` as blob `%s`.",
                        file_path,
                        blob_name,
                    )
                    with file_path.open("rb") as data:  # Open the file for reading in binary mode
                        # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.containerclient?view=azure-python#azure-storage-blob-containerclient-upload-blob
                        container_client.upload_blob(name=str(blob_name), data=data, **kwargs)
                    logger.debug("[upload_folder_and_create] Done uploading.")
                    files_uploaded = True

            if not files_uploaded:
                raise ValueError("The provided folder is empty.")

            dataset_version = self.create_version(
                name=name,
                version=output_version,
                body=DatasetVersion(
                    # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.blobclient?view=azure-python#azure-storage-blob-blobclient-url
                    # NOTE(review): per the doc above, ".url" may contain the SAS token —
                    # confirm whether it should be stripped before persisting.
                    dataset_uri=container_client.url,
                    type=DatasetType.URI_FOLDER,
                ),
            )

        return dataset_version
class InferenceOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.ai.projects.onedp.AIProjectClient`'s
    :attr:`inference` attribute.
    """

    def __init__(self, outer_instance: "azure.ai.projects.onedp.AIProjectClient") -> None:  # type: ignore[name-defined]

        # All returned inference clients will have this application id set on their user-agent.
        # For more info on user-agent HTTP header, see:
        # https://azure.github.io/azure-sdk/general_azurecore.html#telemetry-policy
        USER_AGENT_APP_ID = "AIProjectClient"

        if hasattr(outer_instance, "_user_agent") and outer_instance._user_agent:
            # If the calling application has set "user_agent" when constructing the AIProjectClient,
            # take that value and prepend it to USER_AGENT_APP_ID.
            self._user_agent = f"{outer_instance._user_agent}-{USER_AGENT_APP_ID}"
        else:
            self._user_agent = USER_AGENT_APP_ID

        self._outer_instance = outer_instance

    @classmethod
    def _get_inference_url(cls, input_url: str) -> str:
        """Convert a Project endpoint URL of the form ``https://<host>/<path>`` into the
        inference endpoint URL ``https://<host>/api/models``.

        :param input_url: The input endpoint URL used to construct AIProjectClient.
        :type input_url: str
        :return: The endpoint URL required to construct inference clients from the azure-ai-inference package.
        :rtype: str
        :raises ValueError: If the input is not an https URL with a host.
        """
        parsed = urlparse(input_url)
        if parsed.scheme != "https" or not parsed.netloc:
            raise ValueError("Invalid endpoint URL format. Must be an https URL with a host.")
        return f"https://{parsed.netloc}/api/models"

    @distributed_trace
    def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient":  # type: ignore[name-defined]
        """Get an authenticated ChatCompletionsClient (from the package azure-ai-inference) to use with
        AI models deployed to your AI Foundry Project. Keyword arguments are passed to the constructor of
        ChatCompletionsClient.

        At least one AI model that supports chat completions must be deployed.

        .. note:: The package `azure-ai-inference` must be installed prior to calling this method.

        :return: An authenticated chat completions client.
        :rtype: ~azure.ai.inference.ChatCompletionsClient

        :raises ModuleNotFoundError: if the `azure-ai-inference` package is not installed.
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        try:
            from azure.ai.inference import ChatCompletionsClient
        except ModuleNotFoundError as e:
            raise ModuleNotFoundError(
                "Azure AI Inference SDK is not installed. Please install it using 'pip install azure-ai-inference'"
            ) from e

        endpoint = self._get_inference_url(self._outer_instance._config.endpoint)  # pylint: disable=protected-access
        # TODO: Remove this before //build?
        # Older Inference SDK versions use ml.azure.com as the scope. Make sure to set the correct value here.
        # This is only relevant of course if EntraID auth is used.
        credential_scopes = ["https://cognitiveservices.azure.com/.default"]

        client = ChatCompletionsClient(
            endpoint=endpoint,
            credential=self._outer_instance._config.credential,  # pylint: disable=protected-access
            credential_scopes=credential_scopes,
            user_agent=kwargs.pop("user_agent", self._user_agent),
            **kwargs,
        )

        return client

    @distributed_trace
    def get_embeddings_client(self, **kwargs) -> "EmbeddingsClient":  # type: ignore[name-defined]
        """Get an authenticated EmbeddingsClient (from the package azure-ai-inference) to use with
        AI models deployed to your AI Foundry Project. Keyword arguments are passed to the constructor of
        EmbeddingsClient.

        At least one AI model that supports text embeddings must be deployed.

        .. note:: The package `azure-ai-inference` must be installed prior to calling this method.

        :return: An authenticated Embeddings client.
        :rtype: ~azure.ai.inference.EmbeddingsClient

        :raises ModuleNotFoundError: if the `azure-ai-inference` package is not installed.
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        try:
            from azure.ai.inference import EmbeddingsClient
        except ModuleNotFoundError as e:
            raise ModuleNotFoundError(
                "Azure AI Inference SDK is not installed. Please install it using 'pip install azure-ai-inference'"
            ) from e

        endpoint = self._get_inference_url(self._outer_instance._config.endpoint)  # pylint: disable=protected-access
        # Older Inference SDK versions use ml.azure.com as the scope. Make sure to set the correct value here.
        # This is only relevant of course if EntraID auth is used.
        credential_scopes = ["https://cognitiveservices.azure.com/.default"]

        client = EmbeddingsClient(
            endpoint=endpoint,
            credential=self._outer_instance._config.credential,  # pylint: disable=protected-access
            credential_scopes=credential_scopes,
            user_agent=kwargs.pop("user_agent", self._user_agent),
            **kwargs,
        )

        return client

    @distributed_trace
    def get_image_embeddings_client(self, **kwargs) -> "ImageEmbeddingsClient":  # type: ignore[name-defined]
        """Get an authenticated ImageEmbeddingsClient (from the package azure-ai-inference) to use with
        AI models deployed to your AI Foundry Project. Keyword arguments are passed to the constructor of
        ImageEmbeddingsClient.

        At least one AI model that supports image embeddings must be deployed.

        .. note:: The package `azure-ai-inference` must be installed prior to calling this method.

        :return: An authenticated Image Embeddings client.
        :rtype: ~azure.ai.inference.ImageEmbeddingsClient

        :raises ModuleNotFoundError: if the `azure-ai-inference` package is not installed.
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        try:
            from azure.ai.inference import ImageEmbeddingsClient
        except ModuleNotFoundError as e:
            raise ModuleNotFoundError(
                "Azure AI Inference SDK is not installed. Please install it using 'pip install azure-ai-inference'"
            ) from e

        endpoint = self._get_inference_url(self._outer_instance._config.endpoint)  # pylint: disable=protected-access
        # Older Inference SDK versions use ml.azure.com as the scope. Make sure to set the correct value here.
        # This is only relevant of course if EntraID auth is used.
        credential_scopes = ["https://cognitiveservices.azure.com/.default"]

        client = ImageEmbeddingsClient(
            endpoint=endpoint,
            credential=self._outer_instance._config.credential,  # pylint: disable=protected-access
            credential_scopes=credential_scopes,
            user_agent=kwargs.pop("user_agent", self._user_agent),
            **kwargs,
        )

        return client

    @distributed_trace
    def get_azure_openai_client(
        self, *, api_version: Optional[str] = None, connection_name: Optional[str] = None, **kwargs
    ) -> "AzureOpenAI":  # type: ignore[name-defined]
        """Get an authenticated AzureOpenAI client (from the `openai` package) for the default
        Azure OpenAI connection (if `connection_name` is not specified), or from the Azure OpenAI
        resource given by its connection name.

        .. note:: The package `openai` must be installed prior to calling this method.

        :keyword api_version: The Azure OpenAI api-version to use when creating the client. Optional.
            See "Data plane - Inference" row in the table at
            https://learn.microsoft.com/azure/ai-services/openai/reference#api-specs. If this keyword
            is not specified, you must set the environment variable `OPENAI_API_VERSION` instead.
        :paramtype api_version: str
        :keyword connection_name: The name of a connection to an Azure OpenAI resource in your
            AI Foundry project. Optional. If not provided, the default Azure OpenAI connection will be used.
        :paramtype connection_name: str

        :return: An authenticated AzureOpenAI client
        :rtype: ~openai.AzureOpenAI

        :raises ~azure.core.exceptions.ResourceNotFoundError: if an Azure OpenAI connection
            does not exist.
        :raises ModuleNotFoundError: if the `openai` package is not installed.
        :raises ValueError: if the connection name is an empty string, or the connection uses
            an unsupported authentication type.
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        if connection_name is not None and not connection_name:
            raise ValueError("Connection name cannot be empty")

        try:
            from openai import AzureOpenAI
        except ModuleNotFoundError as e:
            raise ModuleNotFoundError(
                "OpenAI SDK is not installed. Please install it using 'pip install openai'"
            ) from e

        connection = Connection()
        if connection_name:
            connection = self._outer_instance.connections.get(name=connection_name, **kwargs)
            if connection.type != ConnectionType.AZURE_OPEN_AI:
                raise ValueError(f"Connection `{connection_name}` is not of type Azure OpenAI.")
        else:
            # If connection name was not specified, try to get the default Azure OpenAI connection.
            connections: Iterable[Connection] = self._outer_instance.connections.list(
                connection_type=ConnectionType.AZURE_OPEN_AI, default_connection=True, **kwargs
            )
            try:
                connection = next(iter(connections))
            except StopIteration as exc:
                # Bug fix: `connections` is a synchronous iterable, so exhausting it raises
                # StopIteration. The previous `except StopAsyncIteration` could never fire,
                # letting StopIteration propagate instead of raising ResourceNotFoundError.
                raise ResourceNotFoundError("No default Azure OpenAI connection found.") from exc
            connection_name = connection.name

        # TODO: if there isn't a default openai connection, we would have to by convention
        # use https://{resource-name}.openai.azure.com where {resource-name} is the same as the
        # foundry API endpoint (https://{resource-name}.services.ai.azure.com)

        # TODO: Confirm that it's okay to do two REST API calls here.
        # If the connection uses API key authentication, we need to make another service call to get
        # the connection with API key populated.
        if connection.credentials.auth_type == CredentialType.API_KEY:
            connection = self._outer_instance.connections.get_with_credentials(name=connection_name, **kwargs)

        logger.debug("[InferenceOperations.get_azure_openai_client] connection = %s", str(connection))

        # The OpenAI client rejects endpoints with a trailing slash.
        azure_endpoint = connection.target[:-1] if connection.target.endswith("/") else connection.target

        if isinstance(connection.credentials, ApiKeyCredentials):

            logger.debug(
                "[InferenceOperations.get_azure_openai_client] Creating AzureOpenAI using API key authentication"
            )
            api_key = connection.credentials.api_key
            client = AzureOpenAI(api_key=api_key, azure_endpoint=azure_endpoint, api_version=api_version)

        elif isinstance(connection.credentials, EntraIDCredentials):

            logger.debug(
                "[InferenceOperations.get_azure_openai_client] Creating AzureOpenAI using Entra ID authentication"
            )

            try:
                from azure.identity import get_bearer_token_provider
            except ModuleNotFoundError as e:
                # Bug fix: the pip package name is `azure-identity`, not `azure.identity`.
                raise ModuleNotFoundError(
                    "azure.identity package not installed. Please install it using 'pip install azure-identity'"
                ) from e

            client = AzureOpenAI(
                # See https://learn.microsoft.com/python/api/azure-identity/azure.identity?view=azure-python#azure-identity-get-bearer-token-provider
                azure_ad_token_provider=get_bearer_token_provider(
                    self._outer_instance._config.credential,  # pylint: disable=protected-access
                    "https://cognitiveservices.azure.com/.default",
                ),
                azure_endpoint=azure_endpoint,
                api_version=api_version,
            )

        else:
            # Bug fix: this message was missing the f-string prefix, so the placeholder was
            # emitted literally; it also referenced a nonexistent `connection.auth_type`.
            raise ValueError(f"Unsupported authentication type {connection.credentials.auth_type}")

        return client
class TelemetryOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.ai.projects.onedp.AIProjectClient`'s
    :attr:`telemetry` attribute.
    """

    # Cached Application Insights connection string; populated on the first
    # successful call to `get_connection_string`.
    _connection_string: Optional[str] = None

    def __init__(self, outer_instance: "azure.ai.projects.onedp.AIProjectClient") -> None:  # type: ignore[name-defined]
        self._outer_instance = outer_instance

    @distributed_trace
    def get_connection_string(self) -> str:
        """Get the Application Insights connection string associated with the Project's
        Application Insights resource.

        The value is fetched from the service once and cached for the lifetime of this object.

        :return: The Application Insights connection string, if the resource was enabled for the Project.
        :rtype: str
        :raises ~azure.core.exceptions.ResourceNotFoundError: An Application Insights connection does not
            exist for this Foundry project.
        :raises ValueError: The connection exists but does not carry an API-key style connection string.
        """
        if not self._connection_string:

            # TODO: Two REST API calls can be replaced by one if we had a REST API for
            # get_with_credentials(connection_type=ConnectionType.APPLICATION_INSIGHTS).
            # Returns an empty Iterable if no connections exist.
            connections: Iterable[Connection] = self._outer_instance.connections.list(
                connection_type=ConnectionType.APPLICATION_INSIGHTS,
                default_connection=True,
            )

            # Take the first (default) connection, if any.
            first_connection: Optional[Connection] = next(iter(connections), None)
            if first_connection is None:
                raise ResourceNotFoundError("No Application Insights connection found.")

            # A second call is needed to get the connection with its credentials populated.
            connection = self._outer_instance.connections.get_with_credentials(name=first_connection.name)

            if isinstance(connection.credentials, ApiKeyCredentials):
                if not connection.credentials.api_key:
                    raise ValueError("Application Insights connection does not have a connection string.")
                self._connection_string = connection.credentials.api_key
            else:
                raise ValueError("Application Insights connection does not use API Key credentials.")

        return self._connection_string
b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/aio/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/aio/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/aio/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/aio/operations/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/aio/operations/__init__.py new file mode 100644 index 000000000000..73d7e533e421 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/aio/operations/__init__.py @@ -0,0 +1,25 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
class ServicePatternsOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.ai.projects.aio.AIProjectClient`'s
    :attr:`service_patterns` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # The owning client passes (client, config, serializer, deserializer)
        # either positionally or by keyword; positional values take precedence.
        positional = list(args)

        def _next(keyword: str):
            return positional.pop(0) if positional else kwargs.pop(keyword)

        self._client: AsyncPipelineClient = _next("client")
        self._config: AIProjectClientConfiguration = _next("config")
        self._serialize: Serializer = _next("serializer")
        self._deserialize: Deserializer = _next("deserializer")

        # The child operation group shares this group's pipeline and (de)serializers.
        self.building_blocks = ServicePatternsBuildingBlocksOperations(
            self._client, self._config, self._serialize, self._deserialize
        )
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/aio/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/aio/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/aio/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/aio/operations/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/aio/operations/__init__.py new file mode 100644 index 000000000000..933fcd7d1b55 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/aio/operations/__init__.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/aio/operations/_operations.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/aio/operations/_operations.py new file mode 100644 index 000000000000..1fe29af2283d --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/aio/operations/_operations.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from azure.core import AsyncPipelineClient + +from ....._serialization import Deserializer, Serializer +from .....aio._configuration import AIProjectClientConfiguration + + +class ServicePatternsBuildingBlocksOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.aio.AIProjectClient`'s + :attr:`building_blocks` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/aio/operations/_patch.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/aio/operations/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/aio/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/operations/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/operations/__init__.py new file mode 100644 index 000000000000..933fcd7d1b55 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/operations/__init__.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/operations/_operations.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/operations/_operations.py new file mode 100644 index 000000000000..85dbf085c333 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/operations/_operations.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from azure.core import PipelineClient + +from ...._configuration import AIProjectClientConfiguration +from ...._serialization import Deserializer, Serializer + + +class ServicePatternsBuildingBlocksOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.AIProjectClient`'s + :attr:`building_blocks` attribute. 
+ """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/operations/_patch.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/operations/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/buildingblocks/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/operations/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/operations/__init__.py new file mode 100644 index 000000000000..73d7e533e421 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/operations/__init__.py @@ -0,0 +1,25 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import ServicePatternsOperations # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "ServicePatternsOperations", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/operations/_operations.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/operations/_operations.py new file mode 100644 index 000000000000..a72bb6287da1 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/operations/_operations.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from azure.core import PipelineClient + +from ..._configuration import AIProjectClientConfiguration +from ..._serialization import Deserializer, Serializer +from ..buildingblocks.operations._operations import ServicePatternsBuildingBlocksOperations + + +class ServicePatternsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.AIProjectClient`'s + :attr:`service_patterns` attribute. + """ + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AIProjectClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + self.building_blocks = ServicePatternsBuildingBlocksOperations( + self._client, self._config, self._serialize, self._deserialize + ) diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/operations/_patch.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/operations/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/servicepatterns/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) 
Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ai/azure-ai-projects-onedp/cspell.json b/sdk/ai/azure-ai-projects-onedp/cspell.json new file mode 100644 index 000000000000..9a1cd7242666 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/cspell.json @@ -0,0 +1,10 @@ +{ + "ignoreWords": [ + "onedp", + "aiproject", + "Prompty", + "azureopenai", + ], + "ignorePaths": [ + ] +} \ No newline at end of file diff --git a/sdk/ai/azure-ai-projects-onedp/dev_requirements.txt b/sdk/ai/azure-ai-projects-onedp/dev_requirements.txt new file mode 100644 index 000000000000..0d68a203d514 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/dev_requirements.txt @@ -0,0 +1,8 @@ +-e ../../../tools/azure-sdk-tools +../../core/azure-core +../../identity/azure-identity +aiohttp +azure.storage.blob +azure.ai.inference +openai +prompty diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/conftest.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/conftest.py new file mode 100644 index 000000000000..dd8e527abab1 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/conftest.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# For security, please avoid record sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + aiproject_subscription_id = os.environ.get("AIPROJECT_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + aiproject_tenant_id = os.environ.get("AIPROJECT_TENANT_ID", "00000000-0000-0000-0000-000000000000") + aiproject_client_id = os.environ.get("AIPROJECT_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + aiproject_client_secret = os.environ.get("AIPROJECT_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=aiproject_subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=aiproject_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=aiproject_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=aiproject_client_secret, value="00000000-0000-0000-0000-000000000000") + + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_connections_operations.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_connections_operations.py new file mode 100644 index 000000000000..e0617e0a3216 --- /dev/null +++ 
b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_connections_operations.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import AIProjectClientTestBase, AIProjectPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectConnectionsOperations(AIProjectClientTestBase): + @AIProjectPreparer() + @recorded_by_proxy + def test_connections_get(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.connections.get( + name="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_connections_get_with_credentials(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.connections.get_with_credentials( + name="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_connections_list(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.connections.list() + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @AIProjectPreparer() + @recorded_by_proxy + def test_connections_list_with_credentials(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.connections.list_with_credentials() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_connections_operations_async.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_connections_operations_async.py new file mode 100644 index 000000000000..3e2be72c3e31 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_connections_operations_async.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import AIProjectPreparer +from testpreparer_async import AIProjectClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectConnectionsOperationsAsync(AIProjectClientTestBaseAsync): + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_connections_get(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.connections.get( + name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_connections_get_with_credentials(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.connections.get_with_credentials( + name="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_connections_list(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = client.connections.list() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_connections_list_with_credentials(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = client.connections.list_with_credentials() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_datasets_operations.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_datasets_operations.py new file mode 100644 index 000000000000..47efe9d971a8 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_datasets_operations.py @@ -0,0 +1,108 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import AIProjectClientTestBase, AIProjectPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectDatasetsOperations(AIProjectClientTestBase): + @AIProjectPreparer() + @recorded_by_proxy + def test_datasets_list_versions(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.datasets.list_versions( + name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_datasets_list_latest(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.datasets.list_latest() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_datasets_get_version(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.datasets.get_version( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_datasets_delete_version(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.datasets.delete_version( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @AIProjectPreparer() + @recorded_by_proxy + def test_datasets_create_version(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.datasets.create_version( + name="str", + version="str", + body={ + "datasetUri": "str", + "name": "str", + "openAIPurpose": "str", + "type": "uri_file", + "version": "str", + "description": "str", + "id": "str", + "isReference": bool, + "stage": "str", + "tags": {"str": "str"}, + }, + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_datasets_start_pending_upload_version(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.datasets.start_pending_upload_version( + name="str", + version="str", + body={"pendingUploadType": "str", "connectionName": "str", "pendingUploadId": "str"}, + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_datasets_get_credentials(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.datasets.get_credentials( + name="str", + version="str", + body={}, + content_type="str", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_datasets_operations_async.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_datasets_operations_async.py new file mode 100644 index 000000000000..f2f95ea4084e --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_datasets_operations_async.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import AIProjectPreparer +from testpreparer_async import AIProjectClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectDatasetsOperationsAsync(AIProjectClientTestBaseAsync): + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_datasets_list_versions(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = client.datasets.list_versions( + name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_datasets_list_latest(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = client.datasets.list_latest() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_datasets_get_version(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.datasets.get_version( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_datasets_delete_version(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.datasets.delete_version( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_datasets_create_version(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.datasets.create_version( + name="str", + version="str", + body={ + "datasetUri": "str", + "name": "str", + "openAIPurpose": "str", + "type": "uri_file", + "version": "str", + "description": "str", + "id": "str", + "isReference": bool, + "stage": "str", + "tags": {"str": "str"}, + }, + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_datasets_start_pending_upload_version(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.datasets.start_pending_upload_version( + name="str", + version="str", + body={"pendingUploadType": "str", "connectionName": "str", "pendingUploadId": "str"}, + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_datasets_get_credentials(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.datasets.get_credentials( + name="str", + version="str", + body={}, + content_type="str", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_deployments_operations.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_deployments_operations.py new file mode 100644 index 000000000000..b0e1e586d866 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_deployments_operations.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import AIProjectClientTestBase, AIProjectPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectDeploymentsOperations(AIProjectClientTestBase): + @AIProjectPreparer() + @recorded_by_proxy + def test_deployments_get(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.deployments.get( + name="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_deployments_list(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.deployments.list() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_deployments_operations_async.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_deployments_operations_async.py new file mode 100644 index 000000000000..3958d83eab29 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_deployments_operations_async.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import AIProjectPreparer +from testpreparer_async import AIProjectClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectDeploymentsOperationsAsync(AIProjectClientTestBaseAsync): + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_deployments_get(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.deployments.get( + name="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_deployments_list(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = client.deployments.list() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_evaluation_results_operations.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_evaluation_results_operations.py new file mode 100644 index 000000000000..b68d4d88d17a --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_evaluation_results_operations.py @@ -0,0 +1,125 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import AIProjectClientTestBase, AIProjectPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectEvaluationResultsOperations(AIProjectClientTestBase): + @AIProjectPreparer() + @recorded_by_proxy + def test_evaluation_results_list_versions(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.evaluation_results.list_versions( + name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_evaluation_results_list_latest(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.evaluation_results.list_latest() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_evaluation_results_get_version(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.evaluation_results.get_version( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_evaluation_results_delete_version(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.evaluation_results.delete_version( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @AIProjectPreparer() + @recorded_by_proxy + def test_evaluation_results_create(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.evaluation_results.create( + name="str", + body={ + "name": "str", + "version": "str", + "BlobUri": "str", + "DatasetFamily": "str", + "DatasetName": "str", + "Metrics": {"str": 0.0}, + "ModelAssetId": "str", + "ModelName": "str", + "ModelVersion": "str", + "ResultType": "str", + "description": "str", + "id": "str", + "stage": "str", + "tags": {"str": "str"}, + }, + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_evaluation_results_create_version(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.evaluation_results.create_version( + name="str", + version="str", + body={ + "name": "str", + "version": "str", + "BlobUri": "str", + "DatasetFamily": "str", + "DatasetName": "str", + "Metrics": {"str": 0.0}, + "ModelAssetId": "str", + "ModelName": "str", + "ModelVersion": "str", + "ResultType": "str", + "description": "str", + "id": "str", + "stage": "str", + "tags": {"str": "str"}, + }, + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_evaluation_results_start_pending_upload(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.evaluation_results.start_pending_upload( + name="str", + version="str", + body={"pendingUploadType": "str", "connectionName": "str", "pendingUploadId": "str"}, + ) + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_evaluation_results_operations_async.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_evaluation_results_operations_async.py new file mode 100644 index 000000000000..b90df81464cd --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_evaluation_results_operations_async.py @@ -0,0 +1,126 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import AIProjectPreparer +from testpreparer_async import AIProjectClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectEvaluationResultsOperationsAsync(AIProjectClientTestBaseAsync): + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_evaluation_results_list_versions(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = client.evaluation_results.list_versions( + name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_evaluation_results_list_latest(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = client.evaluation_results.list_latest() + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_evaluation_results_get_version(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.evaluation_results.get_version( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_evaluation_results_delete_version(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.evaluation_results.delete_version( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_evaluation_results_create(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.evaluation_results.create( + name="str", + body={ + "name": "str", + "version": "str", + "BlobUri": "str", + "DatasetFamily": "str", + "DatasetName": "str", + "Metrics": {"str": 0.0}, + "ModelAssetId": "str", + "ModelName": "str", + "ModelVersion": "str", + "ResultType": "str", + "description": "str", + "id": "str", + "stage": "str", + "tags": {"str": "str"}, + }, + ) + + # please add some check logic here by yourself + # ... 
+ + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_evaluation_results_create_version(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.evaluation_results.create_version( + name="str", + version="str", + body={ + "name": "str", + "version": "str", + "BlobUri": "str", + "DatasetFamily": "str", + "DatasetName": "str", + "Metrics": {"str": 0.0}, + "ModelAssetId": "str", + "ModelName": "str", + "ModelVersion": "str", + "ResultType": "str", + "description": "str", + "id": "str", + "stage": "str", + "tags": {"str": "str"}, + }, + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_evaluation_results_start_pending_upload(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.evaluation_results.start_pending_upload( + name="str", + version="str", + body={"pendingUploadType": "str", "connectionName": "str", "pendingUploadId": "str"}, + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_evaluations_operations.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_evaluations_operations.py new file mode 100644 index 000000000000..2f00a1320ada --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_evaluations_operations.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import AIProjectClientTestBase, AIProjectPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectEvaluationsOperations(AIProjectClientTestBase): + @AIProjectPreparer() + @recorded_by_proxy + def test_evaluations_get(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.evaluations.get( + name="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_evaluations_list(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.evaluations.list() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_evaluations_create_run(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.evaluations.create_run( + evaluation={ + "data": "input_data", + "evaluators": {"str": {"id": "str", "dataMapping": {"str": "str"}, "initParams": {"str": {}}}}, + "id": "str", + "description": "str", + "displayName": "str", + "properties": {"str": "str"}, + "status": "str", + "tags": {"str": "str"}, + }, + ) + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_evaluations_operations_async.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_evaluations_operations_async.py new file mode 100644 index 000000000000..e3323b6811a8 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_evaluations_operations_async.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import AIProjectPreparer +from testpreparer_async import AIProjectClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectEvaluationsOperationsAsync(AIProjectClientTestBaseAsync): + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_evaluations_get(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.evaluations.get( + name="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_evaluations_list(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = client.evaluations.list() + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_evaluations_create_run(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.evaluations.create_run( + evaluation={ + "data": "input_data", + "evaluators": {"str": {"id": "str", "dataMapping": {"str": "str"}, "initParams": {"str": {}}}}, + "id": "str", + "description": "str", + "displayName": "str", + "properties": {"str": "str"}, + "status": "str", + "tags": {"str": "str"}, + }, + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_indexes_operations.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_indexes_operations.py new file mode 100644 index 000000000000..ce533ee305e8 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_indexes_operations.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import AIProjectClientTestBase, AIProjectPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectIndexesOperations(AIProjectClientTestBase): + @AIProjectPreparer() + @recorded_by_proxy + def test_indexes_list_versions(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.indexes.list_versions( + name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_indexes_list_latest(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.indexes.list_latest() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_indexes_get_version(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.indexes.get_version( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_indexes_delete_version(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.indexes.delete_version( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @AIProjectPreparer() + @recorded_by_proxy + def test_indexes_create_version(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.indexes.create_version( + name="str", + version="str", + body={ + "connectionName": "str", + "indexName": "str", + "name": "str", + "type": "AzureSearch", + "version": "str", + "description": "str", + "id": "str", + "stage": "str", + "tags": {"str": "str"}, + }, + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_indexes_operations_async.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_indexes_operations_async.py new file mode 100644 index 000000000000..c2e8e93d8684 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_indexes_operations_async.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import AIProjectPreparer +from testpreparer_async import AIProjectClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectIndexesOperationsAsync(AIProjectClientTestBaseAsync): + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_indexes_list_versions(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = client.indexes.list_versions( + name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_indexes_list_latest(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = client.indexes.list_latest() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_indexes_get_version(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.indexes.get_version( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_indexes_delete_version(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.indexes.delete_version( + name="str", + version="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_indexes_create_version(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.indexes.create_version( + name="str", + version="str", + body={ + "connectionName": "str", + "indexName": "str", + "name": "str", + "type": "AzureSearch", + "version": "str", + "description": "str", + "id": "str", + "stage": "str", + "tags": {"str": "str"}, + }, + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_red_teams_operations.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_red_teams_operations.py new file mode 100644 index 000000000000..59f381b1ce3c --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_red_teams_operations.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import AIProjectClientTestBase, AIProjectPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectRedTeamsOperations(AIProjectClientTestBase): + @AIProjectPreparer() + @recorded_by_proxy + def test_red_teams_get(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.red_teams.get( + name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @AIProjectPreparer() + @recorded_by_proxy + def test_red_teams_list(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.red_teams.list() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy + def test_red_teams_create_run(self, aiproject_endpoint): + client = self.create_client(endpoint=aiproject_endpoint) + response = client.red_teams.create_run( + red_team={ + "attackStrategy": ["str"], + "id": "str", + "numTurns": 0, + "riskCategories": ["str"], + "scanName": "str", + "simulationOnly": bool, + "applicationScenario": "str", + "properties": {"str": "str"}, + "status": "str", + "tags": {"str": "str"}, + }, + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_red_teams_operations_async.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_red_teams_operations_async.py new file mode 100644 index 000000000000..5af2f7fc0d1b --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/test_ai_project_red_teams_operations_async.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import AIProjectPreparer +from testpreparer_async import AIProjectClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestAIProjectRedTeamsOperationsAsync(AIProjectClientTestBaseAsync): + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_red_teams_get(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.red_teams.get( + name="str", + ) + + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_red_teams_list(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = client.red_teams.list() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @AIProjectPreparer() + @recorded_by_proxy_async + async def test_red_teams_create_run(self, aiproject_endpoint): + client = self.create_async_client(endpoint=aiproject_endpoint) + response = await client.red_teams.create_run( + red_team={ + "attackStrategy": ["str"], + "id": "str", + "numTurns": 0, + "riskCategories": ["str"], + "scanName": "str", + "simulationOnly": bool, + "applicationScenario": "str", + "properties": {"str": "str"}, + "status": "str", + "tags": {"str": "str"}, + }, + ) + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/testpreparer.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/testpreparer.py new file mode 100644 index 000000000000..a1c48db04a3f --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/testpreparer.py @@ -0,0 +1,26 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from azure.ai.projects.onedp import AIProjectClient +from devtools_testutils import AzureRecordedTestCase, PowerShellPreparer +import functools + + +class AIProjectClientTestBase(AzureRecordedTestCase): + + def create_client(self, endpoint): + credential = self.get_credential(AIProjectClient) + return self.create_client_from_credential( + AIProjectClient, + credential=credential, + endpoint=endpoint, + ) + + +AIProjectPreparer = functools.partial( + PowerShellPreparer, "aiproject", aiproject_endpoint="https://fake_aiproject_endpoint.com" +) diff --git a/sdk/ai/azure-ai-projects-onedp/generated_tests/testpreparer_async.py b/sdk/ai/azure-ai-projects-onedp/generated_tests/testpreparer_async.py new file mode 100644 index 000000000000..5a7d4906a0bc --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/generated_tests/testpreparer_async.py @@ -0,0 +1,20 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from azure.ai.projects.onedp.aio import AIProjectClient +from devtools_testutils import AzureRecordedTestCase + + +class AIProjectClientTestBaseAsync(AzureRecordedTestCase): + + def create_async_client(self, endpoint): + credential = self.get_credential(AIProjectClient, is_async=True) + return self.create_client_from_credential( + AIProjectClient, + credential=credential, + endpoint=endpoint, + ) diff --git a/sdk/ai/azure-ai-projects-onedp/pyproject.toml b/sdk/ai/azure-ai-projects-onedp/pyproject.toml new file mode 100644 index 000000000000..a7c22e0ddd95 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/pyproject.toml @@ -0,0 +1,26 @@ +[tool.mypy] +python_version = "3.10" +exclude = [ + # Chat completion client expects list of parent class type and gets child type instead + "sample_chat_completions_with_azure_ai_inference_client_and_console_tracing\\.py", + "sample_chat_completions_with_azure_ai_inference_client_and_azure_monitor_tracing\\.py", + "sample_chat_completions_with_azure_ai_inference_client\\.py", + "sample_inference_client_from_connection.py" +] +warn_unused_configs = true +ignore_missing_imports = true +follow_imports_for_stubs = false + +[tool.isort] +profile = "black" +line_length = 120 +known_first_party = ["azure"] +filter_files=true +extend_skip_glob = [ + "*/_vendor/*", + "*/_generated/*", + "*/_restclient/*", + "*/doc/*", + "*/.tox/*", +] + diff --git a/sdk/ai/azure-ai-projects-onedp/pyrightconfig.json b/sdk/ai/azure-ai-projects-onedp/pyrightconfig.json new file mode 100644 index 000000000000..c6dcf991a868 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/pyrightconfig.json @@ -0,0 +1,14 @@ +{ + "reportTypeCommentUsage": true, + "reportMissingImports": false, + "pythonVersion": "3.11", + "exclude": [ + ], + "extraPaths": [ + "./../../core/azure-core", + 
"./../../evaluation/azure-ai-evaluation", + "./../../identity/azure-identity", + "./../../monitor/azure-monitor-opentelemetry", + "./../azure-ai-inference" + ] +} \ No newline at end of file diff --git a/sdk/ai/azure-ai-projects-onedp/samples/assistants/sample_get_assistants_client.py b/sdk/ai/azure-ai-projects-onedp/samples/assistants/sample_get_assistants_client.py new file mode 100644 index 000000000000..4c32a14c0c91 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/assistants/sample_get_assistants_client.py @@ -0,0 +1,39 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to get an authenticated + AssistantsClient from the azure.ai.assistants package. For more information on + the azure.ai.assistants package see https://pypi.org/project/azure-ai-assistants/. + +USAGE: + python sample_get_assistants_client.py + + Before running the sample: + + pip install azure-ai-projects azure-ai-assistants azure-identity + + Set this environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. +""" + +import os +from azure.identity import DefaultAzureCredential +from azure.ai.projects.onedp import AIProjectClient + +endpoint = os.environ["PROJECT_ENDPOINT"] + +with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), +) as project_client: + + # [START assistants_sample] + with project_client.assistants.get_client() as client: + # TODO: Do something with the assistants client... 
+        pass
+    # [END assistants_sample]
diff --git a/sdk/ai/azure-ai-projects-onedp/samples/assistants/sample_get_assistants_client_async.py b/sdk/ai/azure-ai-projects-onedp/samples/assistants/sample_get_assistants_client_async.py
new file mode 100644
index 000000000000..e243a0ecf64b
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/samples/assistants/sample_get_assistants_client_async.py
@@ -0,0 +1,49 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+
+"""
+DESCRIPTION:
+    Given an AIProjectClient, this sample demonstrates how to get an authenticated
+    asynchronous AssistantsClient from the azure.ai.assistants package. For more information on
+    the azure.ai.assistants package see https://pypi.org/project/azure-ai-assistants/.
+
+USAGE:
+    python sample_get_assistants_client_async.py
+
+    Before running the sample:
+
+    pip install azure-ai-projects azure-ai-assistants aiohttp azure-identity
+
+    Set this environment variable with your own value:
+    1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
+    Azure AI Foundry project.
+"""
+
+import os
+import asyncio
+from azure.identity.aio import DefaultAzureCredential
+from azure.ai.projects.onedp.aio import AIProjectClient
+
+
+async def sample_get_assistants_client_async():
+
+    endpoint = os.environ["PROJECT_ENDPOINT"]
+
+    async with AIProjectClient(
+        endpoint=endpoint,
+        credential=DefaultAzureCredential(),
+    ) as project_client:
+
+        async with project_client.assistants.get_client() as client:
+            # TODO: Do something with the assistant client...
+ pass + + +async def main(): + await sample_get_assistants_client_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/connections/sample_connections.py b/sdk/ai/azure-ai-projects-onedp/samples/connections/sample_connections.py new file mode 100644 index 000000000000..c57a0a8576c0 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/connections/sample_connections.py @@ -0,0 +1,53 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to use the synchronous + `.connections` methods to enumerate the properties of all connections + and get the properties of a connection by its name. + +USAGE: + python sample_connections.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) CONNECTION_NAME - The name of a connection, as found in the "Connected resources" tab + in the Management Center of your AI Foundry project. 
+"""
+
+import os
+from azure.identity import DefaultAzureCredential
+from azure.ai.projects.onedp import AIProjectClient
+from azure.ai.projects.onedp.models import ConnectionType
+
+endpoint = os.environ["PROJECT_ENDPOINT"]
+connection_name = os.environ["CONNECTION_NAME"]
+
+with AIProjectClient(
+    endpoint=endpoint,
+    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
+) as project_client:
+
+    # [START connections_sample]
+    print("List the properties of all connections:")
+    for connection in project_client.connections.list():
+        print(connection)
+
+    print("List the properties of all connections of a particular type (in this case, Azure OpenAI connections):")
+    for connection in project_client.connections.list(
+        connection_type=ConnectionType.AZURE_OPEN_AI,
+    ):
+        print(connection)
+
+    print(f"Get the properties of a connection named `{connection_name}`:")
+    connection = project_client.connections.get(connection_name)
+    print(connection)
+    # [END connections_sample]
diff --git a/sdk/ai/azure-ai-projects-onedp/samples/connections/sample_connections_async.py b/sdk/ai/azure-ai-projects-onedp/samples/connections/sample_connections_async.py
new file mode 100644
index 000000000000..526643f5a9b1
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/samples/connections/sample_connections_async.py
@@ -0,0 +1,63 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+
+"""
+DESCRIPTION:
+    Given an AIProjectClient, this sample demonstrates how to use the asynchronous
+    `.connections` methods to enumerate the properties of all connections
+    and get the properties of a connection by its name.
+
+USAGE:
+    python sample_connections_async.py
+
+    Before running the sample:
+
+    pip install azure-ai-projects azure-identity aiohttp
+
+    Set these environment variables with your own values:
+    1) PROJECT_ENDPOINT - Required.
The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) CONNECTION_NAME - The name of a connection, as found in the "Connected resources" tab + in the Management Center of your AI Foundry project. +""" + +import asyncio +import os +from azure.identity.aio import DefaultAzureCredential +from azure.ai.projects.onedp.aio import AIProjectClient +from azure.ai.projects.onedp.models import ConnectionType + + +async def sample_connections_async() -> None: + + endpoint = os.environ["PROJECT_ENDPOINT"] + connection_name = os.environ["CONNECTION_NAME"] + + async with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(), + ) as project_client: + + print("List the properties of all connections:") + async for connection in project_client.connections.list(): + print(connection) + + print("List the properties of all connections of a particular type (in this case, Azure OpenAI connections):") + async for connection in project_client.connections.list( + connection_type=ConnectionType.AZURE_OPEN_AI, + ): + print(connection) + + print(f"Get the properties of a connection named `{connection_name}`:") + connection = await project_client.connections.get(connection_name) + print(connection) + + +async def main(): + await sample_connections_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_datasets.py b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_datasets.py new file mode 100644 index 000000000000..e2e9f1540abd --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_datasets.py @@ -0,0 +1,96 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to use the synchronous + `.datasets` methods to upload files, create Datasets that reference those files, + list Datasets and delete Datasets. + +USAGE: + python sample_datasets.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DATASET_NAME - Optional. The name of the Dataset to create and use in this sample. + 3) DATASET_VERSION - Optional. The version of the Dataset to create and use in this sample. +""" +# TODO: Remove console logging +import sys +import logging + +logger = logging.getLogger("azure") +logger.setLevel(logging.DEBUG) +logger.addHandler(logging.StreamHandler(stream=sys.stdout)) +identity_logger = logging.getLogger("azure.identity") +identity_logger.setLevel(logging.ERROR) +# End logging + +import os +from azure.identity import DefaultAzureCredential +from azure.ai.projects.onedp import AIProjectClient +from azure.ai.projects.onedp.models import DatasetVersion, ListViewType + +endpoint = os.environ["PROJECT_ENDPOINT"] +dataset_name = os.environ.get("DATASET_NAME", "my-dataset") +dataset_version = os.environ.get("DATASET_VERSION", "1.0") + +with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), + logging_enable=True, +) as project_client: + + # [START datasets_sample] + print( + "Upload a single file and create a new Dataset to reference the file. Here we explicitly specify the dataset version." 
+    )
+    dataset: DatasetVersion = project_client.datasets.upload_file_and_create(
+        name=dataset_name,
+        version=dataset_version,
+        file="sample_folder/sample_file1.txt",
+    )
+    print(dataset)
+
+    """
+    print("Upload all files in a folder (including subfolders) to the existing Dataset to reference the folder. Here again we explicitly specify a new dataset version")
+    dataset = project_client.datasets.upload_folder_and_create(
+        name=dataset_name,
+        version="2",
+        folder="sample_folder",
+    )
+    print(dataset)
+
+    print("Upload a single file to the existing dataset, while letting the service increment the version")
+    dataset: DatasetVersion = project_client.datasets.upload_file_and_create(
+        name=dataset_name,
+        file="sample_folder/file2.txt",
+    )
+    print(dataset)
+
+    print("Get an existing Dataset version `1`:")
+    dataset = project_client.datasets.get_version(name=dataset_name, version="1")
+    print(dataset)
+
+    print(f"Listing all versions of the Dataset named `{dataset_name}`:")
+    for dataset in project_client.datasets.list_versions(name=dataset_name):
+        print(dataset)
+
+    print("List latest versions of all Datasets:")
+    for dataset in project_client.datasets.list_latest():
+        print(dataset)
+
+    print("Delete all Dataset versions created above:")
+    project_client.datasets.delete_version(name=dataset_name, version="1")
+    project_client.datasets.delete_version(name=dataset_name, version="2")
+    project_client.datasets.delete_version(name=dataset_name, version="3")
+    """
+    # [END datasets_sample]
diff --git a/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_datasets_async.py b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_datasets_async.py
new file mode 100644
index 000000000000..ad499565e8ad
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_datasets_async.py
@@ -0,0 +1,94 @@
+# pylint: disable=line-too-long,useless-suppression
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to use the asynchronous + `.datasets` methods to upload files, create Datasets that reference those files, + list Datasets and delete Datasets. + +USAGE: + python sample_datasets_async.py + + Before running the sample: + + pip install azure-ai-projects azure-identity aiohttp + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DATASET_NAME - Required. The name of the Dataset to create and use in this sample. +""" + +import asyncio +import os +from azure.identity.aio import DefaultAzureCredential +from azure.ai.projects.onedp.aio import AIProjectClient +from azure.ai.projects.onedp.models import DatasetVersion + + +async def sample_datasets_async() -> None: + + endpoint = os.environ["PROJECT_ENDPOINT"] + dataset_name = os.environ["DATASET_NAME"] + + async with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(), + ) as project_client: + + print( + """Upload a single file and create a new Dataset to reference the file. + Here we explicitly specify the dataset version.""" + ) + dataset: DatasetVersion = await project_client.datasets.upload_file_and_create( + name=dataset_name, + version="1", + file="sample_folder/sample_file1.txt", + ) + print(dataset) + + """ + print("Upload all files in a folder (including subfolders) to the existing Dataset to reference the folder. 
Here again we explicitly specify the a new dataset version") + dataset = await project_client.datasets.upload_folder_and_create( + name=dataset_name, + version="2", + folder="sample_folder", + ) + print(dataset) + + print("Upload a single file to the existing dataset, while letting the service increment the version") + dataset: DatasetVersion = await project_client.datasets.upload_file_and_create( + name=dataset_name, + file="sample_folder/file2.txt", + ) + print(dataset) + + print("Get an existing Dataset version `1`:") + dataset = await project_client.datasets.get_version(name=dataset_name, version="1") + print(dataset) + + print(f"Listing all versions of the Dataset named `{dataset_name}`:") + async for dataset in project_client.datasets.list_versions(name=dataset_name): + print(dataset) + + print("List latest versions of all Datasets:") + async for dataset in project_client.datasets.list_latest(): + print(dataset) + + print("Delete all Dataset versions created above:") + await project_client.datasets.delete_version(name=dataset_name, version="1") + await project_client.datasets.delete_version(name=dataset_name, version="2") + await project_client.datasets.delete_version(name=dataset_name, version="3") + """ + + +async def main(): + await sample_datasets_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_file1.txt b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_file1.txt new file mode 100644 index 000000000000..e129759a15ff --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_file1.txt @@ -0,0 +1 @@ +This is sample file 1 diff --git a/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_file2.txt b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_file2.txt new file mode 100644 index 000000000000..3dd74cdfc9eb --- /dev/null +++ 
b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_file2.txt @@ -0,0 +1 @@ +This is sample file 2 diff --git a/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder1/sample_file3.txt b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder1/sample_file3.txt new file mode 100644 index 000000000000..dde35c02f5a4 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder1/sample_file3.txt @@ -0,0 +1 @@ +This is sample file 3 diff --git a/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder1/sample_file4.txt b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder1/sample_file4.txt new file mode 100644 index 000000000000..0d17a14a0c1f --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder1/sample_file4.txt @@ -0,0 +1 @@ +This is sample file 4 diff --git a/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder2/sample_file5.txt b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder2/sample_file5.txt new file mode 100644 index 000000000000..2f4e87e14c77 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder2/sample_file5.txt @@ -0,0 +1 @@ +This is sample file 5 diff --git a/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder2/sample_file6.txt b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder2/sample_file6.txt new file mode 100644 index 000000000000..e55c3637cdf5 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder2/sample_file6.txt @@ -0,0 +1 @@ +This is sample file 6 diff --git a/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder2/sample_file7.txt b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder2/sample_file7.txt new 
file mode 100644
index 000000000000..843cf01054c4
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder2/sample_file7.txt
@@ -0,0 +1 @@
+This is sample file 7
diff --git a/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder2/sample_folder3/sample_file8.txt b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder2/sample_folder3/sample_file8.txt
new file mode 100644
index 000000000000..73a747b6b520
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/samples/datasets/sample_folder/sample_subfolder2/sample_folder3/sample_file8.txt
@@ -0,0 +1 @@
+This is sample file 8
diff --git a/sdk/ai/azure-ai-projects-onedp/samples/deployments/sample_deployments.py b/sdk/ai/azure-ai-projects-onedp/samples/deployments/sample_deployments.py
new file mode 100644
index 000000000000..4ae28637a2aa
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/samples/deployments/sample_deployments.py
@@ -0,0 +1,50 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+
+"""
+DESCRIPTION:
+    Given an AIProjectClient, this sample demonstrates how to use the synchronous
+    `.deployments` methods to enumerate AI models deployed to your AI Foundry Project.
+
+USAGE:
+    python sample_deployments.py
+
+    Before running the sample:
+
+    pip install azure-ai-projects azure-identity
+
+    Set these environment variables with your own values:
+    1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your
+       Azure AI Foundry project.
+    2) MODEL_DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
+    3) MODEL_PUBLISHER - Required. The publisher of the model to filter by.
+""" + +import os +from azure.identity import DefaultAzureCredential +from azure.ai.projects.onedp import AIProjectClient + +endpoint = os.environ["PROJECT_ENDPOINT"] +model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] +model_publisher = os.environ["MODEL_PUBLISHER"] + +with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), +) as project_client: + + # [START deployments_sample] + print("List all deployments:") + for deployment in project_client.deployments.list(): + print(deployment) + + print(f"List all deployments by the model publisher `{model_publisher}`:") + for deployment in project_client.deployments.list(model_publisher=model_publisher): + print(deployment) + + print(f"Get a single deployment named `{model_deployment_name}`:") + deployment = project_client.deployments.get(model_deployment_name) + print(deployment) + # [END deployments_sample] diff --git a/sdk/ai/azure-ai-projects-onedp/samples/deployments/sample_deployments_async.py b/sdk/ai/azure-ai-projects-onedp/samples/deployments/sample_deployments_async.py new file mode 100644 index 000000000000..6be5a335f079 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/deployments/sample_deployments_async.py @@ -0,0 +1,60 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to use the asynchronous + `.deployments` methods to enumerate AI models deployed to your AI Foundry Project. + +USAGE: + python sample_deployments_async.py + + Before running the sample: + + pip install azure-ai-projects azure-identity aiohttp + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DEPLOYMENT_NAME - Required. 
The name of the deployment to retrieve. + 3) MODEL_PUBLISHER - Required. The publisher of the model to filter by. +""" + +import asyncio +import os +from azure.identity.aio import DefaultAzureCredential +from azure.ai.projects.onedp.aio import AIProjectClient + + +async def sample_deployments_async() -> None: + + endpoint = os.environ["PROJECT_ENDPOINT"] + model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + model_publisher = os.environ["MODEL_PUBLISHER"] + + async with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(), + ) as project_client: + + print("List all deployments:") + async for deployment in project_client.deployments.list(): + print(deployment) + + print(f"List all deployments by the model publisher `{model_publisher}`:") + async for deployment in project_client.deployments.list(model_publisher=model_publisher): + print(deployment) + + print(f"Get a single deployment named `{model_deployment_name}`:") + deployment = await project_client.deployments.get(model_deployment_name) + print(deployment) + + +async def main(): + await sample_deployments_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/evaluation/sample_evaluations.py b/sdk/ai/azure-ai-projects-onedp/samples/evaluation/sample_evaluations.py new file mode 100644 index 000000000000..f4c33f93f8d4 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/evaluation/sample_evaluations.py @@ -0,0 +1,82 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to use the synchronous + `.evaluations` methods to create, get and list evaluations. 
+ +USAGE: + python sample_evaluations.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DATASET_NAME - Required. The name of the Dataset to create and use in this sample. +""" + +import os +from azure.identity import DefaultAzureCredential +from azure.ai.projects.onedp import AIProjectClient +from azure.ai.projects.onedp.models import Evaluation, InputDataset, EvaluatorConfiguration, EvaluationMetrics +from dotenv import load_dotenv + +load_dotenv() + +endpoint = os.environ["PROJECT_ENDPOINT"] +dataset_name = os.environ["DATASET_NAME"] + +with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), +) as project_client: + + # [START evaluations_sample] + print( + "Upload a single file and create a new Dataset to reference the file. Here we explicitly specify the dataset version." + ) + # dataset: DatasetVersion = project_client.datasets.upload_file_and_create( + # name=dataset_name, + # version="1", + # file="./samples_folder/sample_data_evaluation.jsonl", + # ) + # print(dataset) + + print("Create an evaluation") + # evaluation = Evaluation( + # display_name="Sample Evaluation", + # data=InputDataset(id="azureml://locations/centraluseuap/workspaces/abc/data/abc/versions/11"), + # evaluators={ + # "relevance": EvaluatorConfiguration( + # id=f"aiservices:{EvaluationMetrics.Relevance.value}", + # # id="azureml://registries/azureml/models/Retrieval-Evaluator/versions/4", + # # either client or service (TBD) resolves to azureml://registries/azureml/models/Retrieval-Evaluator/versions/... 
+    #             init_params={
+    #                 "deployment_name": "gpt-4o",
+    #             },
+    #         ),
+    #         "hate_unfairness": EvaluatorConfiguration(
+    #             # id=f"aiservices:{EvaluationMetrics.HateUnfairness.value}",
+    #             id="azureml://registries/azureml/models/Retrieval-Evaluator/versions/4",
+    #             # either client or service (TBD) resolves to azureml://registries/azureml/models/Hate-Unfairness-Evaluator/versions/...
+    #             init_params={
+    #                 "azure_ai_project": endpoint,
+    #             },
+    #         ),
+    #     },
+    # )
+    #
+    # evaluation_response = project_client.evaluations.create_run(evaluation)
+
+    print("Get evaluation")
+    # get_evaluation_response = project_client.evaluations.get(evaluation_response.id)
+    # print(get_evaluation_response)
+
+    # [END evaluations_sample]
diff --git a/sdk/ai/azure-ai-projects-onedp/samples/evaluation/samples_folder/sample_data_evaluation.jsonl b/sdk/ai/azure-ai-projects-onedp/samples/evaluation/samples_folder/sample_data_evaluation.jsonl
new file mode 100644
index 000000000000..c2187bb5add8
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/samples/evaluation/samples_folder/sample_data_evaluation.jsonl
@@ -0,0 +1 @@
+{"query": "What is capital of France?", "context": "France is in Europe", "response": "Paris is the capital of France.", "ground_truth": "Paris is the capital of France."}
\ No newline at end of file
diff --git a/sdk/ai/azure-ai-projects-onedp/samples/indexes/sample_indexes.py b/sdk/ai/azure-ai-projects-onedp/samples/indexes/sample_indexes.py
new file mode 100644
index 000000000000..62ff5b9497a6
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/samples/indexes/sample_indexes.py
@@ -0,0 +1,81 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+
+"""
+DESCRIPTION:
+    Given an AIProjectClient, this sample demonstrates how to use the synchronous
+    `.indexes` methods to upload a file, create Indexes that reference those files,
+    list Indexes and delete Indexes.
+ +USAGE: + python sample_indexes.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) INDEX_NAME - Optional. The name of the Index to create and use in this sample. + 3) INDEX_VERSION - Optional. The version of the Index to create and use in this sample. + 4) AI_SEARCH_CONNECTION_NAME - Optional. The name of an existing AI Search connection to use in this sample. + 5) AI_SEARCH_INDEX_NAME - Optional. The name of the AI Search index to use in this sample. +""" + +# TODO: Remove console logging +import sys +import logging + +logger = logging.getLogger("azure") +logger.setLevel(logging.DEBUG) +logger.addHandler(logging.StreamHandler(stream=sys.stdout)) +identity_logger = logging.getLogger("azure.identity") +identity_logger.setLevel(logging.ERROR) +# End logging + +import os +from azure.identity import DefaultAzureCredential +from azure.ai.projects.onedp import AIProjectClient +from azure.ai.projects.onedp.models import AzureAISearchIndex + +endpoint = os.environ["PROJECT_ENDPOINT"] +index_name = os.environ.get("INDEX_NAME", "my-index") +index_version = os.environ.get("INDEX_VERSION", "1.0") +ai_search_connection_name = os.environ.get("AI_SEARCH_CONNECTION_NAME", "my-ai-search-connection-name") +ai_search_index_name = os.environ.get("AI_SEARCH_INDEX_NAME", "my-ai-search-index-name") + +with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), + logging_enable=True, +) as project_client: + + # [START indexes_sample] + print(f"Create an Index named `{index_name}` referencing an existing AI Search resource:") + index = project_client.indexes.create_version( + name=index_name, + version=index_version, + body=AzureAISearchIndex(connection_name=ai_search_connection_name, 
index_name=ai_search_index_name), + ) + print(index) + exit() + + print(f"Get an existing Index named `{index_name}`, version `{index_version}`:") + index = project_client.indexes.get_version(name=index_name, version=index_version) + print(index) + + print(f"Listing all versions of the Index named `{index_name}`:") + for index in project_client.indexes.list_versions(name=index_name): + print(index) + + print("List latest versions of all Indexes:") + for index in project_client.indexes.list_latest(): + print(index) + + print("Delete the Index versions created above:") + project_client.indexes.delete_version(name=index_name, version="1") + project_client.indexes.delete_version(name=index_name, version="2") + # [END indexes_sample] diff --git a/sdk/ai/azure-ai-projects-onedp/samples/indexes/sample_indexes_async.py b/sdk/ai/azure-ai-projects-onedp/samples/indexes/sample_indexes_async.py new file mode 100644 index 000000000000..7b79c9c30424 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/indexes/sample_indexes_async.py @@ -0,0 +1,62 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to use the asynchronous + `.indexes` methods to upload a file, create Indexes that reference those files, + list Indexes and delete Indexes. + +USAGE: + python sample_indexes_async.py + + Before running the sample: + + pip install azure-ai-projects azure-identity aiohttp + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) INDEX_NAME - Required. The name of an Index to create and use in this sample. 
+""" +import asyncio +import os +from azure.identity.aio import DefaultAzureCredential +from azure.ai.projects.onedp.aio import AIProjectClient + + +async def sample_indexes_async() -> None: + + endpoint = os.environ["PROJECT_ENDPOINT"] + index_name = os.environ["INDEX_NAME"] + + async with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(), + ) as project_client: + + print("Get an existing Index version `1`:") + index = await project_client.indexes.get_version(name=index_name, version="1") + print(index) + + print(f"Listing all versions of the Index named `{index_name}`:") + async for index in project_client.indexes.list_versions(name=index_name): + print(index) + + print("List latest versions of all Indexes:") + async for index in project_client.indexes.list_latest(): + print(index) + + print("Delete the Index versions created above:") + await project_client.indexes.delete_version(name=index_name, version="1") + await project_client.indexes.delete_version(name=index_name, version="2") + + +async def main(): + await sample_indexes_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample1.png b/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample1.png new file mode 100644 index 000000000000..59d79ff28fc5 Binary files /dev/null and b/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample1.png differ diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample_chat_completions_with_azure_ai_inference_client_async.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample_chat_completions_with_azure_ai_inference_client_async.py new file mode 100644 index 000000000000..ae1456c85296 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample_chat_completions_with_azure_ai_inference_client_async.py @@ -0,0 +1,58 @@ +# ------------------------------------ +# 
Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to get an authenticated + async ChatCompletionsClient from the azure.ai.inference package, and perform one + chat completions operation. For more information on the azure.ai.inference package see + https://pypi.org/project/azure-ai-inference/. + +USAGE: + python sample_chat_completions_with_azure_ai_inference_client_async.py + + Before running the sample: + + pip install azure-ai-projects azure-ai-inference aiohttp azure-identity + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. +""" + +import os +import asyncio +from azure.identity.aio import DefaultAzureCredential +from azure.ai.projects.onedp.aio import AIProjectClient +from azure.ai.inference.models import UserMessage + + +async def sample_chat_completions_with_azure_ai_inference_client_async(): + + endpoint = os.environ["PROJECT_ENDPOINT"] + model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + + async with DefaultAzureCredential() as credential: + + async with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(), + ) as project_client: + + async with project_client.inference.get_chat_completions_client() as client: + + response = await client.complete( + model=model_deployment_name, messages=[UserMessage(content="How many feet are in a mile?")] + ) + print(response.choices[0].message.content) + + +async def main(): + await sample_chat_completions_with_azure_ai_inference_client_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample_chat_completions_with_azure_openai_client_async.py 
b/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample_chat_completions_with_azure_openai_client_async.py new file mode 100644 index 000000000000..6c6e878d8657 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample_chat_completions_with_azure_openai_client_async.py @@ -0,0 +1,67 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to get an authenticated + AsyncAzureOpenAI client from the openai package, and perform one chat completions + operation. + +USAGE: + python sample_chat_completions_with_azure_openai_client_async.py + + Before running the sample: + + pip install azure-ai-projects aiohttp openai + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project. + + Update the Azure OpenAI api-version as needed (see `api_version=` below). 
Values can be found here: + https://learn.microsoft.com/azure/ai-services/openai/reference#api-specs +""" + +import os +import asyncio +from azure.ai.projects.onedp.aio import AIProjectClient +from azure.identity.aio import DefaultAzureCredential + + +async def sample_chat_completions_with_azure_openai_client_async(): + + endpoint = os.environ["PROJECT_ENDPOINT"] + model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + + async with DefaultAzureCredential() as credential: + + async with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(), + ) as project_client: + + # Get an authenticated AsyncAzureOpenAI client for your default Azure OpenAI connection: + async with await project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client: + + response = await client.chat.completions.create( + model=model_deployment_name, + messages=[ + { + "role": "user", + "content": "How many feet are in a mile?", + }, + ], + ) + + print(response.choices[0].message.content) + + +async def main(): + await sample_chat_completions_with_azure_openai_client_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample_image_embeddings_with_azure_ai_inference_client_async.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample_image_embeddings_with_azure_ai_inference_client_async.py new file mode 100644 index 000000000000..5fb2a51eb679 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample_image_embeddings_with_azure_ai_inference_client_async.py @@ -0,0 +1,66 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to get an authenticated + async ImageEmbeddingsClient from the azure.ai.inference package, and perform one + image embeddings operation. For more information on the azure.ai.inference package + see https://pypi.org/project/azure-ai-inference/. + +USAGE: + python sample_image_embeddings_with_azure_ai_inference_client_async.py + + Before running the sample: + + pip install azure-ai-projects azure-ai-inference aiohttp azure-identity + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. +""" + +import os +import asyncio +from azure.identity.aio import DefaultAzureCredential +from azure.ai.projects.onedp.aio import AIProjectClient +from azure.ai.inference.models import ImageEmbeddingInput + + +async def sample_image_embeddings_with_azure_ai_inference_client_async(): + + endpoint = os.environ["PROJECT_ENDPOINT"] + model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + + async with DefaultAzureCredential() as credential: + + async with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(), + ) as project_client: + + async with project_client.inference.get_image_embeddings_client() as client: + + response = await client.embed( + model=model_deployment_name, + input=[ImageEmbeddingInput.load(image_file="sample1.png", image_format="png")], + ) + + for item in response.data: + length = len(item.embedding) + print( + f"data[{item.index}]: length={length}, [{item.embedding[0]}, {item.embedding[1]}, " + f"..., {item.embedding[length-2]}, {item.embedding[length-1]}]" + ) + + +async def main(): + await sample_image_embeddings_with_azure_ai_inference_client_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff 
--git a/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample_text_embeddings_with_azure_ai_inference_client_async.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample_text_embeddings_with_azure_ai_inference_client_async.py new file mode 100644 index 000000000000..dd4d90eff5c7 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/async_samples/sample_text_embeddings_with_azure_ai_inference_client_async.py @@ -0,0 +1,63 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to get an authenticated + async EmbeddingsClient from the azure.ai.inference package, and perform one text + embeddings operation. For more information on the azure.ai.inference package see + https://pypi.org/project/azure-ai-inference/. + +USAGE: + python sample_text_embeddings_with_azure_ai_inference_client_async.py + + Before running the sample: + + pip install azure-ai-projects azure-ai-inference aiohttp azure-identity + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. 
+""" + +import os +import asyncio +from azure.identity.aio import DefaultAzureCredential +from azure.ai.projects.onedp.aio import AIProjectClient + + +async def sample_text_embeddings_with_azure_ai_inference_client_async(): + + endpoint = os.environ["PROJECT_ENDPOINT"] + model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + + async with DefaultAzureCredential() as credential: + + async with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(), + ) as project_client: + + async with project_client.inference.get_embeddings_client() as client: + + response = await client.embed( + model=model_deployment_name, input=["first phrase", "second phrase", "third phrase"] + ) + + for item in response.data: + length = len(item.embedding) + print( + f"data[{item.index}]: length={length}, [{item.embedding[0]}, {item.embedding[1]}, " + f"..., {item.embedding[length-2]}, {item.embedding[length-1]}]" + ) + + +async def main(): + await sample_text_embeddings_with_azure_ai_inference_client_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample1.png b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample1.png new file mode 100644 index 000000000000..59d79ff28fc5 Binary files /dev/null and b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample1.png differ diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample1.prompty b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample1.prompty new file mode 100644 index 000000000000..6dbcbf40bc6f --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample1.prompty @@ -0,0 +1,30 @@ +--- +name: Basic Prompt +description: A basic prompt that uses the GPT-3 chat API to answer questions +authors: + - author_1 + - author_2 +model: + api: chat + configuration: + azure_deployment: gpt-4o-mini + parameters: + temperature: 1 + frequency_penalty: 0.5 + presence_penalty: 0.5 +--- +system: +You are an AI assistant in a 
hotel. You help guests with their requests and provide information about the hotel and its services. + +# context +{{#rules}} +{{rule}} +{{/rules}} + +{{#chat_history}} +{{role}}: +{{content}} +{{/chat_history}} + +user: +{{input}} diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client.py new file mode 100644 index 000000000000..b65fd265bf36 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client.py @@ -0,0 +1,47 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to get an authenticated + ChatCompletionsClient from the azure.ai.inference package and perform one chat completion + operation. For more information on the azure.ai.inference package see + https://pypi.org/project/azure-ai-inference/. + +USAGE: + python sample_chat_completions_with_azure_ai_inference_client.py + + Before running the sample: + + pip install azure-ai-projects azure-ai-inference azure-identity + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. 
+""" + +import os +from azure.identity import DefaultAzureCredential +from azure.ai.projects.onedp import AIProjectClient +from azure.ai.inference.models import UserMessage + +endpoint = os.environ["PROJECT_ENDPOINT"] +model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + +with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), +) as project_client: + + # [START inference_sample] + with project_client.inference.get_chat_completions_client() as client: + + response = client.complete( + model=model_deployment_name, messages=[UserMessage(content="How many feet are in a mile?")] + ) + + print(response.choices[0].message.content) + # [END inference_sample] diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py new file mode 100644 index 000000000000..9ccef92d5b63 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py @@ -0,0 +1,77 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to + * Get an authenticated ChatCompletionsClient from the azure.ai.inference package + * Define a Mustache template, and render the template with provided parameters to create a list of chat messages. + * Perform one chat completion operation. + Package azure.ai.inference required. For more information see https://pypi.org/project/azure-ai-inference/. + Package prompty required. For more information see https://pypi.org/project/prompty/. 
+ +USAGE: + sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py + + Before running the sample: + + pip install azure-ai-projects azure-ai-inference azure-identity prompty + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. +""" + +import os +from azure.identity import DefaultAzureCredential +from azure.ai.projects.onedp import AIProjectClient, PromptTemplate + +endpoint = os.environ["PROJECT_ENDPOINT"] +model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + +with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), +) as project_client: + + with project_client.inference.get_chat_completions_client() as client: + + prompt_template_str = """ + system: + You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services. + + # context + {{#rules}} + {{rule}} + {{/rules}} + + {{#chat_history}} + {{role}}: + {{content}} + {{/chat_history}} + + user: + {{input}} + """ + prompt_template = PromptTemplate.from_string(api="chat", prompt_template=prompt_template_str) + + input = "When I arrived, can I still have breakfast?" + rules = [ + {"rule": "The check-in time is 3pm"}, + {"rule": "The check-out time is 11am"}, + {"rule": "Breakfast is served from 7am to 10am"}, + ] + chat_history = [ + {"role": "user", "content": "I'll arrive at 2pm. 
What's the check-in and check-out time?"}, + {"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."}, + ] + messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history) + print(messages) + + response = client.complete(model=model_deployment_name, messages=messages) + + print(response.choices[0].message.content) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py new file mode 100644 index 000000000000..dbaf8d16848f --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py @@ -0,0 +1,59 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to + * Get an authenticated ChatCompletionsClient from the azure.ai.inference package + * Load a Prompty file and render a template with provided parameters to create a list of chat messages. + * Perform one chat completion operation. + Package azure.ai.inference required. For more information see https://pypi.org/project/azure-ai-inference/. + Package prompty required. For more information see https://pypi.org/project/prompty/. + +USAGE: + python sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py + + Before running the sample: + + pip install azure-ai-projects azure-ai-inference azure-identity prompty + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. 
+""" + +import os +from azure.identity import DefaultAzureCredential +from azure.ai.projects.onedp import AIProjectClient, PromptTemplate + +endpoint = os.environ["PROJECT_ENDPOINT"] +model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + +with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), +) as project_client: + + with project_client.inference.get_chat_completions_client() as client: + + path = "./sample1.prompty" + prompt_template = PromptTemplate.from_prompty(file_path=path) + + input = "When I arrived, can I still have breakfast?" + rules = [ + {"rule": "The check-in time is 3pm"}, + {"rule": "The check-out time is 11am"}, + {"rule": "Breakfast is served from 7am to 10am"}, + ] + chat_history = [ + {"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"}, + {"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."}, + ] + messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history) + print(messages) + response = client.complete(model=model_deployment_name, messages=messages) + + print(response.choices[0].message.content) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_openai_client.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_openai_client.py new file mode 100644 index 000000000000..f91f988d1283 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_openai_client.py @@ -0,0 +1,53 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to get an authenticated + AzureOpenAI client from the openai package, and perform one chat completion operation. 
+ +USAGE: + python sample_chat_completions_with_azure_openai_client.py + + Before running the sample: + + pip install azure-ai-projects openai + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project. + + Update the Azure OpenAI api-version as needed (see `api_version=` below). Values can be found here: + https://learn.microsoft.com/azure/ai-services/openai/reference#api-specs +""" + +import os +from azure.ai.projects.onedp import AIProjectClient +from azure.identity import DefaultAzureCredential + +endpoint = os.environ["PROJECT_ENDPOINT"] +model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + +with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), +) as project_client: + + # [START aoai_sample] + with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client: + + response = client.chat.completions.create( + model=model_deployment_name, + messages=[ + { + "role": "user", + "content": "How many feet are in a mile?", + }, + ], + ) + + print(response.choices[0].message.content) + # [END aoai_sample] diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_image_embeddings_with_azure_ai_inference_client.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_image_embeddings_with_azure_ai_inference_client.py new file mode 100644 index 000000000000..1ef0cfe39ca0 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_image_embeddings_with_azure_ai_inference_client.py @@ -0,0 +1,50 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to get an authenticated + ImageEmbeddingsClient from the azure.ai.inference package, and perform one image + embeddings operation. For more information on the azure.ai.inference package see + https://pypi.org/project/azure-ai-inference/. + +USAGE: + python sample_image_embeddings_with_azure_ai_inference_client.py + + Before running the sample: + + pip install azure-ai-projects azure-ai-inference azure-identity + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. +""" + +import os +from azure.identity import DefaultAzureCredential +from azure.ai.projects.onedp import AIProjectClient +from azure.ai.inference.models import ImageEmbeddingInput + +endpoint = os.environ["PROJECT_ENDPOINT"] +model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + +with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), +) as project_client: + + with project_client.inference.get_image_embeddings_client() as client: + + response = client.embed( + model=model_deployment_name, input=[ImageEmbeddingInput.load(image_file="sample1.png", image_format="png")] + ) + + for item in response.data: + length = len(item.embedding) + print( + f"data[{item.index}]: length={length}, [{item.embedding[0]}, {item.embedding[1]}, " + f"..., {item.embedding[length-2]}, {item.embedding[length-1]}]" + ) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_text_embeddings_with_azure_ai_inference_client.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_text_embeddings_with_azure_ai_inference_client.py new file mode 100644 index 000000000000..b9df7fe17234 --- /dev/null +++ 
b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_text_embeddings_with_azure_ai_inference_client.py @@ -0,0 +1,47 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to get an authenticated + EmbeddingsClient from the azure.ai.inference package, and perform one text embeddings + operation. For more information on the azure.ai.inference package see + https://pypi.org/project/azure-ai-inference/. + +USAGE: + python sample_text_embeddings_with_azure_ai_inference_client.py + + Before running the sample: + + pip install azure-ai-projects azure-ai-inference azure-identity + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. + 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. 
+""" + +import os +from azure.identity import DefaultAzureCredential +from azure.ai.projects.onedp import AIProjectClient + +endpoint = os.environ["PROJECT_ENDPOINT"] +model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + +with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), +) as project_client: + + with project_client.inference.get_embeddings_client() as client: + + response = client.embed(model=model_deployment_name, input=["first phrase", "second phrase", "third phrase"]) + + for item in response.data: + length = len(item.embedding) + print( + f"data[{item.index}]: length={length}, [{item.embedding[0]}, {item.embedding[1]}, " + f"..., {item.embedding[length-2]}, {item.embedding[length-1]}]" + ) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/telemetry/sample_telemetry.py b/sdk/ai/azure-ai-projects-onedp/samples/telemetry/sample_telemetry.py new file mode 100644 index 000000000000..5683ac091517 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/telemetry/sample_telemetry.py @@ -0,0 +1,37 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to use the synchronous + `.telemetry` methods to get the Application Insights connection string and + enable tracing. + +USAGE: + python sample_telemetry.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. 
+""" + +import os +from azure.identity import DefaultAzureCredential +from azure.ai.projects.onedp import AIProjectClient + +endpoint = os.environ["PROJECT_ENDPOINT"] + +with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), +) as project_client: + + print("Get the Application Insights connection string:") + connection_string = project_client.telemetry.get_connection_string() + print(connection_string) diff --git a/sdk/ai/azure-ai-projects-onedp/samples/telemetry/sample_telemetry_async.py b/sdk/ai/azure-ai-projects-onedp/samples/telemetry/sample_telemetry_async.py new file mode 100644 index 000000000000..037c7c8a0ea8 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/samples/telemetry/sample_telemetry_async.py @@ -0,0 +1,50 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to use the asynchronous + `.telemetry` methods to get the Application Insights connection string and enable + tracing. + +USAGE: + python sample_telemetry_async.py + + Before running the sample: + + pip install azure-ai-projects azure-identity aiohttp + + Set these environment variables with your own values: + 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your + Azure AI Foundry project. 
+""" + +import os +import asyncio +from azure.identity.aio import DefaultAzureCredential +from azure.ai.projects.onedp.aio import AIProjectClient +from azure.ai.projects.onedp.models import ConnectionType + + +async def sample_telemetry_async() -> None: + + endpoint = os.environ["PROJECT_ENDPOINT"] + + async with AIProjectClient( + endpoint=endpoint, + credential=DefaultAzureCredential(), + ) as project_client: + + print("Get the Application Insights connection string:") + connection_string = await project_client.telemetry.get_connection_string() + print(connection_string) + + +async def main(): + await sample_telemetry_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-projects-onedp/setup.py b/sdk/ai/azure-ai-projects-onedp/setup.py new file mode 100644 index 000000000000..a7968342b0c0 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/setup.py @@ -0,0 +1,78 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# coding: utf-8 + +import os +import re +from setuptools import setup, find_packages + + +PACKAGE_NAME = "azure-ai-projects-onedp" +PACKAGE_PPRINT_NAME = "Azure AI Projects" + +# a-b-c => a/b/c +package_folder_path = PACKAGE_NAME.replace("-", "/") + +# Version extraction inspired from 'requests' +with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) + +if not version: + raise RuntimeError("Cannot find version information") + + +setup( + name=PACKAGE_NAME, + version=version, + description="Microsoft {} Client Library for Python".format(PACKAGE_PPRINT_NAME), + long_description=open("README.md", "r").read(), + long_description_content_type="text/markdown", + license="MIT License", + author="Microsoft Corporation", + author_email="azpysdkhelp@microsoft.com", + url="https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-projects", + keywords="azure sdk, azure, ai, agents, foundry, inference, chat completion, project, evaluation", + classifiers=[ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "License :: OSI Approved :: MIT License", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + ], + zip_safe=False, + packages=find_packages( + exclude=[ + "tests", + # Exclude packages that will be covered by PEP420 or nspkg + "azure", + "azure.ai", + "azure.ai.projects", + ] + ), + include_package_data=True, + package_data={ + "azure.ai.projects": ["py.typed"], + }, + install_requires=[ + "isodate>=0.6.1", + "azure-core>=1.30.0", + 
"typing-extensions>=4.12.2", + "azure-storage-blob>=12.15.0", + ], + python_requires=">=3.9", + extras_require={ + "prompts": ["prompty"], + }, +) diff --git a/sdk/ai/azure-ai-projects-onedp/tests/README.md b/sdk/ai/azure-ai-projects-onedp/tests/README.md new file mode 100644 index 000000000000..d1cceb051bdb --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/tests/README.md @@ -0,0 +1,55 @@ +# Azure AI Project client library tests for Python + +The instructions below are for running tests locally, on a Windows machine, against the live service using a local build of the client library. + +## Build and install the client library + +- Clone or download this sample repository. +- Open a command prompt window in the folder `sdk\ai\azure-ai-projects-onedp` +- Install development dependencies: + ```bash + pip install -r dev_requirements.txt + ``` +- Install package from sources: + ```bash + pip install -e . + ``` + +## Log in to Azure + +```bash +az login +``` + +## Setup up environment variables + +Edit the file `azure_ai_projects_tests.env` located in the folder above. Follow the instructions there on how to set up Azure AI Foundry projects to be used for testing, and enter appropriate values for the environment variables used for the tests you want to run. + +## Configure test proxy + +Configure the test proxy to run live service tests without recordings: + +```bash +set AZURE_TEST_RUN_LIVE=true +set AZURE_SKIP_LIVE_RECORDING=true +set PROXY_URL=http://localhost:5000 +set AZURE_TEST_USE_CLI_AUTH=true +``` + +## Run tests + +To run all tests, type: + +```bash +pytest +``` + +To run tests in a particular folder (`tests\connections` for example): + +```bash +pytest tests\connections +``` + +## Additional information + +See [test documentation](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/tests.md) for additional information, including how to set proxy recordings and run tests using recordings. 
diff --git a/sdk/ai/azure-ai-projects-onedp/tests/connections/test_connections.py b/sdk/ai/azure-ai-projects-onedp/tests/connections/test_connections.py new file mode 100644 index 000000000000..f1e4612563a8 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/tests/connections/test_connections.py @@ -0,0 +1,10 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + + +class TestConnections: + + def test_connections_get(self, **kwargs): + pass diff --git a/sdk/ai/azure-ai-projects-onedp/tsp-location.yaml b/sdk/ai/azure-ai-projects-onedp/tsp-location.yaml new file mode 100644 index 000000000000..db7ecd377143 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/tsp-location.yaml @@ -0,0 +1,4 @@ +directory: specification/ai/Azure.AI.Projects +commit: a8ac82984187d6d1141254f50b11ce771d995461 +repo: Azure/azure-rest-api-specs +additionalDirectories: diff --git a/sdk/ai/ci.yml b/sdk/ai/ci.yml index 117b9c6c785d..aadca97df423 100644 --- a/sdk/ai/ci.yml +++ b/sdk/ai/ci.yml @@ -50,6 +50,8 @@ extends: # Selection: sparse # GenerateVMJobs: true Artifacts: + - name: azure-ai-projects-onedp + safeName: azureaiprojectsonedp - name: azure-ai-projects safeName: azureaiprojects - name: azure-ai-inference