diff --git a/.vscode/cspell.json b/.vscode/cspell.json index 6831cc1fe400..973d33394306 100644 --- a/.vscode/cspell.json +++ b/.vscode/cspell.json @@ -377,6 +377,7 @@ "prebuilts", "premf", "prevsnapshot", + "prompty", "pschema", "PSECRET", "pydantic", diff --git a/sdk/ai/azure-ai-projects/CHANGELOG.md b/sdk/ai/azure-ai-projects/CHANGELOG.md index e6f3442fb358..9b7eb76470e6 100644 --- a/sdk/ai/azure-ai-projects/CHANGELOG.md +++ b/sdk/ai/azure-ai-projects/CHANGELOG.md @@ -1,5 +1,24 @@ # Release History +## 1.0.0b9 (2025-04-16) + +### Features added + +* Utilities to load prompt template strings and Prompty file content +* Added BingCustomSearchTool class with sample +* Added list_threads API to agents namespace +* Added image input support for agents create_message + +### Sample updates + +* Added `project_client.agents.enable_auto_function_calls(toolset=toolset)` to all samples that have tool calls executed by the `azure-ai-projects` SDK +* New BingCustomSearchTool sample +* New samples added for image input from URL, file, and base64 + +### Breaking Changes + +Redesigned automatic function calls because agents retrieved by `update_agent` and `get_agent` do not support them. With the new design, the `toolset` parameter in `create_agent` no longer executes tool calls automatically during `create_and_process_run` or `create_stream`. To retain this behavior, call `enable_auto_function_calls`; no additional changes are needed. + ## 1.0.0b8 (2025-03-28) ### Features added diff --git a/sdk/ai/azure-ai-projects/MANIFEST.in b/sdk/ai/azure-ai-projects/MANIFEST.in index aee9f8c1ccc3..fba50036b227 100644 --- a/sdk/ai/azure-ai-projects/MANIFEST.in +++ b/sdk/ai/azure-ai-projects/MANIFEST.in @@ -4,4 +4,4 @@ include azure/ai/projects/py.typed recursive-include tests *.py recursive-include samples *.py *.md include azure/__init__.py -include azure/ai/__init__.py \ No newline at end of file +include azure/ai/__init__.py diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md index 1d2eb4b90ca5..7c344ab1121e 100644 --- a/sdk/ai/azure-ai-projects/README.md +++ b/sdk/ai/azure-ai-projects/README.md @@ -52,6 +52,7 @@ To report an issue with the client library, or request additional features, plea - [Create message](#create-message) with: - [File search attachment](#create-message-with-file-search-attachment) - [Code interpreter attachment](#create-message-with-code-interpreter-attachment) + - [Create Message with Image Inputs](#create-message-with-image-inputs) - [Execute Run, Run_and_Process, or Stream](#create-run-run_and_process-or-stream) - [Retrieve message](#retrieve-message) - [Retrieve file](#retrieve-file) @@ -280,6 +281,9 @@ toolset = ToolSet() toolset.add(functions) toolset.add(code_interpreter) +# To enable automatic execution of tool calls +project_client.agents.enable_auto_function_calls(toolset=toolset) + agent = project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], name="my-assistant", @@ -290,7 +294,7 @@ agent = project_client.agents.create_agent( -Also notices that if you use asynchronous client, you use `AsyncToolSet` instead. Additional information related to `AsyncFunctionTool` be discussed in the later sections. +Also notice that if you use the asynchronous client, use `AsyncToolSet` instead. Additional information related to `AsyncFunctionTool` is discussed in the later sections.
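Putting the pieces above together, the end-to-end flow with automatic function calling looks roughly like the sketch below. This is an illustrative outline, not a shipped sample: the `fetch_weather` function, the agent name, and the `PROJECT_CONNECTION_STRING` / `MODEL_DEPLOYMENT_NAME` environment variables are placeholder assumptions, and the call order reflects the 1.0.0b9 requirement that `enable_auto_function_calls` be invoked for tool calls to run during `create_and_process_run`.

```python
import json
import os

from azure.ai.projects import AIProjectClient
from azure.ai.projects.models import FunctionTool, ToolSet
from azure.identity import DefaultAzureCredential


def fetch_weather(location: str) -> str:
    """Placeholder user function; returns mock weather data as a JSON string."""
    return json.dumps({"location": location, "weather": "sunny, 25C"})


project_client = AIProjectClient.from_connection_string(
    credential=DefaultAzureCredential(),
    conn_str=os.environ["PROJECT_CONNECTION_STRING"],
)

# Register the callable on a ToolSet via FunctionTool
toolset = ToolSet()
toolset.add(FunctionTool({fetch_weather}))

# Opt in to automatic execution of tool calls (new in 1.0.0b9)
project_client.agents.enable_auto_function_calls(toolset=toolset)

agent = project_client.agents.create_agent(
    model=os.environ["MODEL_DEPLOYMENT_NAME"],
    name="weather-assistant",
    instructions="Use the provided functions to answer questions.",
    toolset=toolset,
)

thread = project_client.agents.create_thread()
project_client.agents.create_message(
    thread_id=thread.id, role="user", content="What is the weather in Seattle?"
)

# The SDK polls the run and invokes fetch_weather when the agent requests it
run = project_client.agents.create_and_process_run(thread_id=thread.id, agent_id=agent.id)
print(run.status)

project_client.agents.delete_agent(agent.id)
```

The asynchronous client follows the same shape, substituting `AsyncFunctionTool` and `AsyncToolSet` and awaiting each call.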
Here is an example to use `tools` and `tool_resources`: @@ -463,11 +467,7 @@ print(conn_id) # Initialize agent AI search tool and add the search index connection id ai_search = AzureAISearchTool( - index_connection_id=conn_id, - index_name="sample_index", - query_type=AzureAISearchQueryType.SIMPLE, - top_k=3, - filter="" + index_connection_id=conn_id, index_name="sample_index", query_type=AzureAISearchQueryType.SIMPLE, top_k=3, filter="" ) # Create agent with AI search tool and process assistant run @@ -513,12 +513,7 @@ for message in messages.data: #### Create Agent with Function Call -You can enhance your Agents by defining callback functions as function tools. These can be provided to `create_agent` via either the `toolset` parameter or the combination of `tools` and `tool_resources`. Here are the distinctions: - -- `toolset`: When using the `toolset` parameter, you provide not only the function definitions and descriptions but also their implementations. The SDK will execute these functions within `create_and_run_process` or `streaming` . These functions will be invoked based on their definitions. -- `tools` and `tool_resources`: When using the `tools` and `tool_resources` parameters, only the function definitions and descriptions are provided to `create_agent`, without the implementations. The `Run` or `event handler of stream` will raise a `requires_action` status based on the function definitions. Your code must handle this status and call the appropriate functions. - -For more details about calling functions by code, refer to [`sample_agents_stream_eventhandler_with_functions.py`](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_functions.py) and [`sample_agents_functions.py`](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/sample_agents_functions.py). +You can enhance your Agents by defining callback functions as function tools. These can be provided to `create_agent` via either the `toolset` parameter or the combination of `tools` and `tool_resources`. For more details about requirements and specification of functions, refer to [Function Tool Specifications](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/FunctionTool.md) @@ -529,6 +524,7 @@ Here is an example to use [user functions](https://github.com/Azure/azure-sdk-fo functions = FunctionTool(user_functions) toolset = ToolSet() toolset.add(functions) +project_client.agents.enable_auto_function_calls(toolset=toolset) agent = project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], @@ -553,6 +549,7 @@ functions = AsyncFunctionTool(user_async_functions) toolset = AsyncToolSet() toolset.add(functions) +project_client.agents.enable_auto_function_calls(toolset=toolset) agent = await project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], @@ -564,6 +561,9 @@ agent = await project_client.agents.create_agent( +Notice that if `enable_auto_function_calls` is called, the SDK will invoke the functions automatically during `create_and_process_run` or streaming. 
If you prefer to execute them manually, refer to [`sample_agents_stream_eventhandler_with_functions.py`](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_functions.py) or +[`sample_agents_functions.py`](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/sample_agents_functions.py). + #### Create Agent With Azure Function Call The AI agent leverages Azure Functions triggered asynchronously via Azure Storage Queues. To enable the agent to perform Azure Function calls, you must set up the corresponding `AzureFunctionTool`, specifying input and output queues as well as parameter definitions. @@ -610,7 +610,6 @@ agent = project_client.agents.create_agent( Currently, the Azure Function integration for the AI Agent has the following limitations: -- Azure Functions integration is available **only for non-streaming scenarios**. - Supported trigger for Azure Function is currently limited to **Queue triggers** only. HTTP or other trigger types and streaming responses are not supported at this time. @@ -793,10 +792,17 @@ auth = OpenApiAnonymousAuthDetails() # Initialize agent OpenApi tool using the read in OpenAPI spec openapi_tool = OpenApiTool( - name="get_weather", spec=openapi_weather, description="Retrieve weather information for a location", auth=auth + name="get_weather", + spec=openapi_weather, + description="Retrieve weather information for a location", + auth=auth, + default_parameters=["format"], ) openapi_tool.add_definition( - name="get_countries", spec=openapi_countries, description="Retrieve a list of countries", auth=auth + name="get_countries", + spec=openapi_countries, + description="Retrieve a list of countries", + auth=auth, ) # Create agent with OpenApi tool and process assistant run @@ -885,6 +891,19 @@ thread = project_client.agents.create_thread(tool_resources=file_search.resource ``` + +#### List Threads + +To list all threads attached to a given agent, use the `list_threads` API: + + + +```python +threads = project_client.agents.list_threads() +``` + + + #### Create Message To create a message for assistant to process, you pass `user` as `role` and a question as `content`: @@ -966,6 +985,88 @@ message = project_client.agents.create_message( +#### Create Message with Image Inputs + +You can send messages to Azure agents with image inputs in the following ways: + +- **Using an image stored as an uploaded file** +- **Using a public image accessible via URL** +- **Using a base64-encoded image string** + +The following examples demonstrate each method: + +##### Create message using uploaded image file + +```python +# Upload the local image file +image_file = project_client.agents.upload_file_and_poll(file_path="image_file.png", purpose="assistants") + +# Construct content using uploaded image +file_param = MessageImageFileParam(file_id=image_file.id, detail="high") +content_blocks = [ + MessageInputTextBlock(text="Hello, what is in the image?"), + MessageInputImageFileBlock(image_file=file_param), +] + +# Create the message +message = project_client.agents.create_message( + thread_id=thread.id, + role="user", + content=content_blocks +) +``` + +##### Create message with an image URL input + +```python +# Specify the public image URL +image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg" + +# Create content directly referencing image URL
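+# The optional detail setting accepts "low", "high", or "auto" (see ImageDetailLevel); "high" is assumed here for a more detailed analysis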
+url_param = MessageImageUrlParam(url=image_url, detail="high") +content_blocks = [ + MessageInputTextBlock(text="Hello, what is in the image?"), + MessageInputImageUrlBlock(image_url=url_param), +] + +# Create the message +message = project_client.agents.create_message( + thread_id=thread.id, + role="user", + content=content_blocks +) +``` + +##### Create message with base64-encoded image input + +```python +import base64 + +def image_file_to_base64(path: str) -> str: + with open(path, "rb") as f: + return base64.b64encode(f.read()).decode("utf-8") + +# Convert your image file to base64 format +image_base64 = image_file_to_base64("image_file.png") + +# Prepare the data URL +img_data_url = f"data:image/png;base64,{image_base64}" + +# Use base64 encoded string as image URL parameter +url_param = MessageImageUrlParam(url=img_data_url, detail="high") +content_blocks = [ + MessageInputTextBlock(text="Hello, what is in the image?"), + MessageInputImageUrlBlock(image_url=url_param), +] + +# Create the message +message = project_client.agents.create_message( + thread_id=thread.id, + role="user", + content=content_blocks +) +``` + #### Create Run, Run_and_Process, or Stream To process your message, you can use `create_run`, `create_and_process_run`, or `create_stream`. @@ -988,7 +1089,7 @@ while run.status in ["queued", "in_progress", "requires_action"]: -To have the SDK poll on your behalf and call `function tools`, use the `create_and_process_run` method. Note that `function tools` will only be invoked if they are provided as `toolset` during the `create_agent` call. +To have the SDK poll on your behalf and call `function tools`, use the `create_and_process_run` method. Here is an example: diff --git a/sdk/ai/azure-ai-projects/apiview-properties.json b/sdk/ai/azure-ai-projects/apiview-properties.json new file mode 100644 index 000000000000..edfd93b298e0 --- /dev/null +++ b/sdk/ai/azure-ai-projects/apiview-properties.json @@ -0,0 +1,274 @@ +{ + "CrossLanguagePackageId": "Azure.AI.Projects", + "CrossLanguageDefinitionId": { + "azure.ai.projects.models.Agent": "Azure.AI.Projects.Agents.Agent", + "azure.ai.projects.models.AgentDeletionStatus": "Azure.AI.Projects.Agents.AgentDeletionStatus", + "azure.ai.projects.models.AgentsApiResponseFormat": "Azure.AI.Projects.Agents.AgentsApiResponseFormat", + "azure.ai.projects.models.AgentsNamedToolChoice": "Azure.AI.Projects.Agents.AgentsNamedToolChoice", + "azure.ai.projects.models.AgentThread": "Azure.AI.Projects.Agents.AgentThread", + "azure.ai.projects.models.AgentThreadCreationOptions": "Azure.AI.Projects.Agents.AgentThreadCreationOptions", + "azure.ai.projects.models.AISearchIndexResource": "Azure.AI.Projects.Agents.AISearchIndexResource", + "azure.ai.projects.models.TargetModelConfig": "Azure.AI.Projects.TargetModelConfig", + "azure.ai.projects.models.AOAIModelConfig": "Azure.AI.Projects.AOAIModelConfig", + "azure.ai.projects.models.InputData": "Azure.AI.Projects.InputData", + "azure.ai.projects.models.ApplicationInsightsConfiguration": "Azure.AI.Projects.ApplicationInsightsConfiguration", + "azure.ai.projects.models.AzureAISearchResource": "Azure.AI.Projects.Agents.AzureAISearchResource", + "azure.ai.projects.models.ToolDefinition": "Azure.AI.Projects.Agents.ToolDefinition", + "azure.ai.projects.models.AzureAISearchToolDefinition": "Azure.AI.Projects.Agents.AzureAISearchToolDefinition", + "azure.ai.projects.models.AzureFunctionBinding": "Azure.AI.Projects.Agents.AzureFunctionBinding", + "azure.ai.projects.models.AzureFunctionDefinition": 
"Azure.AI.Projects.Agents.AzureFunctionDefinition", + "azure.ai.projects.models.AzureFunctionStorageQueue": "Azure.AI.Projects.Agents.AzureFunctionStorageQueue", + "azure.ai.projects.models.AzureFunctionToolDefinition": "Azure.AI.Projects.Agents.AzureFunctionToolDefinition", + "azure.ai.projects.models.BingCustomSearchToolDefinition": "Azure.AI.Projects.Agents.BingCustomSearchToolDefinition", + "azure.ai.projects.models.BingGroundingToolDefinition": "Azure.AI.Projects.Agents.BingGroundingToolDefinition", + "azure.ai.projects.models.CodeInterpreterToolDefinition": "Azure.AI.Projects.Agents.CodeInterpreterToolDefinition", + "azure.ai.projects.models.CodeInterpreterToolResource": "Azure.AI.Projects.Agents.CodeInterpreterToolResource", + "azure.ai.projects.models.ConnectedAgentDetails": "Azure.AI.Projects.Agents.ConnectedAgentDetails", + "azure.ai.projects.models.ConnectedAgentToolDefinition": "Azure.AI.Projects.Agents.ConnectedAgentToolDefinition", + "azure.ai.projects.models.Trigger": "Azure.AI.Projects.Trigger", + "azure.ai.projects.models.CronTrigger": "Azure.AI.Projects.CronTrigger", + "azure.ai.projects.models.Dataset": "Azure.AI.Projects.Dataset", + "azure.ai.projects.models.Evaluation": "Azure.AI.Projects.Evaluation", + "azure.ai.projects.models.EvaluationSchedule": "Azure.AI.Projects.EvaluationSchedule", + "azure.ai.projects.models.EvaluationTarget": "Azure.AI.Projects.EvaluationTarget", + "azure.ai.projects.models.EvaluatorConfiguration": "Azure.AI.Projects.EvaluatorConfiguration", + "azure.ai.projects.models.FileDeletionStatus": "Azure.AI.Projects.Agents.FileDeletionStatus", + "azure.ai.projects.models.FileListResponse": "Azure.AI.Projects.Agents.FileListResponse", + "azure.ai.projects.models.FileSearchRankingOptions": "Azure.AI.Projects.Agents.FileSearchRankingOptions", + "azure.ai.projects.models.FileSearchToolCallContent": "Azure.AI.Projects.Agents.FileSearchToolCallContent", + "azure.ai.projects.models.FileSearchToolDefinition": "Azure.AI.Projects.Agents.FileSearchToolDefinition", + "azure.ai.projects.models.FileSearchToolDefinitionDetails": "Azure.AI.Projects.Agents.FileSearchToolDefinitionDetails", + "azure.ai.projects.models.FileSearchToolResource": "Azure.AI.Projects.Agents.FileSearchToolResource", + "azure.ai.projects.models.FunctionDefinition": "Azure.AI.Projects.Agents.FunctionDefinition", + "azure.ai.projects.models.FunctionName": "Azure.AI.Projects.Agents.FunctionName", + "azure.ai.projects.models.FunctionToolDefinition": "Azure.AI.Projects.Agents.FunctionToolDefinition", + "azure.ai.projects.models.IncompleteRunDetails": "Azure.AI.Projects.Agents.IncompleteRunDetails", + "azure.ai.projects.models.MAASModelConfig": "Azure.AI.Projects.MAASModelConfig", + "azure.ai.projects.models.MessageAttachment": "Azure.AI.Projects.Agents.MessageAttachment", + "azure.ai.projects.models.MessageContent": "Azure.AI.Projects.Agents.MessageContent", + "azure.ai.projects.models.MessageDelta": "Azure.AI.Projects.Agents.MessageDelta", + "azure.ai.projects.models.MessageDeltaChunk": "Azure.AI.Projects.Agents.MessageDeltaChunk", + "azure.ai.projects.models.MessageDeltaContent": "Azure.AI.Projects.Agents.MessageDeltaContent", + "azure.ai.projects.models.MessageDeltaImageFileContent": "Azure.AI.Projects.Agents.MessageDeltaImageFileContent", + "azure.ai.projects.models.MessageDeltaImageFileContentObject": "Azure.AI.Projects.Agents.MessageDeltaImageFileContentObject", + "azure.ai.projects.models.MessageDeltaTextAnnotation": "Azure.AI.Projects.Agents.MessageDeltaTextAnnotation", + 
"azure.ai.projects.models.MessageDeltaTextContent": "Azure.AI.Projects.Agents.MessageDeltaTextContent", + "azure.ai.projects.models.MessageDeltaTextContentObject": "Azure.AI.Projects.Agents.MessageDeltaTextContentObject", + "azure.ai.projects.models.MessageDeltaTextFileCitationAnnotation": "Azure.AI.Projects.Agents.MessageDeltaTextFileCitationAnnotation", + "azure.ai.projects.models.MessageDeltaTextFileCitationAnnotationObject": "Azure.AI.Projects.Agents.MessageDeltaTextFileCitationAnnotationObject", + "azure.ai.projects.models.MessageDeltaTextFilePathAnnotation": "Azure.AI.Projects.Agents.MessageDeltaTextFilePathAnnotation", + "azure.ai.projects.models.MessageDeltaTextFilePathAnnotationObject": "Azure.AI.Projects.Agents.MessageDeltaTextFilePathAnnotationObject", + "azure.ai.projects.models.MessageDeltaTextUrlCitationAnnotation": "Azure.AI.Projects.Agents.MessageDeltaTextUrlCitationAnnotation", + "azure.ai.projects.models.MessageDeltaTextUrlCitationDetails": "Azure.AI.Projects.Agents.MessageDeltaTextUrlCitationDetails", + "azure.ai.projects.models.MessageImageFileContent": "Azure.AI.Projects.Agents.MessageImageFileContent", + "azure.ai.projects.models.MessageImageFileDetails": "Azure.AI.Projects.Agents.MessageImageFileDetails", + "azure.ai.projects.models.MessageImageFileParam": "Azure.AI.Projects.Agents.MessageImageFileParam", + "azure.ai.projects.models.MessageImageUrlParam": "Azure.AI.Projects.Agents.MessageImageUrlParam", + "azure.ai.projects.models.MessageIncompleteDetails": "Azure.AI.Projects.Agents.MessageIncompleteDetails", + "azure.ai.projects.models.MessageInputContentBlock": "Azure.AI.Projects.Agents.MessageInputContentBlock", + "azure.ai.projects.models.MessageInputImageFileBlock": "Azure.AI.Projects.Agents.MessageInputImageFileBlock", + "azure.ai.projects.models.MessageInputImageUrlBlock": "Azure.AI.Projects.Agents.MessageInputImageUrlBlock", + "azure.ai.projects.models.MessageInputTextBlock": "Azure.AI.Projects.Agents.MessageInputTextBlock", + "azure.ai.projects.models.MessageTextAnnotation": "Azure.AI.Projects.Agents.MessageTextAnnotation", + "azure.ai.projects.models.MessageTextContent": "Azure.AI.Projects.Agents.MessageTextContent", + "azure.ai.projects.models.MessageTextDetails": "Azure.AI.Projects.Agents.MessageTextDetails", + "azure.ai.projects.models.MessageTextFileCitationAnnotation": "Azure.AI.Projects.Agents.MessageTextFileCitationAnnotation", + "azure.ai.projects.models.MessageTextFileCitationDetails": "Azure.AI.Projects.Agents.MessageTextFileCitationDetails", + "azure.ai.projects.models.MessageTextFilePathAnnotation": "Azure.AI.Projects.Agents.MessageTextFilePathAnnotation", + "azure.ai.projects.models.MessageTextFilePathDetails": "Azure.AI.Projects.Agents.MessageTextFilePathDetails", + "azure.ai.projects.models.MessageTextUrlCitationAnnotation": "Azure.AI.Projects.Agents.MessageTextUrlCitationAnnotation", + "azure.ai.projects.models.MessageTextUrlCitationDetails": "Azure.AI.Projects.Agents.MessageTextUrlCitationDetails", + "azure.ai.projects.models.MicrosoftFabricToolDefinition": "Azure.AI.Projects.Agents.MicrosoftFabricToolDefinition", + "azure.ai.projects.models.OpenAIFile": "Azure.AI.Projects.Agents.OpenAIFile", + "azure.ai.projects.models.OpenAIPageableListOfAgent": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenAIPageableListOfAgentThread": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenAIPageableListOfRunStep": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + 
"azure.ai.projects.models.OpenAIPageableListOfThreadMessage": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenAIPageableListOfThreadRun": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenAIPageableListOfVectorStore": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenAIPageableListOfVectorStoreFile": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenApiAuthDetails": "Azure.AI.Projects.Agents.OpenApiAuthDetails", + "azure.ai.projects.models.OpenApiAnonymousAuthDetails": "Azure.AI.Projects.Agents.OpenApiAnonymousAuthDetails", + "azure.ai.projects.models.OpenApiConnectionAuthDetails": "Azure.AI.Projects.Agents.OpenApiConnectionAuthDetails", + "azure.ai.projects.models.OpenApiConnectionSecurityScheme": "Azure.AI.Projects.Agents.OpenApiConnectionSecurityScheme", + "azure.ai.projects.models.OpenApiFunctionDefinition": "Azure.AI.Projects.Agents.OpenApiFunctionDefinition", + "azure.ai.projects.models.OpenApiManagedAuthDetails": "Azure.AI.Projects.Agents.OpenApiManagedAuthDetails", + "azure.ai.projects.models.OpenApiManagedSecurityScheme": "Azure.AI.Projects.Agents.OpenApiManagedSecurityScheme", + "azure.ai.projects.models.OpenApiToolDefinition": "Azure.AI.Projects.Agents.OpenApiToolDefinition", + "azure.ai.projects.models.RecurrenceSchedule": "Azure.AI.Projects.RecurrenceSchedule", + "azure.ai.projects.models.RecurrenceTrigger": "Azure.AI.Projects.RecurrenceTrigger", + "azure.ai.projects.models.RequiredAction": "Azure.AI.Projects.Agents.RequiredAction", + "azure.ai.projects.models.RequiredToolCall": "Azure.AI.Projects.Agents.RequiredToolCall", + "azure.ai.projects.models.RequiredFunctionToolCall": "Azure.AI.Projects.Agents.RequiredFunctionToolCall", + "azure.ai.projects.models.RequiredFunctionToolCallDetails": "Azure.AI.Projects.Agents.RequiredFunctionToolCallDetails", + "azure.ai.projects.models.ResponseFormatJsonSchema": "Azure.AI.Projects.Agents.ResponseFormatJsonSchema", + "azure.ai.projects.models.ResponseFormatJsonSchemaType": "Azure.AI.Projects.Agents.ResponseFormatJsonSchemaType", + "azure.ai.projects.models.RunCompletionUsage": "Azure.AI.Projects.Agents.RunCompletionUsage", + "azure.ai.projects.models.RunError": "Azure.AI.Projects.Agents.RunError", + "azure.ai.projects.models.RunStep": "Azure.AI.Projects.Agents.RunStep", + "azure.ai.projects.models.RunStepToolCall": "Azure.AI.Projects.Agents.RunStepToolCall", + "azure.ai.projects.models.RunStepAzureAISearchToolCall": "Azure.AI.Projects.Agents.RunStepAzureAISearchToolCall", + "azure.ai.projects.models.RunStepBingGroundingToolCall": "Azure.AI.Projects.Agents.RunStepBingGroundingToolCall", + "azure.ai.projects.models.RunStepCodeInterpreterToolCallOutput": "Azure.AI.Projects.Agents.RunStepCodeInterpreterToolCallOutput", + "azure.ai.projects.models.RunStepCodeInterpreterImageOutput": "Azure.AI.Projects.Agents.RunStepCodeInterpreterImageOutput", + "azure.ai.projects.models.RunStepCodeInterpreterImageReference": "Azure.AI.Projects.Agents.RunStepCodeInterpreterImageReference", + "azure.ai.projects.models.RunStepCodeInterpreterLogOutput": "Azure.AI.Projects.Agents.RunStepCodeInterpreterLogOutput", + "azure.ai.projects.models.RunStepCodeInterpreterToolCall": "Azure.AI.Projects.Agents.RunStepCodeInterpreterToolCall", + "azure.ai.projects.models.RunStepCodeInterpreterToolCallDetails": "Azure.AI.Projects.Agents.RunStepCodeInterpreterToolCallDetails", + "azure.ai.projects.models.RunStepCompletionUsage": 
"Azure.AI.Projects.Agents.RunStepCompletionUsage", + "azure.ai.projects.models.RunStepCustomSearchToolCall": "Azure.AI.Projects.Agents.RunStepCustomSearchToolCall", + "azure.ai.projects.models.RunStepDelta": "Azure.AI.Projects.Agents.RunStepDelta", + "azure.ai.projects.models.RunStepDeltaChunk": "Azure.AI.Projects.Agents.RunStepDeltaChunk", + "azure.ai.projects.models.RunStepDeltaCodeInterpreterDetailItemObject": "Azure.AI.Projects.Agents.RunStepDeltaCodeInterpreterDetailItemObject", + "azure.ai.projects.models.RunStepDeltaCodeInterpreterOutput": "Azure.AI.Projects.Agents.RunStepDeltaCodeInterpreterOutput", + "azure.ai.projects.models.RunStepDeltaCodeInterpreterImageOutput": "Azure.AI.Projects.Agents.RunStepDeltaCodeInterpreterImageOutput", + "azure.ai.projects.models.RunStepDeltaCodeInterpreterImageOutputObject": "Azure.AI.Projects.Agents.RunStepDeltaCodeInterpreterImageOutputObject", + "azure.ai.projects.models.RunStepDeltaCodeInterpreterLogOutput": "Azure.AI.Projects.Agents.RunStepDeltaCodeInterpreterLogOutput", + "azure.ai.projects.models.RunStepDeltaToolCall": "Azure.AI.Projects.Agents.RunStepDeltaToolCall", + "azure.ai.projects.models.RunStepDeltaCodeInterpreterToolCall": "Azure.AI.Projects.Agents.RunStepDeltaCodeInterpreterToolCall", + "azure.ai.projects.models.RunStepDeltaDetail": "Azure.AI.Projects.Agents.RunStepDeltaDetail", + "azure.ai.projects.models.RunStepDeltaFileSearchToolCall": "Azure.AI.Projects.Agents.RunStepDeltaFileSearchToolCall", + "azure.ai.projects.models.RunStepDeltaFunction": "Azure.AI.Projects.Agents.RunStepDeltaFunction", + "azure.ai.projects.models.RunStepDeltaFunctionToolCall": "Azure.AI.Projects.Agents.RunStepDeltaFunctionToolCall", + "azure.ai.projects.models.RunStepDeltaMessageCreation": "Azure.AI.Projects.Agents.RunStepDeltaMessageCreation", + "azure.ai.projects.models.RunStepDeltaMessageCreationObject": "Azure.AI.Projects.Agents.RunStepDeltaMessageCreationObject", + "azure.ai.projects.models.RunStepDeltaToolCallObject": "Azure.AI.Projects.Agents.RunStepDeltaToolCallObject", + "azure.ai.projects.models.RunStepDetails": "Azure.AI.Projects.Agents.RunStepDetails", + "azure.ai.projects.models.RunStepError": "Azure.AI.Projects.Agents.RunStepError", + "azure.ai.projects.models.RunStepFileSearchToolCall": "Azure.AI.Projects.Agents.RunStepFileSearchToolCall", + "azure.ai.projects.models.RunStepFileSearchToolCallResult": "Azure.AI.Projects.Agents.RunStepFileSearchToolCallResult", + "azure.ai.projects.models.RunStepFileSearchToolCallResults": "Azure.AI.Projects.Agents.RunStepFileSearchToolCallResults", + "azure.ai.projects.models.RunStepFunctionToolCall": "Azure.AI.Projects.Agents.RunStepFunctionToolCall", + "azure.ai.projects.models.RunStepFunctionToolCallDetails": "Azure.AI.Projects.Agents.RunStepFunctionToolCallDetails", + "azure.ai.projects.models.RunStepMessageCreationDetails": "Azure.AI.Projects.Agents.RunStepMessageCreationDetails", + "azure.ai.projects.models.RunStepMessageCreationReference": "Azure.AI.Projects.Agents.RunStepMessageCreationReference", + "azure.ai.projects.models.RunStepMicrosoftFabricToolCall": "Azure.AI.Projects.Agents.RunStepMicrosoftFabricToolCall", + "azure.ai.projects.models.RunStepOpenAPIToolCall": "Azure.AI.Projects.Agents.RunStepOpenAPIToolCall", + "azure.ai.projects.models.RunStepSharepointToolCall": "Azure.AI.Projects.Agents.RunStepSharepointToolCall", + "azure.ai.projects.models.RunStepToolCallDetails": "Azure.AI.Projects.Agents.RunStepToolCallDetails", + "azure.ai.projects.models.SearchConfiguration": 
"Azure.AI.Projects.Agents.SearchConfiguration", + "azure.ai.projects.models.SearchConfigurationList": "Azure.AI.Projects.Agents.SearchConfigurationList", + "azure.ai.projects.models.SharepointToolDefinition": "Azure.AI.Projects.Agents.SharepointToolDefinition", + "azure.ai.projects.models.SubmitToolOutputsAction": "Azure.AI.Projects.Agents.SubmitToolOutputsAction", + "azure.ai.projects.models.SubmitToolOutputsDetails": "Azure.AI.Projects.Agents.SubmitToolOutputsDetails", + "azure.ai.projects.models.SystemData": "Azure.AI.Projects.SystemData", + "azure.ai.projects.models.ThreadDeletionStatus": "Azure.AI.Projects.Agents.ThreadDeletionStatus", + "azure.ai.projects.models.ThreadMessage": "Azure.AI.Projects.Agents.ThreadMessage", + "azure.ai.projects.models.ThreadMessageOptions": "Azure.AI.Projects.Agents.ThreadMessageOptions", + "azure.ai.projects.models.ThreadRun": "Azure.AI.Projects.Agents.ThreadRun", + "azure.ai.projects.models.ToolConnection": "Azure.AI.Projects.Agents.ToolConnection", + "azure.ai.projects.models.ToolConnectionList": "Azure.AI.Projects.Agents.ToolConnectionList", + "azure.ai.projects.models.ToolOutput": "Azure.AI.Projects.Agents.ToolOutput", + "azure.ai.projects.models.ToolResources": "Azure.AI.Projects.Agents.ToolResources", + "azure.ai.projects.models.TruncationObject": "Azure.AI.Projects.Agents.TruncationObject", + "azure.ai.projects.models.UpdateCodeInterpreterToolResourceOptions": "Azure.AI.Projects.Agents.UpdateCodeInterpreterToolResourceOptions", + "azure.ai.projects.models.UpdateFileSearchToolResourceOptions": "Azure.AI.Projects.Agents.UpdateFileSearchToolResourceOptions", + "azure.ai.projects.models.UpdateToolResourcesOptions": "Azure.AI.Projects.Agents.UpdateToolResourcesOptions", + "azure.ai.projects.models.VectorStore": "Azure.AI.Projects.Agents.VectorStore", + "azure.ai.projects.models.VectorStoreChunkingStrategyRequest": "Azure.AI.Projects.Agents.VectorStoreChunkingStrategyRequest", + "azure.ai.projects.models.VectorStoreAutoChunkingStrategyRequest": "Azure.AI.Projects.Agents.VectorStoreAutoChunkingStrategyRequest", + "azure.ai.projects.models.VectorStoreChunkingStrategyResponse": "Azure.AI.Projects.Agents.VectorStoreChunkingStrategyResponse", + "azure.ai.projects.models.VectorStoreAutoChunkingStrategyResponse": "Azure.AI.Projects.Agents.VectorStoreAutoChunkingStrategyResponse", + "azure.ai.projects.models.VectorStoreConfiguration": "Azure.AI.Projects.Agents.VectorStoreConfiguration", + "azure.ai.projects.models.VectorStoreConfigurations": "Azure.AI.Projects.Agents.VectorStoreConfigurations", + "azure.ai.projects.models.VectorStoreDataSource": "Azure.AI.Projects.Agents.VectorStoreDataSource", + "azure.ai.projects.models.VectorStoreDeletionStatus": "Azure.AI.Projects.Agents.VectorStoreDeletionStatus", + "azure.ai.projects.models.VectorStoreExpirationPolicy": "Azure.AI.Projects.Agents.VectorStoreExpirationPolicy", + "azure.ai.projects.models.VectorStoreFile": "Azure.AI.Projects.Agents.VectorStoreFile", + "azure.ai.projects.models.VectorStoreFileBatch": "Azure.AI.Projects.Agents.VectorStoreFileBatch", + "azure.ai.projects.models.VectorStoreFileCount": "Azure.AI.Projects.Agents.VectorStoreFileCount", + "azure.ai.projects.models.VectorStoreFileDeletionStatus": "Azure.AI.Projects.Agents.VectorStoreFileDeletionStatus", + "azure.ai.projects.models.VectorStoreFileError": "Azure.AI.Projects.Agents.VectorStoreFileError", + "azure.ai.projects.models.VectorStoreStaticChunkingStrategyOptions": "Azure.AI.Projects.Agents.VectorStoreStaticChunkingStrategyOptions", + 
"azure.ai.projects.models.VectorStoreStaticChunkingStrategyRequest": "Azure.AI.Projects.Agents.VectorStoreStaticChunkingStrategyRequest", + "azure.ai.projects.models.VectorStoreStaticChunkingStrategyResponse": "Azure.AI.Projects.Agents.VectorStoreStaticChunkingStrategyResponse", + "azure.ai.projects.models.OpenApiAuthType": "Azure.AI.Projects.Agents.OpenApiAuthType", + "azure.ai.projects.models.VectorStoreDataSourceAssetType": "Azure.AI.Projects.Agents.VectorStoreDataSourceAssetType", + "azure.ai.projects.models.AzureAISearchQueryType": "Azure.AI.Projects.Agents.AzureAISearchQueryType", + "azure.ai.projects.models.AgentsApiResponseFormatMode": "Azure.AI.Projects.Agents.AgentsApiResponseFormatMode", + "azure.ai.projects.models.ResponseFormat": "Azure.AI.Projects.Agents.ResponseFormat", + "azure.ai.projects.models.ListSortOrder": "Azure.AI.Projects.Agents.ListSortOrder", + "azure.ai.projects.models.MessageRole": "Azure.AI.Projects.Agents.MessageRole", + "azure.ai.projects.models.MessageBlockType": "Azure.AI.Projects.Agents.MessageBlockType", + "azure.ai.projects.models.ImageDetailLevel": "Azure.AI.Projects.Agents.ImageDetailLevel", + "azure.ai.projects.models.MessageStatus": "Azure.AI.Projects.Agents.MessageStatus", + "azure.ai.projects.models.MessageIncompleteDetailsReason": "Azure.AI.Projects.Agents.MessageIncompleteDetailsReason", + "azure.ai.projects.models.RunStatus": "Azure.AI.Projects.Agents.RunStatus", + "azure.ai.projects.models.IncompleteDetailsReason": "Azure.AI.Projects.Agents.IncompleteDetailsReason", + "azure.ai.projects.models.TruncationStrategy": "Azure.AI.Projects.Agents.TruncationStrategy", + "azure.ai.projects.models.AgentsApiToolChoiceOptionMode": "Azure.AI.Projects.Agents.AgentsApiToolChoiceOptionMode", + "azure.ai.projects.models.AgentsNamedToolChoiceType": "Azure.AI.Projects.Agents.AgentsNamedToolChoiceType", + "azure.ai.projects.models.RunAdditionalFieldList": "Azure.AI.Projects.Agents.RunAdditionalFieldList", + "azure.ai.projects.models.RunStepType": "Azure.AI.Projects.Agents.RunStepType", + "azure.ai.projects.models.RunStepStatus": "Azure.AI.Projects.Agents.RunStepStatus", + "azure.ai.projects.models.RunStepErrorCode": "Azure.AI.Projects.Agents.RunStepErrorCode", + "azure.ai.projects.models.FilePurpose": "Azure.AI.Projects.Agents.FilePurpose", + "azure.ai.projects.models.FileState": "Azure.AI.Projects.Agents.FileState", + "azure.ai.projects.models.VectorStoreStatus": "Azure.AI.Projects.Agents.VectorStoreStatus", + "azure.ai.projects.models.VectorStoreExpirationPolicyAnchor": "Azure.AI.Projects.Agents.VectorStoreExpirationPolicyAnchor", + "azure.ai.projects.models.VectorStoreChunkingStrategyRequestType": "Azure.AI.Projects.Agents.VectorStoreChunkingStrategyRequestType", + "azure.ai.projects.models.VectorStoreFileStatus": "Azure.AI.Projects.Agents.VectorStoreFileStatus", + "azure.ai.projects.models.VectorStoreFileErrorCode": "Azure.AI.Projects.Agents.VectorStoreFileErrorCode", + "azure.ai.projects.models.VectorStoreChunkingStrategyResponseType": "Azure.AI.Projects.Agents.VectorStoreChunkingStrategyResponseType", + "azure.ai.projects.models.VectorStoreFileStatusFilter": "Azure.AI.Projects.Agents.VectorStoreFileStatusFilter", + "azure.ai.projects.models.VectorStoreFileBatchStatus": "Azure.AI.Projects.Agents.VectorStoreFileBatchStatus", + "azure.ai.projects.models.AuthenticationType": "Azure.AI.Projects.AuthenticationType", + "azure.ai.projects.models.ConnectionType": "Azure.AI.Projects.ConnectionType", + "azure.ai.projects.models.Frequency": 
"Azure.AI.Projects.Frequency", + "azure.ai.projects.models.WeekDays": "Azure.AI.Projects.WeekDays", + "azure.ai.projects.models.ThreadStreamEvent": "Azure.AI.Projects.Agents.ThreadStreamEvent", + "azure.ai.projects.models.RunStreamEvent": "Azure.AI.Projects.Agents.RunStreamEvent", + "azure.ai.projects.models.RunStepStreamEvent": "Azure.AI.Projects.Agents.RunStepStreamEvent", + "azure.ai.projects.models.MessageStreamEvent": "Azure.AI.Projects.Agents.MessageStreamEvent", + "azure.ai.projects.models.ErrorEvent": "Azure.AI.Projects.Agents.ErrorEvent", + "azure.ai.projects.models.DoneEvent": "Azure.AI.Projects.Agents.DoneEvent", + "azure.ai.projects.models.AgentStreamEvent": "Azure.AI.Projects.Agents.AgentStreamEvent", + "azure.ai.projects.AIProjectClient.agents.create_agent": "Azure.AI.Projects.Agents.createAgent", + "azure.ai.projects.AIProjectClient.agents.list_agents": "Azure.AI.Projects.Agents.listAgents", + "azure.ai.projects.AIProjectClient.agents.get_agent": "Azure.AI.Projects.Agents.getAgent", + "azure.ai.projects.AIProjectClient.agents.update_agent": "Azure.AI.Projects.Agents.updateAgent", + "azure.ai.projects.AIProjectClient.agents.delete_agent": "Azure.AI.Projects.Agents.deleteAgent", + "azure.ai.projects.AIProjectClient.agents.create_thread": "Azure.AI.Projects.Agents.createThread", + "azure.ai.projects.AIProjectClient.agents.get_thread": "Azure.AI.Projects.Agents.getThread", + "azure.ai.projects.AIProjectClient.agents.update_thread": "Azure.AI.Projects.Agents.updateThread", + "azure.ai.projects.AIProjectClient.agents.delete_thread": "Azure.AI.Projects.Agents.deleteThread", + "azure.ai.projects.AIProjectClient.agents.list_threads": "Azure.AI.Projects.Agents.listThreads", + "azure.ai.projects.AIProjectClient.agents.create_message": "Azure.AI.Projects.Agents.createMessage", + "azure.ai.projects.AIProjectClient.agents.list_messages": "Azure.AI.Projects.Agents.listMessages", + "azure.ai.projects.AIProjectClient.agents.get_message": "Azure.AI.Projects.Agents.getMessage", + "azure.ai.projects.AIProjectClient.agents.update_message": "Azure.AI.Projects.Agents.updateMessage", + "azure.ai.projects.AIProjectClient.agents.create_run": "Azure.AI.Projects.Agents.createRun", + "azure.ai.projects.AIProjectClient.agents.list_runs": "Azure.AI.Projects.Agents.listRuns", + "azure.ai.projects.AIProjectClient.agents.get_run": "Azure.AI.Projects.Agents.getRun", + "azure.ai.projects.AIProjectClient.agents.update_run": "Azure.AI.Projects.Agents.updateRun", + "azure.ai.projects.AIProjectClient.agents.submit_tool_outputs_to_run": "Azure.AI.Projects.Agents.submitToolOutputsToRun", + "azure.ai.projects.AIProjectClient.agents.cancel_run": "Azure.AI.Projects.Agents.cancelRun", + "azure.ai.projects.AIProjectClient.agents.create_thread_and_run": "Azure.AI.Projects.Agents.createThreadAndRun", + "azure.ai.projects.AIProjectClient.agents.get_run_step": "Azure.AI.Projects.Agents.getRunStep", + "azure.ai.projects.AIProjectClient.agents.list_run_steps": "Azure.AI.Projects.Agents.listRunSteps", + "azure.ai.projects.AIProjectClient.agents.list_files": "Azure.AI.Projects.Agents.listFiles", + "azure.ai.projects.AIProjectClient.agents.delete_file": "Azure.AI.Projects.Agents.deleteFile", + "azure.ai.projects.AIProjectClient.agents.get_file": "Azure.AI.Projects.Agents.getFile", + "azure.ai.projects.AIProjectClient.agents.list_vector_stores": "Azure.AI.Projects.Agents.listVectorStores", + "azure.ai.projects.AIProjectClient.agents.create_vector_store": "Azure.AI.Projects.Agents.createVectorStore", + 
"azure.ai.projects.AIProjectClient.agents.get_vector_store": "Azure.AI.Projects.Agents.getVectorStore", + "azure.ai.projects.AIProjectClient.agents.modify_vector_store": "Azure.AI.Projects.Agents.modifyVectorStore", + "azure.ai.projects.AIProjectClient.agents.delete_vector_store": "Azure.AI.Projects.Agents.deleteVectorStore", + "azure.ai.projects.AIProjectClient.agents.list_vector_store_files": "Azure.AI.Projects.Agents.listVectorStoreFiles", + "azure.ai.projects.AIProjectClient.agents.create_vector_store_file": "Azure.AI.Projects.Agents.createVectorStoreFile", + "azure.ai.projects.AIProjectClient.agents.get_vector_store_file": "Azure.AI.Projects.Agents.getVectorStoreFile", + "azure.ai.projects.AIProjectClient.agents.delete_vector_store_file": "Azure.AI.Projects.Agents.deleteVectorStoreFile", + "azure.ai.projects.AIProjectClient.agents.create_vector_store_file_batch": "Azure.AI.Projects.Agents.createVectorStoreFileBatch", + "azure.ai.projects.AIProjectClient.agents.get_vector_store_file_batch": "Azure.AI.Projects.Agents.getVectorStoreFileBatch", + "azure.ai.projects.AIProjectClient.agents.cancel_vector_store_file_batch": "Azure.AI.Projects.Agents.cancelVectorStoreFileBatch", + "azure.ai.projects.AIProjectClient.agents.list_vector_store_file_batch_files": "Azure.AI.Projects.Agents.listVectorStoreFileBatchFiles", + "azure.ai.projects.AIProjectClient.evaluations.get": "Azure.AI.Projects.Evaluations.get", + "azure.ai.projects.AIProjectClient.evaluations.create": "Azure.AI.Projects.Evaluations.create", + "azure.ai.projects.AIProjectClient.evaluations.list": "Azure.AI.Projects.Evaluations.list", + "azure.ai.projects.AIProjectClient.evaluations.update": "Azure.AI.Projects.Evaluations.update", + "azure.ai.projects.AIProjectClient.evaluations.get_schedule": "Azure.AI.Projects.Evaluations.getSchedule", + "azure.ai.projects.AIProjectClient.evaluations.create_or_replace_schedule": "Azure.AI.Projects.Evaluations.createOrReplaceSchedule", + "azure.ai.projects.AIProjectClient.evaluations.list_schedule": "Azure.AI.Projects.Evaluations.listSchedule", + "azure.ai.projects.AIProjectClient.evaluations.disable_schedule": "Azure.AI.Projects.Evaluations.disableSchedule" + } +} \ No newline at end of file diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_client.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_client.py index b3e215c68df7..853feb83403d 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_client.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_client.py @@ -36,9 +36,9 @@ class AIProjectClient: :vartype evaluations: azure.ai.projects.operations.EvaluationsOperations :param endpoint: The Azure AI Foundry project endpoint, in the form ``https://.api.azureml.ms`` or - ``https://..api.azureml.ms``\\\\ , where - :code:`` is the Azure region where the project is deployed (e.g. westus) and - :code:`` is the GUID of the Enterprise private link. Required. + ``https://..api.azureml.ms``, where is the + Azure region where the project is deployed (e.g. westus) and is the GUID of + the Enterprise private link. Required. :type endpoint: str :param subscription_id: The Azure subscription ID. Required. 
:type subscription_id: str @@ -119,12 +119,16 @@ def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: request_copy = deepcopy(request) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_configuration.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_configuration.py index 9b8efcae3c2b..3bbeed37a0c7 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_configuration.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_configuration.py @@ -24,9 +24,9 @@ class AIProjectClientConfiguration: # pylint: disable=too-many-instance-attribu :param endpoint: The Azure AI Foundry project endpoint, in the form ``https://.api.azureml.ms`` or - ``https://..api.azureml.ms``\\ , where :code:`` - is the Azure region where the project is deployed (e.g. westus) and :code:`` - is the GUID of the Enterprise private link. Required. + ``https://..api.azureml.ms``, where is the + Azure region where the project is deployed (e.g. westus) and is the GUID of + the Enterprise private link. Required. :type endpoint: str :param subscription_id: The Azure subscription ID. Required. :type subscription_id: str diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_types.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_types.py index 1c059e5809cc..ff7e15ec008a 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_types.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_types.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING, Union +from typing import List, TYPE_CHECKING, Union if TYPE_CHECKING: from . import models as _models @@ -17,5 +17,6 @@ "_models.AgentsApiResponseFormat", "_models.ResponseFormatJsonSchemaType", ] +MessageInputContent = Union[str, List["_models.MessageInputContentBlock"]] MessageAttachmentToolDefinition = Union["_models.CodeInterpreterToolDefinition", "_models.FileSearchToolDefinition"] AgentsApiToolChoiceOption = Union[str, str, "_models.AgentsApiToolChoiceOptionMode", "_models.AgentsNamedToolChoice"] diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_version.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_version.py index ca67f288ad6b..b1c2836b6921 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_version.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "1.0.0b8" +VERSION = "1.0.0b9" diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_client.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_client.py index 1057faa04d88..7bfca0ef6b4a 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_client.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_client.py @@ -36,9 +36,9 @@ class AIProjectClient: :vartype evaluations: azure.ai.projects.aio.operations.EvaluationsOperations :param endpoint: The Azure AI Foundry project endpoint, in the form ``https://.api.azureml.ms`` or - ``https://..api.azureml.ms``\\\\ , where - :code:`` is the Azure region where the project is deployed (e.g. westus) and - :code:`` is the GUID of the Enterprise private link. Required. + ``https://..api.azureml.ms``, where is the + Azure region where the project is deployed (e.g. westus) and is the GUID of + the Enterprise private link. Required. :type endpoint: str :param subscription_id: The Azure subscription ID. Required. :type subscription_id: str @@ -121,12 +121,16 @@ def send_request( request_copy = deepcopy(request) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_configuration.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_configuration.py index f9fb99fbc947..48b480a960b7 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_configuration.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_configuration.py @@ -24,9 +24,9 @@ class AIProjectClientConfiguration: # pylint: disable=too-many-instance-attribu :param endpoint: The Azure AI Foundry project endpoint, in the form ``https://.api.azureml.ms`` or - ``https://..api.azureml.ms``\\ , where :code:`` - is the Azure region where the project is deployed (e.g. westus) and :code:`` - is the GUID of the Enterprise private link. Required. + ``https://..api.azureml.ms``, where is the + Azure region where the project is deployed (e.g. westus) and is the GUID of + the Enterprise private link. Required. :type endpoint: str :param subscription_id: The Azure subscription ID. Required. 
:type subscription_id: str diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py index fc32e1aeaf02..b1f9ba8cb084 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py @@ -9,7 +9,20 @@ from io import IOBase import json import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TYPE_CHECKING, TypeVar, Union, overload +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Callable, + Dict, + IO, + List, + Optional, + TYPE_CHECKING, + TypeVar, + Union, + overload, +) import urllib.parse from azure.core import AsyncPipelineClient @@ -33,7 +46,7 @@ from ... import _model_base, models as _models from ..._model_base import SdkJSONEncoder, _deserialize from ..._serialization import Deserializer, Serializer -from ..._vendor import FileType, prepare_multipart_form_data +from ..._vendor import prepare_multipart_form_data from ...operations._operations import ( build_agents_cancel_run_request, build_agents_cancel_vector_store_file_batch_request, @@ -65,6 +78,7 @@ build_agents_list_messages_request, build_agents_list_run_steps_request, build_agents_list_runs_request, + build_agents_list_threads_request, build_agents_list_vector_store_file_batch_files_request, build_agents_list_vector_store_files_request, build_agents_list_vector_stores_request, @@ -322,12 +336,16 @@ async def create_agent( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -413,12 +431,16 @@ async def list_agents( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = 
self._client.format_url(_request.url, **path_format_arguments) @@ -478,12 +500,16 @@ async def get_agent(self, agent_id: str, **kwargs: Any) -> _models.Agent: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -729,12 +755,16 @@ async def update_agent( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -794,12 +824,16 @@ async def delete_agent(self, agent_id: str, **kwargs: Any) -> _models.AgentDelet params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -959,12 +993,16 @@ async def create_thread( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": 
self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1024,12 +1062,16 @@ async def get_thread(self, thread_id: str, **kwargs: Any) -> _models.AgentThread params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1192,12 +1234,16 @@ async def update_thread( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1257,12 +1303,16 @@ async def delete_thread(self, thread_id: str, **kwargs: Any) -> _models.ThreadDe params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", 
self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1292,13 +1342,108 @@ async def delete_thread(self, thread_id: str, **kwargs: Any) -> _models.ThreadDe return deserialized # type: ignore + @distributed_trace_async + async def list_threads( + self, + *, + limit: Optional[int] = None, + order: Optional[Union[str, _models.ListSortOrder]] = None, + after: Optional[str] = None, + before: Optional[str] = None, + **kwargs: Any + ) -> _models.OpenAIPageableListOfAgentThread: + """Gets a list of threads that were previously created. + + :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. Default value is None. + :paramtype limit: int + :keyword order: Sort order by the created_at timestamp of the objects. asc for ascending order + and desc for descending order. Known values are: "asc" and "desc". Default value is None. + :paramtype order: str or ~azure.ai.projects.models.ListSortOrder + :keyword after: A cursor for use in pagination. after is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, ending with + obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the + list. Default value is None. + :paramtype after: str + :keyword before: A cursor for use in pagination. before is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, ending with + obj_foo, your subsequent call can include before=obj_foo in order to fetch the previous page of + the list. Default value is None. + :paramtype before: str + :return: OpenAIPageableListOfAgentThread. 
The OpenAIPageableListOfAgentThread is compatible + with MutableMapping + :rtype: ~azure.ai.projects.models.OpenAIPageableListOfAgentThread + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.OpenAIPageableListOfAgentThread] = kwargs.pop("cls", None) + + _request = build_agents_list_threads_request( + limit=limit, + order=order, + after=after, + before=before, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), + "resourceGroupName": self._serialize.url( + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.OpenAIPageableListOfAgentThread, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + @overload async def create_message( self, thread_id: str, *, role: Union[str, _models.MessageRole], - content: str, + content: "_types.MessageInputContent", content_type: str = "application/json", attachments: Optional[List[_models.MessageAttachment]] = None, metadata: Optional[Dict[str, str]] = None, @@ -1309,18 +1454,16 @@ async def create_message( :param thread_id: Identifier of the thread. Required. :type thread_id: str :keyword role: The role of the entity that is creating the message. Allowed values include: - - - * ``user``\\ : Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + ``user``, which indicates the message is sent by an actual user (and should be + used in most cases to represent user-generated messages), and ``assistant``, + which indicates the message is generated by the agent (use this value to insert + messages from the agent into the conversation). Known values are: "user" and "assistant". + Required. 
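# Usage sketch for the list_threads operation documented above. This is illustrative
# only and not part of the generated client: the connection-string setup and the
# PROJECT_CONNECTION_STRING variable follow the package samples and should be treated
# as assumptions.
import asyncio
import os

from azure.identity.aio import DefaultAzureCredential
from azure.ai.projects.aio import AIProjectClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with AIProjectClient.from_connection_string(
            credential=credential, conn_str=os.environ["PROJECT_CONNECTION_STRING"]
        ) as project_client:
            # Fetch up to 20 threads, newest first; pass after=<last thread id> to page further.
            threads = await project_client.agents.list_threads(limit=20, order="desc")
            for thread in threads.data:
                print(thread.id)


asyncio.run(main())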
:paramtype role: str or ~azure.ai.projects.models.MessageRole - :keyword content: The textual content of the initial message. Currently, robust input including - images and annotated text may only be provided via - a separate call to the create message API. Required. - :paramtype content: str + :keyword content: The content of the initial message. This may be a basic string (if you only + need text) or an array of typed content blocks (for example, text, image_file, + image_url, and so on). Is either a str type or a [MessageInputContentBlock] type. Required. + :paramtype content: str or list[~azure.ai.projects.models.MessageInputContentBlock] :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -1380,7 +1523,7 @@ async def create_message( body: Union[JSON, IO[bytes]] = _Unset, *, role: Union[str, _models.MessageRole] = _Unset, - content: str = _Unset, + content: "_types.MessageInputContent" = _Unset, attachments: Optional[List[_models.MessageAttachment]] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any @@ -1392,18 +1535,16 @@ async def create_message( :param body: Is either a JSON type or a IO[bytes] type. Required. :type body: JSON or IO[bytes] :keyword role: The role of the entity that is creating the message. Allowed values include: - - - * ``user``\\ : Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + ``user``, which indicates the message is sent by an actual user (and should be + used in most cases to represent user-generated messages), and ``assistant``, + which indicates the message is generated by the agent (use this value to insert + messages from the agent into the conversation). Known values are: "user" and "assistant". + Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole - :keyword content: The textual content of the initial message. Currently, robust input including - images and annotated text may only be provided via - a separate call to the create message API. Required. - :paramtype content: str + :keyword content: The content of the initial message. This may be a basic string (if you only + need text) or an array of typed content blocks (for example, text, image_file, + image_url, and so on). Is either a str type or a [MessageInputContentBlock] type. Required. + :paramtype content: str or list[~azure.ai.projects.models.MessageInputContentBlock] :keyword attachments: A list of files attached to the message, and the tools they should be added to. Default value is None. 
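# Sketch of the broadened `content` parameter described above: besides a plain string,
# a list of typed content blocks can now be passed (image input support). The block
# class names below are taken from the package's image-input samples and, like the
# example URL, are assumptions if your version differs; `project_client` and `thread`
# are assumed to exist as in the previous sketch.
from azure.ai.projects.models import (
    MessageImageUrlParam,
    MessageInputImageUrlBlock,
    MessageInputTextBlock,
)

content_blocks = [
    MessageInputTextBlock(text="What do you see in this image?"),
    MessageInputImageUrlBlock(
        image_url=MessageImageUrlParam(url="https://example.com/photo.jpg", detail="high")
    ),
]
message = await project_client.agents.create_message(
    thread_id=thread.id, role="user", content=content_blocks
)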
:paramtype attachments: list[~azure.ai.projects.models.MessageAttachment] @@ -1453,12 +1594,16 @@ async def create_message( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1552,12 +1697,16 @@ async def list_messages( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1620,12 +1769,16 @@ async def get_message(self, thread_id: str, message_id: str, **kwargs: Any) -> _ params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1785,12 +1938,16 @@ async def update_message( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", 
self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1875,8 +2032,8 @@ async def create_run( :keyword tools: The overridden list of enabled tools that the agent should use to run the thread. Default value is None. :paramtype tools: list[~azure.ai.projects.models.ToolDefinition] - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. :paramtype stream_parameter: bool @@ -2045,8 +2202,8 @@ async def create_run( :keyword tools: The overridden list of enabled tools that the agent should use to run the thread. Default value is None. :paramtype tools: list[~azure.ai.projects.models.ToolDefinition] - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. 
:paramtype stream_parameter: bool @@ -2155,12 +2312,16 @@ async def create_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2250,12 +2411,16 @@ async def list_runs( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2318,12 +2483,16 @@ async def get_run(self, thread_id: str, run_id: str, **kwargs: Any) -> _models.T params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2483,12 +2652,16 @@ async def update_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": 
self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2660,12 +2833,16 @@ async def submit_tool_outputs_to_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2728,12 +2905,16 @@ async def cancel_run(self, thread_id: str, run_id: str, **kwargs: Any) -> _model params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2808,8 +2989,8 @@ async def create_thread_and_run( :keyword tool_resources: Override the tools the agent can use for this run. This is useful for modifying the behavior on a per-run basis. Default value is None. :paramtype tool_resources: ~azure.ai.projects.models.UpdateToolResourcesOptions - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. 
Default value is None. :paramtype stream_parameter: bool @@ -2942,8 +3123,8 @@ async def create_thread_and_run( :keyword tool_resources: Override the tools the agent can use for this run. This is useful for modifying the behavior on a per-run basis. Default value is None. :paramtype tool_resources: ~azure.ai.projects.models.UpdateToolResourcesOptions - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. :paramtype stream_parameter: bool @@ -3050,12 +3231,16 @@ async def create_thread_and_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3135,12 +3320,16 @@ async def get_run_step( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3241,12 +3430,16 @@ async def list_run_steps( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", 
self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3310,12 +3503,16 @@ async def list_files( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3346,59 +3543,18 @@ async def list_files( return deserialized # type: ignore @overload - async def upload_file( - self, *, file: FileType, purpose: Union[str, _models.FilePurpose], filename: Optional[str] = None, **kwargs: Any - ) -> _models.OpenAIFile: - """Uploads a file for use by other operations. - - :keyword file: The file data, in bytes. Required. - :paramtype file: ~azure.ai.projects._vendor.FileType - :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. - :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose - :keyword filename: The name of the file. Default value is None. - :paramtype filename: str - :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.OpenAIFile - :raises ~azure.core.exceptions.HttpResponseError: - """ - + async def _upload_file(self, body: _models._models.UploadFileRequest, **kwargs: Any) -> _models.OpenAIFile: ... @overload - async def upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: - """Uploads a file for use by other operations. - - :param body: Required. - :type body: JSON - :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.OpenAIFile - :raises ~azure.core.exceptions.HttpResponseError: - """ + async def _upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: ... @distributed_trace_async - async def upload_file( - self, - body: JSON = _Unset, - *, - file: FileType = _Unset, - purpose: Union[str, _models.FilePurpose] = _Unset, - filename: Optional[str] = None, - **kwargs: Any + async def _upload_file( + self, body: Union[_models._models.UploadFileRequest, JSON], **kwargs: Any ) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Is one of the following types: JSON Required. 
- :type body: JSON - :keyword file: The file data, in bytes. Required. - :paramtype file: ~azure.ai.projects._vendor.FileType - :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. - :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose - :keyword filename: The name of the file. Default value is None. - :paramtype filename: str + :param body: Multipart body. Is either a UploadFileRequest type or a JSON type. Required. + :type body: ~azure.ai.projects.models._models.UploadFileRequest or JSON :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile :raises ~azure.core.exceptions.HttpResponseError: @@ -3416,13 +3572,6 @@ async def upload_file( cls: ClsType[_models.OpenAIFile] = kwargs.pop("cls", None) - if body is _Unset: - if file is _Unset: - raise TypeError("missing required argument: file") - if purpose is _Unset: - raise TypeError("missing required argument: purpose") - body = {"file": file, "filename": filename, "purpose": purpose} - body = {k: v for k, v in body.items() if v is not None} _body = body.as_dict() if isinstance(body, _model_base.Model) else body _file_fields: List[str] = ["file"] _data_fields: List[str] = ["purpose", "filename"] @@ -3436,12 +3585,16 @@ async def upload_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3501,12 +3654,16 @@ async def delete_file(self, file_id: str, **kwargs: Any) -> _models.FileDeletion params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": 
self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3566,12 +3723,16 @@ async def get_file(self, file_id: str, **kwargs: Any) -> _models.OpenAIFile: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3602,13 +3763,13 @@ async def get_file(self, file_id: str, **kwargs: Any) -> _models.OpenAIFile: return deserialized # type: ignore @distributed_trace_async - async def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: + async def _get_file_content(self, file_id: str, **kwargs: Any) -> AsyncIterator[bytes]: """Retrieves the raw content of a specific file. :param file_id: The ID of the file to retrieve. Required. :type file_id: str - :return: bytes - :rtype: bytes + :return: AsyncIterator[bytes] + :rtype: AsyncIterator[bytes] :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -3622,7 +3783,7 @@ async def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[bytes] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_agents_get_file_content_request( file_id=file_id, @@ -3631,16 +3792,20 @@ async def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = kwargs.pop("stream", False) + _stream = kwargs.pop("stream", True) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access 
_request, stream=_stream, **kwargs ) @@ -3656,10 +3821,7 @@ async def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(bytes, response.json(), format="base64") + deserialized = response.iter_bytes() if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3722,12 +3884,16 @@ async def list_vector_stores( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3908,12 +4074,16 @@ async def create_vector_store( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3973,12 +4143,16 @@ async def get_vector_store(self, vector_store_id: str, **kwargs: Any) -> _models params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True 
), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4139,12 +4313,16 @@ async def modify_vector_store( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4205,12 +4383,16 @@ async def delete_vector_store(self, vector_store_id: str, **kwargs: Any) -> _mod params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4305,12 +4487,16 @@ async def list_vector_store_files( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4467,12 +4653,16 @@ async def create_vector_store_file( params=_params, ) path_format_arguments = { - "endpoint": 
self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4535,12 +4725,16 @@ async def get_vector_store_file(self, vector_store_id: str, file_id: str, **kwar params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4608,12 +4802,16 @@ async def delete_vector_store_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4770,12 +4968,16 @@ async def create_vector_store_file_batch( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + 
"subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4840,12 +5042,16 @@ async def get_vector_store_file_batch( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4911,12 +5117,16 @@ async def cancel_vector_store_file_batch( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5015,12 +5225,16 @@ async def list_vector_store_file_batch_files( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", 
skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5095,12 +5309,16 @@ async def _get_workspace(self, **kwargs: Any) -> _models._models.GetWorkspaceRes params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5178,12 +5396,16 @@ async def _list_connections( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5245,12 +5467,16 @@ async def _get_connection(self, connection_name: str, **kwargs: Any) -> _models. 
params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5346,12 +5572,16 @@ async def _get_connection_with_secrets( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5436,12 +5666,16 @@ async def _get_app_insights( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5521,12 +5755,16 @@ async def get(self, id: str, **kwargs: Any) -> _models.Evaluation: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 
"str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5649,12 +5887,16 @@ async def create(self, evaluation: Union[_models.Evaluation, JSON, IO[bytes]], * params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5724,14 +5966,18 @@ def prepare_request(next_link=None): params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5749,14 +5995,18 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + 
"self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5764,7 +6014,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Evaluation], deserialized["value"]) + list_of_elem = _deserialize(List[_models.Evaluation], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -5890,12 +6140,16 @@ async def update( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5961,12 +6215,16 @@ async def get_schedule(self, name: str, **kwargs: Any) -> _models.EvaluationSche params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6104,12 +6362,16 @@ async def create_or_replace_schedule( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), 
"resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6184,14 +6446,18 @@ def prepare_request(next_link=None): params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6209,14 +6475,18 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6224,7 +6494,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.EvaluationSchedule], deserialized["value"]) + list_of_elem = _deserialize(List[_models.EvaluationSchedule], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -6276,12 +6546,16 @@ async def disable_schedule(self, name: str, **kwargs: Any) -> None: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", 
skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py index ca9fe7d7ad6a..b292a194771e 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py @@ -7,7 +7,7 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -import asyncio +import asyncio # pylint: disable=do-not-import-asyncio import concurrent.futures import io import logging @@ -27,6 +27,8 @@ TextIO, Union, cast, + Callable, + Set, overload, ) @@ -661,7 +663,7 @@ class AgentsOperations(AgentsOperationsGenerated): def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) - self._toolset: Dict[str, _models.AsyncToolSet] = {} + self._function_tool = _models.AsyncFunctionTool(set()) # pylint: disable=arguments-differ @overload @@ -892,8 +894,6 @@ async def create_agent( **kwargs, ) - if toolset is not None: - self._toolset[new_agent.id] = toolset return new_agent # pylint: disable=arguments-differ @@ -1146,7 +1146,6 @@ async def update_agent( return await super().update_agent(body=body, **kwargs) if toolset is not None: - self._toolset[agent_id] = toolset tools = toolset.definitions tool_resources = toolset.resources @@ -1640,11 +1639,9 @@ async def create_and_process_run( # We need tool set only if we are executing local function. In case if # the tool is azure_function we just need to wait when it will be finished. if any(tool_call.type == "function" for tool_call in tool_calls): - toolset = toolset or self._toolset.get(run.agent_id) - if toolset: - tool_outputs = await toolset.execute_tool_calls(tool_calls) - else: - raise ValueError("Toolset is not available in the client.") + toolset = _models.AsyncToolSet() + toolset.add(self._function_tool) + tool_outputs = await toolset.execute_tool_calls(tool_calls) logging.info("Tool outputs: %s", tool_outputs) if tool_outputs: @@ -2333,13 +2330,13 @@ async def _handle_submit_tool_outputs( # We need tool set only if we are executing local function. In case if # the tool is azure_function we just need to wait when it will be finished. 
- if any(tool_call.type == "function" for tool_call in tool_calls): - toolset = self._toolset.get(run.agent_id) - if toolset: - tool_outputs = await toolset.execute_tool_calls(tool_calls) - else: - logger.debug("Toolset is not available in the client.") - return + if ( + any(tool_call.type == "function" for tool_call in tool_calls) + and len(self._function_tool.definitions) > 0 + ): + toolset = _models.AsyncToolSet() + toolset.add(self._function_tool) + tool_outputs = await toolset.execute_tool_calls(tool_calls) logger.info("Tool outputs: %s", tool_outputs) if tool_outputs: @@ -2425,15 +2422,19 @@ async def upload_file( :raises IOError: If there are issues with reading the file. :raises: HttpResponseError for HTTP errors. """ + # If a JSON body is provided directly, pass it along if body is not None: - return await super().upload_file(body=body, **kwargs) + return await super()._upload_file(body=body, **kwargs) + # Convert FilePurpose enum to string if necessary if isinstance(purpose, FilePurpose): purpose = purpose.value + # If file content is passed in directly if file is not None and purpose is not None: - return await super().upload_file(file=file, purpose=purpose, filename=filename, **kwargs) + return await super()._upload_file(body={"file": file, "purpose": purpose, "filename": filename}, **kwargs) + # If a file path is provided if file_path is not None and purpose is not None: if not os.path.isfile(file_path): raise FileNotFoundError(f"The file path provided does not exist: {file_path}") @@ -2442,11 +2443,11 @@ async def upload_file( with open(file_path, "rb") as f: content = f.read() - # Determine filename and create correct FileType + # If no explicit filename is provided, use the base name base_filename = filename or os.path.basename(file_path) file_content: FileType = (base_filename, content) - return await super().upload_file(file=file_content, purpose=purpose, **kwargs) + return await super()._upload_file(body={"file": file_content, "purpose": purpose}, **kwargs) except IOError as e: raise IOError(f"Unable to read file: {file_path}.") from e @@ -3124,10 +3125,56 @@ async def delete_agent(self, agent_id: str, **kwargs: Any) -> _models.AgentDelet :rtype: ~azure.ai.projects.models.AgentDeletionStatus :raises ~azure.core.exceptions.HttpResponseError: """ - if agent_id in self._toolset: - del self._toolset[agent_id] return await super().delete_agent(agent_id, **kwargs) + @overload + def enable_auto_function_calls(self, *, functions: Set[Callable[..., Any]]) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword functions: A set of callable functions to be used as tools. + :type functions: Set[Callable[..., Any]] + """ + + @overload + def enable_auto_function_calls(self, *, function_tool: _models.AsyncFunctionTool) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword function_tool: An AsyncFunctionTool object representing the tool to be used. + :type function_tool: Optional[_models.AsyncFunctionTool] + """ + + @overload + def enable_auto_function_calls(self, *, toolset: _models.AsyncToolSet) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword toolset: An AsyncToolSet object representing the set of tools to be used. 
+ :type toolset: Optional[_models.AsyncToolSet] + """ + + def enable_auto_function_calls( + self, + *, + functions: Optional[Set[Callable[..., Any]]] = None, + function_tool: Optional[_models.AsyncFunctionTool] = None, + toolset: Optional[_models.AsyncToolSet] = None, + ) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword functions: A set of callable functions to be used as tools. + :type functions: Set[Callable[..., Any]] + :keyword function_tool: An AsyncFunctionTool object representing the tool to be used. + :type function_tool: Optional[_models.AsyncFunctionTool] + :keyword toolset: An AsyncToolSet object representing the set of tools to be used. + :type toolset: Optional[_models.AsyncToolSet] + """ + if functions: + self._function_tool = _models.AsyncFunctionTool(functions) + elif function_tool: + self._function_tool = function_tool + elif toolset: + tool = toolset.get_tool(_models.AsyncFunctionTool) + self._function_tool = tool + class _SyncCredentialWrapper(TokenCredential): """ diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py index a44d43c67e3b..6e1bbfc0c2fc 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py @@ -29,9 +29,12 @@ AzureFunctionDefinition, AzureFunctionStorageQueue, AzureFunctionToolDefinition, + BingCustomSearchToolDefinition, BingGroundingToolDefinition, CodeInterpreterToolDefinition, CodeInterpreterToolResource, + ConnectedAgentDetails, + ConnectedAgentToolDefinition, CronTrigger, Dataset, Evaluation, @@ -69,7 +72,13 @@ MessageDeltaTextUrlCitationDetails, MessageImageFileContent, MessageImageFileDetails, + MessageImageFileParam, + MessageImageUrlParam, MessageIncompleteDetails, + MessageInputContentBlock, + MessageInputImageFileBlock, + MessageInputImageUrlBlock, + MessageInputTextBlock, MessageTextAnnotation, MessageTextContent, MessageTextDetails, @@ -82,6 +91,7 @@ MicrosoftFabricToolDefinition, OpenAIFile, OpenAIPageableListOfAgent, + OpenAIPageableListOfAgentThread, OpenAIPageableListOfRunStep, OpenAIPageableListOfThreadMessage, OpenAIPageableListOfThreadRun, @@ -115,6 +125,7 @@ RunStepCodeInterpreterToolCallDetails, RunStepCodeInterpreterToolCallOutput, RunStepCompletionUsage, + RunStepCustomSearchToolCall, RunStepDelta, RunStepDeltaChunk, RunStepDeltaCodeInterpreterDetailItemObject, @@ -141,9 +152,12 @@ RunStepMessageCreationDetails, RunStepMessageCreationReference, RunStepMicrosoftFabricToolCall, + RunStepOpenAPIToolCall, RunStepSharepointToolCall, RunStepToolCall, RunStepToolCallDetails, + SearchConfiguration, + SearchConfigurationList, SharepointToolDefinition, SubmitToolOutputsAction, SubmitToolOutputsDetails, @@ -196,8 +210,10 @@ FilePurpose, FileState, Frequency, + ImageDetailLevel, IncompleteDetailsReason, ListSortOrder, + MessageBlockType, MessageIncompleteDetailsReason, MessageRole, MessageStatus, @@ -244,9 +260,12 @@ "AzureFunctionDefinition", "AzureFunctionStorageQueue", "AzureFunctionToolDefinition", + "BingCustomSearchToolDefinition", "BingGroundingToolDefinition", "CodeInterpreterToolDefinition", "CodeInterpreterToolResource", + "ConnectedAgentDetails", + "ConnectedAgentToolDefinition", "CronTrigger", "Dataset", "Evaluation", @@ -284,7 +303,13 @@ "MessageDeltaTextUrlCitationDetails", "MessageImageFileContent", "MessageImageFileDetails", + 
"MessageImageFileParam", + "MessageImageUrlParam", "MessageIncompleteDetails", + "MessageInputContentBlock", + "MessageInputImageFileBlock", + "MessageInputImageUrlBlock", + "MessageInputTextBlock", "MessageTextAnnotation", "MessageTextContent", "MessageTextDetails", @@ -297,6 +322,7 @@ "MicrosoftFabricToolDefinition", "OpenAIFile", "OpenAIPageableListOfAgent", + "OpenAIPageableListOfAgentThread", "OpenAIPageableListOfRunStep", "OpenAIPageableListOfThreadMessage", "OpenAIPageableListOfThreadRun", @@ -330,6 +356,7 @@ "RunStepCodeInterpreterToolCallDetails", "RunStepCodeInterpreterToolCallOutput", "RunStepCompletionUsage", + "RunStepCustomSearchToolCall", "RunStepDelta", "RunStepDeltaChunk", "RunStepDeltaCodeInterpreterDetailItemObject", @@ -356,9 +383,12 @@ "RunStepMessageCreationDetails", "RunStepMessageCreationReference", "RunStepMicrosoftFabricToolCall", + "RunStepOpenAPIToolCall", "RunStepSharepointToolCall", "RunStepToolCall", "RunStepToolCallDetails", + "SearchConfiguration", + "SearchConfigurationList", "SharepointToolDefinition", "SubmitToolOutputsAction", "SubmitToolOutputsDetails", @@ -408,8 +438,10 @@ "FilePurpose", "FileState", "Frequency", + "ImageDetailLevel", "IncompleteDetailsReason", "ListSortOrder", + "MessageBlockType", "MessageIncompleteDetailsReason", "MessageRole", "MessageStatus", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py index 70c4fd3daa6a..20b2b87e2cc2 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py @@ -16,7 +16,7 @@ class AgentsApiResponseFormatMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): AUTO = "auto" """Default value. Let the model handle the return format.""" NONE = "none" - """Setting the value to ``none``\\ , will result in a 400 Bad request.""" + """Setting the value to ``none``, will result in a 400 Bad request.""" class AgentsApiToolChoiceOptionMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -45,6 +45,10 @@ class AgentsNamedToolChoiceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Tool type ``sharepoint_grounding``""" AZURE_AI_SEARCH = "azure_ai_search" """Tool type ``azure_ai_search``""" + BING_CUSTOM_SEARCH = "bing_custom_search" + """Tool type ``bing_custom_search``""" + CONNECTED_AGENT = "connected_agent" + """Tool type ``connected_agent``""" class AgentStreamEvent(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -249,6 +253,17 @@ class Frequency(str, Enum, metaclass=CaseInsensitiveEnumMeta): MINUTE = "Minute" +class ImageDetailLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Specifies an image's detail level. Can be 'auto', 'low', 'high', or an unknown future value.""" + + AUTO = "auto" + """Automatically select an appropriate detail level.""" + LOW = "low" + """Use a lower detail level to reduce bandwidth or cost.""" + HIGH = "high" + """Use a higher detail level—potentially more resource-intensive.""" + + class IncompleteDetailsReason(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The reason why the run is incomplete. This will point to which specific token limit was reached over the course of the run. @@ -269,6 +284,19 @@ class ListSortOrder(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Specifies a descending sort order.""" +class MessageBlockType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Specifies the kind of content block within a message. Could be text, an image file, an external + image URL, or an unknown future type. 
+ """ + + TEXT = "text" + """Indicates a block containing text content.""" + IMAGE_FILE = "image_file" + """Indicates a block referencing an internally uploaded image file.""" + IMAGE_URL = "image_url" + """Indicates a block referencing an external image URL.""" + + class MessageIncompleteDetailsReason(str, Enum, metaclass=CaseInsensitiveEnumMeta): """A set of reasons describing why a message is marked as incomplete.""" diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py index aefa9d474688..718754ae47b1 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py @@ -13,8 +13,10 @@ from .. import _model_base from .._model_base import rest_discriminator, rest_field +from .._vendor import FileType from ._enums import ( AuthenticationType, + MessageBlockType, OpenApiAuthType, RunStepType, VectorStoreChunkingStrategyRequestType, @@ -223,9 +225,10 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class AgentsNamedToolChoice(_model_base.Model): """Specifies a tool the model should use. Use to force the model to call a specific tool. - :ivar type: the type of tool. If type is ``function``\\, the function name must be set. Required. + :ivar type: the type of tool. If type is ``function``, the function name must be set. Required. Known values are: "function", "code_interpreter", "file_search", "bing_grounding", - "fabric_dataagent", "sharepoint_grounding", and "azure_ai_search". + "fabric_dataagent", "sharepoint_grounding", "azure_ai_search", "bing_custom_search", and + "connected_agent". :vartype type: str or ~azure.ai.projects.models.AgentsNamedToolChoiceType :ivar function: The name of the function to call. :vartype function: ~azure.ai.projects.models.FunctionName @@ -234,9 +237,10 @@ class AgentsNamedToolChoice(_model_base.Model): type: Union[str, "_models.AgentsNamedToolChoiceType"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """the type of tool. If type is \"function\" , the function name must be set. Required. Known + """the type of tool. If type is ``function``, the function name must be set. Required. Known values are: \"function\", \"code_interpreter\", \"file_search\", \"bing_grounding\", - \"fabric_dataagent\", \"sharepoint_grounding\", and \"azure_ai_search\".""" + \"fabric_dataagent\", \"sharepoint_grounding\", \"azure_ai_search\", \"bing_custom_search\", + and \"connected_agent\".""" function: Optional["_models.FunctionName"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the function to call.""" @@ -467,7 +471,7 @@ class AOAIModelConfig(TargetModelConfig, discriminator="AOAI"): :ivar type: Required. Default value is "AOAI". :vartype type: str - :ivar azure_endpoint: Endpoint URL for AOAI model. Required. + :ivar azure_endpoint: Endpoint targetURI for AOAI model. Required. :vartype azure_endpoint: str :ivar api_key: API Key for AOAI model. Required. :vartype api_key: str @@ -478,7 +482,7 @@ class AOAIModelConfig(TargetModelConfig, discriminator="AOAI"): type: Literal["AOAI"] = rest_discriminator(name="type", visibility=["read"]) # type: ignore """Required. Default value is \"AOAI\".""" azure_endpoint: str = rest_field(name="azureEndpoint", visibility=["read", "create", "update", "delete", "query"]) - """Endpoint URL for AOAI model. Required.""" + """Endpoint targetURI for AOAI model. 
Required.""" api_key: str = rest_field(name="apiKey", visibility=["read", "create", "update", "delete", "query"]) """API Key for AOAI model. Required.""" azure_deployment: str = rest_field( @@ -656,9 +660,10 @@ class ToolDefinition(_model_base.Model): """An abstract representation of an input tool definition that an agent can use. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AzureAISearchToolDefinition, AzureFunctionToolDefinition, BingGroundingToolDefinition, - CodeInterpreterToolDefinition, MicrosoftFabricToolDefinition, FileSearchToolDefinition, - FunctionToolDefinition, OpenApiToolDefinition, SharepointToolDefinition + AzureAISearchToolDefinition, AzureFunctionToolDefinition, BingCustomSearchToolDefinition, + BingGroundingToolDefinition, CodeInterpreterToolDefinition, ConnectedAgentToolDefinition, + MicrosoftFabricToolDefinition, FileSearchToolDefinition, FunctionToolDefinition, + OpenApiToolDefinition, SharepointToolDefinition :ivar type: The object type. Required. Default value is None. :vartype type: str @@ -870,6 +875,43 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, type="azure_function", **kwargs) +class BingCustomSearchToolDefinition(ToolDefinition, discriminator="bing_custom_search"): + """The input definition information for a Bing custom search tool as used to configure an agent. + + :ivar type: The object type, which is always 'bing_custom_search'. Required. Default value is + "bing_custom_search". + :vartype type: str + :ivar bing_custom_search: The list of search configurations used by the bing custom search + tool. Required. + :vartype bing_custom_search: ~azure.ai.projects.models.SearchConfigurationList + """ + + type: Literal["bing_custom_search"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'bing_custom_search'. Required. Default value is + \"bing_custom_search\".""" + bing_custom_search: "_models.SearchConfigurationList" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The list of search configurations used by the bing custom search tool. Required.""" + + @overload + def __init__( + self, + *, + bing_custom_search: "_models.SearchConfigurationList", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type="bing_custom_search", **kwargs) + + class BingGroundingToolDefinition(ToolDefinition, discriminator="bing_grounding"): """The input definition information for a bing grounding search tool as used to configure an agent. @@ -975,6 +1017,83 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class ConnectedAgentDetails(_model_base.Model): + """Information for connecting one agent to another as a tool. + + :ivar id: The identifier of the child agent. Required. + :vartype id: str + :ivar name: The name of the agent to be called. Required. + :vartype name: str + :ivar description: A description of what the agent does, used by the model to choose when and + how to call the agent. Required. + :vartype description: str + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The identifier of the child agent. 
Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent to be called. Required.""" + description: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of what the agent does, used by the model to choose when and how to call the + agent. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + name: str, + description: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ConnectedAgentToolDefinition(ToolDefinition, discriminator="connected_agent"): + """The input definition information for a connected agent tool which defines a domain specific + sub-agent. + + :ivar type: The object type, which is always 'connected_agent'. Required. Default value is + "connected_agent". + :vartype type: str + :ivar connected_agent: The sub-agent to connect. Required. + :vartype connected_agent: ~azure.ai.projects.models.ConnectedAgentDetails + """ + + type: Literal["connected_agent"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'connected_agent'. Required. Default value is + \"connected_agent\".""" + connected_agent: "_models.ConnectedAgentDetails" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The sub-agent to connect. Required.""" + + @overload + def __init__( + self, + *, + connected_agent: "_models.ConnectedAgentDetails", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type="connected_agent", **kwargs) + + class CredentialsApiKeyAuth(_model_base.Model): """The credentials needed for API key authentication. @@ -2159,7 +2278,7 @@ class MAASModelConfig(TargetModelConfig, discriminator="MAAS"): :ivar type: Required. Default value is "MAAS". :vartype type: str - :ivar azure_endpoint: Endpoint URL for MAAS model. Required. + :ivar azure_endpoint: Endpoint targetURI for MAAS model. Required. :vartype azure_endpoint: str :ivar api_key: API Key for MAAS model. Required. :vartype api_key: str @@ -2168,7 +2287,7 @@ class MAASModelConfig(TargetModelConfig, discriminator="MAAS"): type: Literal["MAAS"] = rest_discriminator(name="type", visibility=["read"]) # type: ignore """Required. Default value is \"MAAS\".""" azure_endpoint: str = rest_field(name="azureEndpoint", visibility=["read", "create", "update", "delete", "query"]) - """Endpoint URL for MAAS model. Required.""" + """Endpoint targetURI for MAAS model. Required.""" api_key: str = rest_field(name="apiKey", visibility=["read", "create", "update", "delete", "query"]) """API Key for MAAS model. Required.""" @@ -2873,6 +2992,80 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class MessageImageFileParam(_model_base.Model): + """Defines how an internally uploaded image file is referenced when creating an image-file block. + + :ivar file_id: The ID of the previously uploaded image file. Required. 
+ :vartype file_id: str + :ivar detail: Optional detail level for the image (auto, low, or high). Known values are: + "auto", "low", and "high". + :vartype detail: str or ~azure.ai.projects.models.ImageDetailLevel + """ + + file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the previously uploaded image file. Required.""" + detail: Optional[Union[str, "_models.ImageDetailLevel"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Optional detail level for the image (auto, low, or high). Known values are: \"auto\", \"low\", + and \"high\".""" + + @overload + def __init__( + self, + *, + file_id: str, + detail: Optional[Union[str, "_models.ImageDetailLevel"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MessageImageUrlParam(_model_base.Model): + """Defines how an external image URL is referenced when creating an image-URL block. + + :ivar url: The publicly accessible URL of the external image. Required. + :vartype url: str + :ivar detail: Optional detail level for the image (auto, low, or high). Defaults to 'auto' if + not specified. Known values are: "auto", "low", and "high". + :vartype detail: str or ~azure.ai.projects.models.ImageDetailLevel + """ + + url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The publicly accessible URL of the external image. Required.""" + detail: Optional[Union[str, "_models.ImageDetailLevel"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Optional detail level for the image (auto, low, or high). Defaults to 'auto' if not specified. + Known values are: \"auto\", \"low\", and \"high\".""" + + @overload + def __init__( + self, + *, + url: str, + detail: Optional[Union[str, "_models.ImageDetailLevel"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + class MessageIncompleteDetails(_model_base.Model): """Information providing additional detail about a message entering an incomplete status. @@ -2907,6 +3100,146 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class MessageInputContentBlock(_model_base.Model): + """Defines a single content block when creating a message. The 'type' field determines whether it + is text, an image file, or an external image URL, etc. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + MessageInputImageFileBlock, MessageInputImageUrlBlock, MessageInputTextBlock + + :ivar type: Specifies which kind of content block this is (text, image_file, image_url, etc.). + Required. Known values are: "text", "image_file", and "image_url". + :vartype type: str or ~azure.ai.projects.models.MessageBlockType + """ + + __mapping__: Dict[str, _model_base.Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Specifies which kind of content block this is (text, image_file, image_url, etc.). Required. 
+ Known values are: \"text\", \"image_file\", and \"image_url\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MessageInputImageFileBlock(MessageInputContentBlock, discriminator="image_file"): + """An image-file block in a new message, referencing an internally uploaded image by file ID. + + :ivar type: Must be 'image_file' for an internally uploaded image block. Required. Indicates a + block referencing an internally uploaded image file. + :vartype type: str or ~azure.ai.projects.models.IMAGE_FILE + :ivar image_file: Information about the referenced image file, including file ID and optional + detail level. Required. + :vartype image_file: ~azure.ai.projects.models.MessageImageFileParam + """ + + type: Literal[MessageBlockType.IMAGE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Must be 'image_file' for an internally uploaded image block. Required. Indicates a block + referencing an internally uploaded image file.""" + image_file: "_models.MessageImageFileParam" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Information about the referenced image file, including file ID and optional detail level. + Required.""" + + @overload + def __init__( + self, + *, + image_file: "_models.MessageImageFileParam", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=MessageBlockType.IMAGE_FILE, **kwargs) + + +class MessageInputImageUrlBlock(MessageInputContentBlock, discriminator="image_url"): + """An image-URL block in a new message, referencing an external image by URL. + + :ivar type: Must be 'image_url' for an externally hosted image block. Required. Indicates a + block referencing an external image URL. + :vartype type: str or ~azure.ai.projects.models.IMAGE_URL + :ivar image_url: Information about the external image URL, including the URL and optional + detail level. Required. + :vartype image_url: ~azure.ai.projects.models.MessageImageUrlParam + """ + + type: Literal[MessageBlockType.IMAGE_URL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Must be 'image_url' for an externally hosted image block. Required. Indicates a block + referencing an external image URL.""" + image_url: "_models.MessageImageUrlParam" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Information about the external image URL, including the URL and optional detail level. + Required.""" + + @overload + def __init__( + self, + *, + image_url: "_models.MessageImageUrlParam", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=MessageBlockType.IMAGE_URL, **kwargs) + + +class MessageInputTextBlock(MessageInputContentBlock, discriminator="text"): + """A text block in a new message, containing plain text content. + + :ivar type: Must be 'text' for a text block. Required. Indicates a block containing text + content. + :vartype type: str or ~azure.ai.projects.models.TEXT + :ivar text: The plain text content for this block. Required. + :vartype text: str + """ + + type: Literal[MessageBlockType.TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Must be 'text' for a text block. Required. Indicates a block containing text content.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The plain text content for this block. Required.""" + + @overload + def __init__( + self, + *, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=MessageBlockType.TEXT, **kwargs) + + class MessageTextAnnotation(_model_base.Model): """An abstract representation of an annotation to text thread message content. @@ -3421,6 +3754,56 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.object: Literal["list"] = "list" +class OpenAIPageableListOfAgentThread(_model_base.Model): + """The response data for a requested list of items. + + :ivar object: The object type, which is always list. Required. Default value is "list". + :vartype object: str + :ivar data: The requested list of items. Required. + :vartype data: list[~azure.ai.projects.models.AgentThread] + :ivar first_id: The first ID represented in this list. Required. + :vartype first_id: str + :ivar last_id: The last ID represented in this list. Required. + :vartype last_id: str + :ivar has_more: A value indicating whether there are additional values available not captured + in this list. Required. + :vartype has_more: bool + """ + + object: Literal["list"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type, which is always list. Required. Default value is \"list\".""" + data: List["_models.AgentThread"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The requested list of items. Required.""" + first_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The first ID represented in this list. Required.""" + last_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The last ID represented in this list. Required.""" + has_more: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A value indicating whether there are additional values available not captured in this list. + Required.""" + + @overload + def __init__( + self, + *, + data: List["_models.AgentThread"], + first_id: str, + last_id: str, + has_more: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["list"] = "list" + + class OpenAIPageableListOfRunStep(_model_base.Model): """The response data for a requested list of items. @@ -3805,6 +4188,8 @@ class OpenApiFunctionDefinition(_model_base.Model): :vartype spec: any :ivar auth: Open API authentication details. Required. :vartype auth: ~azure.ai.projects.models.OpenApiAuthDetails + :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults. + :vartype default_params: list[str] """ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3816,6 +4201,8 @@ class OpenApiFunctionDefinition(_model_base.Model): """The openapi function shape, described as a JSON Schema object. Required.""" auth: "_models.OpenApiAuthDetails" = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Open API authentication details. Required.""" + default_params: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of OpenAPI spec parameters that will use user-provided defaults.""" @overload def __init__( @@ -3825,6 +4212,7 @@ def __init__( spec: Any, auth: "_models.OpenApiAuthDetails", description: Optional[str] = None, + default_params: Optional[List[str]] = None, ) -> None: ... @overload @@ -4255,7 +4643,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class RunCompletionUsage(_model_base.Model): """Usage statistics related to the run. This value will be ``null`` if the run is not in a - terminal state (i.e. ``in_progress``\\ , ``queued``\\ , etc.). + terminal state (i.e. ``in_progress``, ``queued``, etc.). :ivar completion_tokens: Number of completion tokens used over the course of the run. Required. :vartype completion_tokens: int @@ -4461,9 +4849,9 @@ class RunStepToolCall(_model_base.Model): existing run. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - RunStepAzureAISearchToolCall, RunStepBingGroundingToolCall, RunStepCodeInterpreterToolCall, - RunStepMicrosoftFabricToolCall, RunStepFileSearchToolCall, RunStepFunctionToolCall, - RunStepSharepointToolCall + RunStepAzureAISearchToolCall, RunStepCustomSearchToolCall, RunStepBingGroundingToolCall, + RunStepCodeInterpreterToolCall, RunStepMicrosoftFabricToolCall, RunStepFileSearchToolCall, + RunStepFunctionToolCall, RunStepOpenAPIToolCall, RunStepSharepointToolCall :ivar type: The object type. Required. Default value is None. :vartype type: str @@ -4823,6 +5211,46 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class RunStepCustomSearchToolCall(RunStepToolCall, discriminator="bing_custom_search"): + """A record of a call to a bing custom search tool, issued by the model in evaluation of a defined + tool, that represents + executed search with bing custom search. + + :ivar id: The ID of the tool call. This ID must be referenced when you submit tool outputs. + Required. + :vartype id: str + :ivar type: The object type, which is always 'bing_custom_search'. Required. Default value is + "bing_custom_search". + :vartype type: str + :ivar bing_custom_search: Reserved for future use. Required. 
+ :vartype bing_custom_search: dict[str, str] + """ + + type: Literal["bing_custom_search"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'bing_custom_search'. Required. Default value is + \"bing_custom_search\".""" + bing_custom_search: Dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Reserved for future use. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + bing_custom_search: Dict[str, str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type="bing_custom_search", **kwargs) + + class RunStepDelta(_model_base.Model): """Represents the delta payload in a streaming run step delta chunk. @@ -5802,6 +6230,44 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, type="fabric_dataagent", **kwargs) +class RunStepOpenAPIToolCall(RunStepToolCall, discriminator="openapi"): + """A record of a call to an OpenAPI tool, issued by the model in evaluation of a defined tool, + that represents + executed OpenAPI operations. + + :ivar id: The ID of the tool call. This ID must be referenced when you submit tool outputs. + Required. + :vartype id: str + :ivar type: The object type, which is always 'openapi'. Required. Default value is "openapi". + :vartype type: str + :ivar open_api: Reserved for future use. Required. + :vartype open_api: dict[str, str] + """ + + type: Literal["openapi"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'openapi'. Required. Default value is \"openapi\".""" + open_api: Dict[str, str] = rest_field(name="openapi", visibility=["read", "create", "update", "delete", "query"]) + """Reserved for future use. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + open_api: Dict[str, str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type="openapi", **kwargs) + + class RunStepSharepointToolCall(RunStepToolCall, discriminator="sharepoint_grounding"): """A record of a call to a SharePoint tool, issued by the model in evaluation of a defined tool, that represents @@ -5878,6 +6344,72 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, type=RunStepType.TOOL_CALLS, **kwargs) +class SearchConfiguration(_model_base.Model): + """A custom search configuration. + + :ivar connection_id: A connection in a ToolConnectionList attached to this tool. Required. + :vartype connection_id: str + :ivar instance_name: Name of the custom configuration instance given to config. Required. + :vartype instance_name: str + """ + + connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A connection in a ToolConnectionList attached to this tool. Required.""" + instance_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the custom configuration instance given to config. 
Required.""" + + @overload + def __init__( + self, + *, + connection_id: str, + instance_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SearchConfigurationList(_model_base.Model): + """A list of search configurations currently used by the ``bing_custom_search`` tool. + + :ivar search_configurations: The connections attached to this tool. There can be a maximum of 1 + connection + resource attached to the tool. Required. + :vartype search_configurations: list[~azure.ai.projects.models.SearchConfiguration] + """ + + search_configurations: List["_models.SearchConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool. Required.""" + + @overload + def __init__( + self, + *, + search_configurations: List["_models.SearchConfiguration"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + class SharepointToolDefinition(ToolDefinition, discriminator="sharepoint_grounding"): """The input definition information for a sharepoint tool as used to configure an agent. @@ -6167,22 +6699,20 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class ThreadMessageOptions(_model_base.Model): - """A single message within an agent thread, as provided during that thread's creation for its - initial state. + """A single message within an agent thread, + as provided during that thread's creation for its initial state. :ivar role: The role of the entity that is creating the message. Allowed values include: - - * ``user``: Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``: Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the conversation. - - Required. Known values are: "user" and "assistant". + ``user``, which indicates the message is sent by an actual user (and should be + used in most cases to represent user-generated messages), and ``assistant``, + which indicates the message is generated by the agent (use this value to insert + messages from the agent into the conversation). Required. Known values are: "user" and + "assistant". :vartype role: str or ~azure.ai.projects.models.MessageRole - :ivar content: The textual content of the initial message. Currently, robust input including - images and annotated text may only be provided via - a separate call to the create message API. Required. - :vartype content: str + :ivar content: The content of the initial message. This may be a basic string (if you only + need text) or an array of typed content blocks (for example, text, image_file, + image_url, and so on). Required. Is either a str type or a [MessageInputContentBlock] type. + :vartype content: str or list[~azure.ai.projects.models.MessageInputContentBlock] :ivar attachments: A list of files attached to the message, and the tools they should be added to. 
:vartype attachments: list[~azure.ai.projects.models.MessageAttachment] @@ -6194,18 +6724,15 @@ class ThreadMessageOptions(_model_base.Model): role: Union[str, "_models.MessageRole"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The role of the entity that is creating the message. Allowed values include: - - * ``user``: Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``: Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the conversation. - - Required. Known values are: \"user\" and \"assistant\". - """ - content: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The textual content of the initial message. Currently, robust input including images and - annotated text may only be provided via - a separate call to the create message API. Required.""" + ``user``, which indicates the message is sent by an actual user (and should be + used in most cases to represent user-generated messages), and ``assistant``, + which indicates the message is generated by the agent (use this value to insert + messages from the agent into the conversation). Required. Known values are: \"user\" and + \"assistant\".""" + content: "_types.MessageInputContent" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content of the initial message. This may be a basic string (if you only + need text) or an array of typed content blocks (for example, text, image_file, + image_url, and so on). Required. Is either a str type or a [MessageInputContentBlock] type.""" attachments: Optional[List["_models.MessageAttachment"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -6220,7 +6747,7 @@ def __init__( self, *, role: Union[str, "_models.MessageRole"], - content: str, + content: "_types.MessageInputContent", attachments: Optional[List["_models.MessageAttachment"]] = None, metadata: Optional[Dict[str, str]] = None, ) -> None: ... @@ -6284,7 +6811,7 @@ class ThreadRun(_model_base.Model): not incomplete. Required. :vartype incomplete_details: ~azure.ai.projects.models.IncompleteRunDetails :ivar usage: Usage statistics related to the run. This value will be ``null`` if the run is not - in a terminal state (i.e. ``in_progress``\\ , ``queued``\\ , etc.). Required. + in a terminal state (i.e. ``in_progress``, ``queued``, etc.). Required. :vartype usage: ~azure.ai.projects.models.RunCompletionUsage :ivar temperature: The sampling temperature used for this run. If not set, defaults to 1. :vartype temperature: float @@ -6613,9 +7140,9 @@ class TruncationObject(_model_base.Model): context window of the run. :ivar type: The truncation strategy to use for the thread. The default is ``auto``. If set to - ``last_messages``\\ , the thread will + ``last_messages``, the thread will be truncated to the ``lastMessages`` count most recent messages in the thread. When set to - ``auto``\\ , messages in the middle of the thread + ``auto``, messages in the middle of the thread will be dropped to fit the context length of the model, ``max_prompt_tokens``. Required. Known values are: "auto" and "last_messages". :vartype type: str or ~azure.ai.projects.models.TruncationStrategy @@ -6630,7 +7157,7 @@ class TruncationObject(_model_base.Model): """The truncation strategy to use for the thread. The default is ``auto``. 
If set to ``last_messages``, the thread will be truncated to the ``lastMessages`` count most recent messages in the thread. When set to - ``auto`` , messages in the middle of the thread + ``auto``, messages in the middle of the thread will be dropped to fit the context length of the model, ``max_prompt_tokens``. Required. Known values are: \"auto\" and \"last_messages\".""" last_messages: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -6767,6 +7294,52 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class UploadFileRequest(_model_base.Model): + """UploadFileRequest. + + :ivar file: The file data, in bytes. Required. + :vartype file: ~azure.ai.projects._vendor.FileType + :ivar purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and + Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and + ``fine-tune`` for Fine-tuning. Required. Known values are: "fine-tune", "fine-tune-results", + "assistants", "assistants_output", "batch", "batch_output", and "vision". + :vartype purpose: str or ~azure.ai.projects.models.FilePurpose + :ivar filename: The name of the file. + :vartype filename: str + """ + + file: FileType = rest_field( + visibility=["read", "create", "update", "delete", "query"], is_multipart_file_input=True + ) + """The file data, in bytes. Required.""" + purpose: Union[str, "_models.FilePurpose"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The intended purpose of the uploaded file. Use ``assistants`` for Agents and Message files, + ``vision`` for Agents image file inputs, ``batch`` for Batch API, and ``fine-tune`` for + Fine-tuning. Required. Known values are: \"fine-tune\", \"fine-tune-results\", \"assistants\", + \"assistants_output\", \"batch\", \"batch_output\", and \"vision\".""" + filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the file.""" + + @overload + def __init__( + self, + *, + file: FileType, + purpose: Union[str, "_models.FilePurpose"], + filename: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + class VectorStore(_model_base.Model): """A vector store is a collection of processed files can be used by the ``file_search`` tool. 
diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index 205b2ff5ff70..62fe1890d7cb 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -7,7 +7,7 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -import asyncio +import asyncio # pylint: disable=do-not-import-asyncio import base64 import datetime import inspect @@ -51,6 +51,7 @@ AzureFunctionStorageQueue, AzureFunctionToolDefinition, AzureFunctionBinding, + BingCustomSearchToolDefinition, BingGroundingToolDefinition, CodeInterpreterToolDefinition, CodeInterpreterToolResource, @@ -71,6 +72,8 @@ RequiredFunctionToolCall, RunStep, RunStepDeltaChunk, + SearchConfiguration, + SearchConfigurationList, SharepointToolDefinition, SubmitToolOutputsAction, ThreadRun, @@ -690,7 +693,6 @@ def _get_func_and_args(self, tool_call: RequiredFunctionToolCall) -> Tuple[Any, arguments = tool_call.function.arguments if function_name not in self._functions: - logging.error("Function '%s' not found.", function_name) raise ValueError(f"Function '{function_name}' not found.") function = self._functions[function_name] @@ -698,11 +700,9 @@ def _get_func_and_args(self, tool_call: RequiredFunctionToolCall) -> Tuple[Any, try: parsed_arguments = json.loads(arguments) except json.JSONDecodeError as e: - logging.error("Invalid JSON arguments for function '%s': %s", function_name, e) raise ValueError(f"Invalid JSON arguments: {e}") from e if not isinstance(parsed_arguments, dict): - logging.error("Arguments must be a JSON object for function '%s'.", function_name) raise TypeError("Arguments must be a JSON object.") return function, parsed_arguments @@ -731,11 +731,10 @@ def resources(self) -> ToolResources: class FunctionTool(BaseFunctionTool): def execute(self, tool_call: RequiredFunctionToolCall) -> Any: - function, parsed_arguments = self._get_func_and_args(tool_call) - try: + function, parsed_arguments = self._get_func_and_args(tool_call) return function(**parsed_arguments) if parsed_arguments else function() - except TypeError as e: + except Exception as e: error_message = f"Error executing function '{tool_call.function.name}': {e}" logging.error(error_message) # Return error message as JSON string back to agent in order to make possible self @@ -746,13 +745,12 @@ def execute(self, tool_call: RequiredFunctionToolCall) -> Any: class AsyncFunctionTool(BaseFunctionTool): async def execute(self, tool_call: RequiredFunctionToolCall) -> Any: # pylint: disable=invalid-overridden-method - function, parsed_arguments = self._get_func_and_args(tool_call) - try: + function, parsed_arguments = self._get_func_and_args(tool_call) if inspect.iscoroutinefunction(function): return await function(**parsed_arguments) if parsed_arguments else await function() return function(**parsed_arguments) if parsed_arguments else function() - except TypeError as e: + except Exception as e: error_message = f"Error executing function '{tool_call.function.name}': {e}" logging.error(error_message) # Return error message as JSON string back to agent in order to make possible self correction @@ -781,7 +779,7 @@ def __init__( :type index_connection_id: str :param index_name: Name of Index in search resource to be used by tool. :type index_name: str - :param query_type: Type of query in an AIIndexResource attached to this agent. 
+ :param query_type: Type of query in an AIIndexResource attached to this agent. Default value is AzureAISearchQueryType.SIMPLE. :type query_type: AzureAISearchQueryType :param filter: Odata filter string for search resource. @@ -834,20 +832,35 @@ class OpenApiTool(Tool[OpenApiToolDefinition]): this class also supports adding and removing additional API definitions dynamically. """ - def __init__(self, name: str, description: str, spec: Any, auth: OpenApiAuthDetails): + def __init__( + self, + name: str, + description: str, + spec: Any, + auth: OpenApiAuthDetails, + default_parameters: Optional[List[str]] = None, + ) -> None: """ Constructor initializes the tool with a primary API definition. :param name: The name of the API. + :type name: str :param description: The API description. + :type description: str :param spec: The API specification. + :type spec: Any :param auth: Authentication details for the API. :type auth: OpenApiAuthDetails + :param default_parameters: List of OpenAPI spec parameters that will use user-provided defaults. + :type default_parameters: Optional[List[str]] """ + default_params: List[str] = [] if default_parameters is None else default_parameters self._default_auth = auth self._definitions: List[OpenApiToolDefinition] = [ OpenApiToolDefinition( - openapi=OpenApiFunctionDefinition(name=name, description=description, spec=spec, auth=auth) + openapi=OpenApiFunctionDefinition( + name=name, description=description, spec=spec, auth=auth, default_params=default_params + ) ) ] @@ -861,7 +874,14 @@ def definitions(self) -> List[OpenApiToolDefinition]: """ return self._definitions - def add_definition(self, name: str, description: str, spec: Any, auth: Optional[OpenApiAuthDetails] = None) -> None: + def add_definition( + self, + name: str, + description: str, + spec: Any, + auth: Optional[OpenApiAuthDetails] = None, + default_parameters: Optional[List[str]] = None, + ) -> None: """ Adds a new API definition dynamically. Raises a ValueError if a definition with the same name already exists. @@ -875,8 +895,12 @@ def add_definition(self, name: str, description: str, spec: Any, auth: Optional[ :param auth: Optional authentication details for this particular API definition. If not provided, the tool's default authentication details will be used. :type auth: Optional[OpenApiAuthDetails] + :param default_parameters: List of OpenAPI spec parameters that will use user-provided defaults. + :type default_parameters: List[str] :raises ValueError: If a definition with the same name exists. """ + default_params: List[str] = [] if default_parameters is None else default_parameters + # Check if a definition with the same name exists. 
if any(definition.openapi.name == name for definition in self._definitions): raise ValueError(f"Definition '{name}' already exists and cannot be added again.") @@ -885,7 +909,9 @@ def add_definition(self, name: str, description: str, spec: Any, auth: Optional[ auth_to_use = auth if auth is not None else self._default_auth new_definition = OpenApiToolDefinition( - openapi=OpenApiFunctionDefinition(name=name, description=description, spec=spec, auth=auth_to_use) + openapi=OpenApiFunctionDefinition( + name=name, description=description, spec=spec, auth=auth_to_use, default_params=default_params + ) ) self._definitions.append(new_definition) @@ -1020,6 +1046,46 @@ def definitions(self) -> List[BingGroundingToolDefinition]: return [BingGroundingToolDefinition(bing_grounding=ToolConnectionList(connection_list=self.connection_ids))] +class BingCustomSearchTool(Tool[BingCustomSearchToolDefinition]): + """ + A tool that searches for information using Bing Custom Search. + """ + + def __init__(self, connection_id: str, instance_name: str): + """ + Initialize Bing Custom Search with a connection_id. + + :param connection_id: Connection ID used by tool. Bing Custom Search tools allow only one connection. + :param instance_name: Config instance name used by tool. + """ + self.connection_ids = [SearchConfiguration(connection_id=connection_id, instance_name=instance_name)] + + @property + def definitions(self) -> List[BingCustomSearchToolDefinition]: + """ + Get the Bing Custom Search tool definitions. + + :rtype: List[BingCustomSearchToolDefinition] + """ + return [ + BingCustomSearchToolDefinition( + bing_custom_search=SearchConfigurationList(search_configurations=self.connection_ids) + ) + ] + + @property + def resources(self) -> ToolResources: + """ + Get the connection tool resources. + + :rtype: ToolResources + """ + return ToolResources() + + def execute(self, tool_call: Any) -> Any: + pass + + class FabricTool(ConnectionTool[MicrosoftFabricToolDefinition]): """ A tool that searches for information using Microsoft Fabric. 
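A short usage sketch for the `BingCustomSearchTool` class added above, passing its `definitions` to `create_agent` via the `tools` parameter. It assumes a Bing Custom Search connection is already provisioned in the project; the environment variable names are illustrative, not part of this diff.

```python
# Sketch only: wire the new BingCustomSearchTool into create_agent via its definitions.
import os

from azure.ai.projects import AIProjectClient
from azure.ai.projects.models import BingCustomSearchTool
from azure.identity import DefaultAzureCredential

project_client = AIProjectClient.from_connection_string(
    credential=DefaultAzureCredential(),
    conn_str=os.environ["PROJECT_CONNECTION_STRING"],  # illustrative variable name
)

# Connection ID of a Bing Custom Search connection and its configuration instance name
# (assumed to exist in the project; variable names are illustrative).
bing_custom_tool = BingCustomSearchTool(
    connection_id=os.environ["BING_CUSTOM_CONNECTION_ID"],
    instance_name=os.environ["BING_CUSTOM_INSTANCE_NAME"],
)

with project_client:
    agent = project_client.agents.create_agent(
        model=os.environ["MODEL_DEPLOYMENT_NAME"],
        name="my-assistant",
        instructions="You are a helpful assistant",
        tools=bing_custom_tool.definitions,
    )
    print(f"Created agent, ID: {agent.id}")
```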
@@ -1312,7 +1378,8 @@ def execute_tool_calls(self, tool_calls: List[Any]) -> Any: } tool_outputs.append(tool_output) except Exception as e: # pylint: disable=broad-exception-caught - logging.error("Failed to execute tool call %s: %s", tool_call, e) + tool_output = {"tool_call_id": tool_call.id, "output": str(e)} + tool_outputs.append(tool_output) return tool_outputs @@ -1855,6 +1922,7 @@ def get_last_text_message_by_role(self, role: MessageRole) -> Optional[MessageTe "FileSearchTool", "FunctionTool", "OpenApiTool", + "BingCustomSearchTool", "BingGroundingTool", "StreamEventData", "SharepointTool", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py index bacb9513b236..480b7afc8931 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py @@ -9,7 +9,7 @@ from io import IOBase import json import sys -from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TYPE_CHECKING, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, List, Optional, TYPE_CHECKING, TypeVar, Union, overload import urllib.parse from azure.core import PipelineClient @@ -33,7 +33,7 @@ from .._configuration import AIProjectClientConfiguration from .._model_base import SdkJSONEncoder, _deserialize from .._serialization import Deserializer, Serializer -from .._vendor import FileType, prepare_multipart_form_data +from .._vendor import prepare_multipart_form_data if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -279,6 +279,40 @@ def build_agents_delete_thread_request(thread_id: str, **kwargs: Any) -> HttpReq return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) +def build_agents_list_threads_request( + *, + limit: Optional[int] = None, + order: Optional[Union[str, _models.ListSortOrder]] = None, + after: Optional[str] = None, + before: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/threads" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if limit is not None: + _params["limit"] = _SERIALIZER.query("limit", limit, "int") + if order is not None: + _params["order"] = _SERIALIZER.query("order", order, "str") + if after is not None: + _params["after"] = _SERIALIZER.query("after", after, "str") + if before is not None: + _params["before"] = _SERIALIZER.query("before", before, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + def build_agents_create_message_request(thread_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -778,7 +812,7 @@ def build_agents_get_file_content_request(file_id: str, **kwargs: Any) -> HttpRe _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01-preview")) - accept = 
_headers.pop("Accept", "application/json") + accept = _headers.pop("Accept", "application/octet-stream") # Construct URL _url = "/files/{fileId}/content" @@ -1731,12 +1765,16 @@ def create_agent( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1822,12 +1860,16 @@ def list_agents( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1887,12 +1929,16 @@ def get_agent(self, agent_id: str, **kwargs: Any) -> _models.Agent: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2138,12 +2184,16 @@ def update_agent( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": 
self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2203,12 +2253,16 @@ def delete_agent(self, agent_id: str, **kwargs: Any) -> _models.AgentDeletionSta params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2368,12 +2422,16 @@ def create_thread( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2433,12 +2491,16 @@ def get_thread(self, thread_id: str, **kwargs: Any) -> _models.AgentThread: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", 
self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2601,12 +2663,16 @@ def update_thread( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2666,12 +2732,16 @@ def delete_thread(self, thread_id: str, **kwargs: Any) -> _models.ThreadDeletion params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2701,13 +2771,108 @@ def delete_thread(self, thread_id: str, **kwargs: Any) -> _models.ThreadDeletion return deserialized # type: ignore + @distributed_trace + def list_threads( + self, + *, + limit: Optional[int] = None, + order: Optional[Union[str, _models.ListSortOrder]] = None, + after: Optional[str] = None, + before: Optional[str] = None, + **kwargs: Any + ) -> _models.OpenAIPageableListOfAgentThread: + """Gets a list of threads that were previously created. + + :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. Default value is None. + :paramtype limit: int + :keyword order: Sort order by the created_at timestamp of the objects. asc for ascending order + and desc for descending order. 
Known values are: "asc" and "desc". Default value is None. + :paramtype order: str or ~azure.ai.projects.models.ListSortOrder + :keyword after: A cursor for use in pagination. after is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, ending with + obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the + list. Default value is None. + :paramtype after: str + :keyword before: A cursor for use in pagination. before is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, ending with + obj_foo, your subsequent call can include before=obj_foo in order to fetch the previous page of + the list. Default value is None. + :paramtype before: str + :return: OpenAIPageableListOfAgentThread. The OpenAIPageableListOfAgentThread is compatible + with MutableMapping + :rtype: ~azure.ai.projects.models.OpenAIPageableListOfAgentThread + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.OpenAIPageableListOfAgentThread] = kwargs.pop("cls", None) + + _request = build_agents_list_threads_request( + limit=limit, + order=order, + after=after, + before=before, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), + "resourceGroupName": self._serialize.url( + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.OpenAIPageableListOfAgentThread, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + @overload def create_message( self, thread_id: str, *, role: Union[str, _models.MessageRole], - content: str, + content: "_types.MessageInputContent", content_type: str = "application/json", attachments: Optional[List[_models.MessageAttachment]] = None, metadata: Optional[Dict[str, str]] = None, @@ -2718,18 +2883,16 @@ def create_message( :param thread_id: Identifier of the thread. Required. 
:type thread_id: str :keyword role: The role of the entity that is creating the message. Allowed values include: - - - * ``user``\\ : Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + ``user``, which indicates the message is sent by an actual user (and should be + used in most cases to represent user-generated messages), and ``assistant``, + which indicates the message is generated by the agent (use this value to insert + messages from the agent into the conversation). Known values are: "user" and "assistant". + Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole - :keyword content: The textual content of the initial message. Currently, robust input including - images and annotated text may only be provided via - a separate call to the create message API. Required. - :paramtype content: str + :keyword content: The content of the initial message. This may be a basic string (if you only + need text) or an array of typed content blocks (for example, text, image_file, + image_url, and so on). Is either a str type or a [MessageInputContentBlock] type. Required. + :paramtype content: str or list[~azure.ai.projects.models.MessageInputContentBlock] :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -2789,7 +2952,7 @@ def create_message( body: Union[JSON, IO[bytes]] = _Unset, *, role: Union[str, _models.MessageRole] = _Unset, - content: str = _Unset, + content: "_types.MessageInputContent" = _Unset, attachments: Optional[List[_models.MessageAttachment]] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any @@ -2801,18 +2964,16 @@ def create_message( :param body: Is either a JSON type or a IO[bytes] type. Required. :type body: JSON or IO[bytes] :keyword role: The role of the entity that is creating the message. Allowed values include: - - - * ``user``\\ : Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + ``user``, which indicates the message is sent by an actual user (and should be + used in most cases to represent user-generated messages), and ``assistant``, + which indicates the message is generated by the agent (use this value to insert + messages from the agent into the conversation). Known values are: "user" and "assistant". + Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole - :keyword content: The textual content of the initial message. Currently, robust input including - images and annotated text may only be provided via - a separate call to the create message API. Required. - :paramtype content: str + :keyword content: The content of the initial message. This may be a basic string (if you only + need text) or an array of typed content blocks (for example, text, image_file, + image_url, and so on). Is either a str type or a [MessageInputContentBlock] type. Required. 
+ :paramtype content: str or list[~azure.ai.projects.models.MessageInputContentBlock] :keyword attachments: A list of files attached to the message, and the tools they should be added to. Default value is None. :paramtype attachments: list[~azure.ai.projects.models.MessageAttachment] @@ -2862,12 +3023,16 @@ def create_message( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2961,12 +3126,16 @@ def list_messages( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3029,12 +3198,16 @@ def get_message(self, thread_id: str, message_id: str, **kwargs: Any) -> _models params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3194,12 +3367,16 @@ def update_message( 
params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3284,8 +3461,8 @@ def create_run( :keyword tools: The overridden list of enabled tools that the agent should use to run the thread. Default value is None. :paramtype tools: list[~azure.ai.projects.models.ToolDefinition] - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. :paramtype stream_parameter: bool @@ -3454,8 +3631,8 @@ def create_run( :keyword tools: The overridden list of enabled tools that the agent should use to run the thread. Default value is None. :paramtype tools: list[~azure.ai.projects.models.ToolDefinition] - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. 
:paramtype stream_parameter: bool @@ -3564,12 +3741,16 @@ def create_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3659,12 +3840,16 @@ def list_runs( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3727,12 +3912,16 @@ def get_run(self, thread_id: str, run_id: str, **kwargs: Any) -> _models.ThreadR params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3892,12 +4081,16 @@ def update_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": 
self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4069,12 +4262,16 @@ def submit_tool_outputs_to_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4137,12 +4334,16 @@ def cancel_run(self, thread_id: str, run_id: str, **kwargs: Any) -> _models.Thre params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4217,8 +4418,8 @@ def create_thread_and_run( :keyword tool_resources: Override the tools the agent can use for this run. This is useful for modifying the behavior on a per-run basis. Default value is None. :paramtype tool_resources: ~azure.ai.projects.models.UpdateToolResourcesOptions - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. 
Default value is None. :paramtype stream_parameter: bool @@ -4351,8 +4552,8 @@ def create_thread_and_run( :keyword tool_resources: Override the tools the agent can use for this run. This is useful for modifying the behavior on a per-run basis. Default value is None. :paramtype tool_resources: ~azure.ai.projects.models.UpdateToolResourcesOptions - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. :paramtype stream_parameter: bool @@ -4459,12 +4660,16 @@ def create_thread_and_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4544,12 +4749,16 @@ def get_run_step( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4650,12 +4859,16 @@ def list_run_steps( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", 
self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4719,12 +4932,16 @@ def list_files( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4755,59 +4972,16 @@ def list_files( return deserialized # type: ignore @overload - def upload_file( - self, *, file: FileType, purpose: Union[str, _models.FilePurpose], filename: Optional[str] = None, **kwargs: Any - ) -> _models.OpenAIFile: - """Uploads a file for use by other operations. - - :keyword file: The file data, in bytes. Required. - :paramtype file: ~azure.ai.projects._vendor.FileType - :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. - :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose - :keyword filename: The name of the file. Default value is None. - :paramtype filename: str - :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.OpenAIFile - :raises ~azure.core.exceptions.HttpResponseError: - """ - + def _upload_file(self, body: _models._models.UploadFileRequest, **kwargs: Any) -> _models.OpenAIFile: ... @overload - def upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: - """Uploads a file for use by other operations. - - :param body: Required. - :type body: JSON - :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.OpenAIFile - :raises ~azure.core.exceptions.HttpResponseError: - """ + def _upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: ... @distributed_trace - def upload_file( - self, - body: JSON = _Unset, - *, - file: FileType = _Unset, - purpose: Union[str, _models.FilePurpose] = _Unset, - filename: Optional[str] = None, - **kwargs: Any - ) -> _models.OpenAIFile: + def _upload_file(self, body: Union[_models._models.UploadFileRequest, JSON], **kwargs: Any) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Is one of the following types: JSON Required. 
- :type body: JSON - :keyword file: The file data, in bytes. Required. - :paramtype file: ~azure.ai.projects._vendor.FileType - :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. - :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose - :keyword filename: The name of the file. Default value is None. - :paramtype filename: str + :param body: Multipart body. Is either a UploadFileRequest type or a JSON type. Required. + :type body: ~azure.ai.projects.models._models.UploadFileRequest or JSON :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile :raises ~azure.core.exceptions.HttpResponseError: @@ -4825,13 +4999,6 @@ def upload_file( cls: ClsType[_models.OpenAIFile] = kwargs.pop("cls", None) - if body is _Unset: - if file is _Unset: - raise TypeError("missing required argument: file") - if purpose is _Unset: - raise TypeError("missing required argument: purpose") - body = {"file": file, "filename": filename, "purpose": purpose} - body = {k: v for k, v in body.items() if v is not None} _body = body.as_dict() if isinstance(body, _model_base.Model) else body _file_fields: List[str] = ["file"] _data_fields: List[str] = ["purpose", "filename"] @@ -4845,12 +5012,16 @@ def upload_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4910,12 +5081,16 @@ def delete_file(self, file_id: str, **kwargs: Any) -> _models.FileDeletionStatus params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": 
self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4975,12 +5150,16 @@ def get_file(self, file_id: str, **kwargs: Any) -> _models.OpenAIFile: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5011,13 +5190,13 @@ def get_file(self, file_id: str, **kwargs: Any) -> _models.OpenAIFile: return deserialized # type: ignore @distributed_trace - def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: + def _get_file_content(self, file_id: str, **kwargs: Any) -> Iterator[bytes]: """Retrieves the raw content of a specific file. :param file_id: The ID of the file to retrieve. Required. :type file_id: str - :return: bytes - :rtype: bytes + :return: Iterator[bytes] + :rtype: Iterator[bytes] :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -5031,7 +5210,7 @@ def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[bytes] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_agents_get_file_content_request( file_id=file_id, @@ -5040,16 +5219,20 @@ def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = kwargs.pop("stream", False) + _stream = kwargs.pop("stream", True) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -5065,10 +5248,7 @@ def 
_get_file_content(self, file_id: str, **kwargs: Any) -> bytes: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(bytes, response.json(), format="base64") + deserialized = response.iter_bytes() if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -5131,12 +5311,16 @@ def list_vector_stores( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5317,12 +5501,16 @@ def create_vector_store( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5382,12 +5570,16 @@ def get_vector_store(self, vector_store_id: str, **kwargs: Any) -> _models.Vecto params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", 
self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5548,12 +5740,16 @@ def modify_vector_store( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5614,12 +5810,16 @@ def delete_vector_store(self, vector_store_id: str, **kwargs: Any) -> _models.Ve params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5714,12 +5914,16 @@ def list_vector_store_files( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5876,12 +6080,16 @@ def create_vector_store_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": 
self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5944,12 +6152,16 @@ def get_vector_store_file(self, vector_store_id: str, file_id: str, **kwargs: An params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6017,12 +6229,16 @@ def delete_vector_store_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6179,12 +6395,16 @@ def create_vector_store_file_batch( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", 
skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6249,12 +6469,16 @@ def get_vector_store_file_batch( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6320,12 +6544,16 @@ def cancel_vector_store_file_batch( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6424,12 +6652,16 @@ def list_vector_store_file_batch_files( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", 
skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6504,12 +6736,16 @@ def _get_workspace(self, **kwargs: Any) -> _models._models.GetWorkspaceResponse: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6587,12 +6823,16 @@ def _list_connections( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6654,12 +6894,16 @@ def _get_connection(self, connection_name: str, **kwargs: Any) -> _models._model params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6755,12 +6999,16 @@ def _get_connection_with_secrets( params=_params, ) 
path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6845,12 +7093,16 @@ def _get_app_insights( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6930,12 +7182,16 @@ def get(self, id: str, **kwargs: Any) -> _models.Evaluation: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7056,12 +7312,16 @@ def create(self, evaluation: Union[_models.Evaluation, JSON, IO[bytes]], **kwarg params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", 
self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7131,14 +7391,18 @@ def prepare_request(next_link=None): params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7156,14 +7420,18 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7171,7 +7439,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Evaluation], deserialized["value"]) + list_of_elem = _deserialize(List[_models.Evaluation], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -7297,12 +7565,16 @@ def update( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, 
"str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7368,12 +7640,16 @@ def get_schedule(self, name: str, **kwargs: Any) -> _models.EvaluationSchedule: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7511,12 +7787,16 @@ def create_or_replace_schedule( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7591,14 +7871,18 @@ def prepare_request(next_link=None): params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", 
self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7616,14 +7900,18 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7631,7 +7919,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.EvaluationSchedule], deserialized["value"]) + list_of_elem = _deserialize(List[_models.EvaluationSchedule], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -7683,12 +7971,16 @@ def disable_schedule(self, name: str, **kwargs: Any) -> None: # pylint: disable params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py index 0db1e7c1a4b8..9cbaa76152b7 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py @@ -24,6 +24,8 @@ Sequence, TextIO, Union, + Callable, + Set, cast, overload, ) @@ -841,7 +843,7 @@ class 
AgentsOperations(AgentsOperationsGenerated): def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) - self._toolset: Dict[str, _models.ToolSet] = {} + self._function_tool = _models.FunctionTool(set()) # pylint: disable=arguments-differ @overload @@ -1073,8 +1075,6 @@ def create_agent( **kwargs, ) - if toolset is not None: - self._toolset[new_agent.id] = toolset return new_agent # pylint: disable=arguments-differ @@ -1327,7 +1327,6 @@ def update_agent( return super().update_agent(body=body, **kwargs) if toolset is not None: - self._toolset[agent_id] = toolset tools = toolset.definitions tool_resources = toolset.resources @@ -1823,11 +1822,9 @@ def create_and_process_run( # We need tool set only if we are executing local function. In case if # the tool is azure_function we just need to wait when it will be finished. if any(tool_call.type == "function" for tool_call in tool_calls): - toolset = toolset or self._toolset.get(run.agent_id) - if toolset is not None: - tool_outputs = toolset.execute_tool_calls(tool_calls) - else: - raise ValueError("Toolset is not available in the client.") + toolset = _models.ToolSet() + toolset.add(self._function_tool) + tool_outputs = toolset.execute_tool_calls(tool_calls) logging.info("Tool outputs: %s", tool_outputs) if tool_outputs: @@ -2520,13 +2517,14 @@ def _handle_submit_tool_outputs(self, run: _models.ThreadRun, event_handler: _mo # We need tool set only if we are executing local function. In case if # the tool is azure_function we just need to wait when it will be finished. - if any(tool_call.type == "function" for tool_call in tool_calls): - toolset = self._toolset.get(run.agent_id) - if toolset: - tool_outputs = toolset.execute_tool_calls(tool_calls) - else: - logger.debug("Toolset is not available in the client.") - return + if ( + any(tool_call.type == "function" for tool_call in tool_calls) + and len(self._function_tool.definitions) > 0 + ): + + toolset = _models.ToolSet() + toolset.add(self._function_tool) + tool_outputs = toolset.execute_tool_calls(tool_calls) logger.info("Tool outputs: %s", tool_outputs) if tool_outputs: @@ -2605,8 +2603,8 @@ def upload_file( :keyword file_path: Path to the file. Required if `body` and `purpose` are not provided. :paramtype file_path: Optional[str] :keyword purpose: Known values are: "fine-tune", "fine-tune-results", "assistants", - :paramtype purpose: Union[str, _models.FilePurpose, None] "assistants_output", "batch", "batch_output", and "vision". Required if `body` and `file` are not provided. + :paramtype purpose: Union[str, _models.FilePurpose, None] :keyword filename: The name of the file. :paramtype filename: Optional[str] :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping @@ -2615,15 +2613,19 @@ def upload_file( :raises IOError: If there are issues with reading the file. :raises: HttpResponseError for HTTP errors. 
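        For illustration only (the file path and purpose value below are assumed, not taken from a
        specific sample), a typical call with a local file looks like:

            project_client.agents.upload_file(file_path="./product_info.md", purpose="assistants")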
""" + # If a JSON body is provided directly, pass it along if body is not None: - return super().upload_file(body=body, **kwargs) + return super()._upload_file(body=body, **kwargs) + # Convert FilePurpose enum to string if necessary if isinstance(purpose, FilePurpose): purpose = purpose.value + # If file content is passed in directly if file is not None and purpose is not None: - return super().upload_file(file=file, purpose=purpose, filename=filename, **kwargs) + return super()._upload_file(body={"file": file, "purpose": purpose, "filename": filename}, **kwargs) + # If a file path is provided if file_path is not None and purpose is not None: if not os.path.isfile(file_path): raise FileNotFoundError(f"The file path provided does not exist: {file_path}") @@ -2632,11 +2634,11 @@ def upload_file( with open(file_path, "rb") as f: content = f.read() - # Determine filename and create correct FileType + # If no explicit filename is provided, use the base name base_filename = filename or os.path.basename(file_path) file_content: FileType = (base_filename, content) - return super().upload_file(file=file_content, purpose=purpose, **kwargs) + return super()._upload_file(body={"file": file_content, "purpose": purpose}, **kwargs) except IOError as e: raise IOError(f"Unable to read file: {file_path}") from e @@ -3304,10 +3306,57 @@ def delete_agent(self, agent_id: str, **kwargs: Any) -> _models.AgentDeletionSta :rtype: ~azure.ai.projects.models.AgentDeletionStatus :raises ~azure.core.exceptions.HttpResponseError: """ - if agent_id in self._toolset: - del self._toolset[agent_id] return super().delete_agent(agent_id, **kwargs) + @overload + def enable_auto_function_calls(self, *, functions: Set[Callable[..., Any]]) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword functions: A set of callable functions to be used as tools. + :type functions: Set[Callable[..., Any]] + """ + + @overload + def enable_auto_function_calls(self, *, function_tool: _models.FunctionTool) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword function_tool: A FunctionTool object representing the tool to be used. + :type function_tool: Optional[_models.FunctionTool] + """ + + @overload + def enable_auto_function_calls(self, *, toolset: _models.ToolSet) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword toolset: A ToolSet object representing the set of tools to be used. + :type toolset: Optional[_models.ToolSet] + """ + + @distributed_trace + def enable_auto_function_calls( + self, + *, + functions: Optional[Set[Callable[..., Any]]] = None, + function_tool: Optional[_models.FunctionTool] = None, + toolset: Optional[_models.ToolSet] = None, + ) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword functions: A set of callable functions to be used as tools. + :type functions: Set[Callable[..., Any]] + :keyword function_tool: A FunctionTool object representing the tool to be used. + :type function_tool: Optional[_models.FunctionTool] + :keyword toolset: A ToolSet object representing the set of tools to be used. 
+ :type toolset: Optional[_models.ToolSet] + """ + if functions: + self._function_tool = _models.FunctionTool(functions) + elif function_tool: + self._function_tool = function_tool + elif toolset: + tool = toolset.get_tool(_models.FunctionTool) + self._function_tool = tool + __all__: List[str] = [ "AgentsOperations", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/__init__.py new file mode 100644 index 000000000000..f1e98bf1be1a --- /dev/null +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/__init__.py @@ -0,0 +1,16 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +# pylint: disable=unused-import +try: + import prompty # pylint: disable=unused-import +except ImportError as exc: + raise ImportError( + "The 'prompty' package is required to use the 'azure.ai.projects.prompts' module. " + "Please install it by running 'pip install prompty'." + ) from exc + +from ._patch import patch_sdk as _patch_sdk, PromptTemplate + +_patch_sdk() diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_patch.py new file mode 100644 index 000000000000..13fd07bcac99 --- /dev/null +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_patch.py @@ -0,0 +1,124 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +# pylint: disable=line-too-long,R,no-member +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + +import traceback +from pathlib import Path +from typing import Any, Dict, List, Optional +from typing_extensions import Self +from prompty import headless, load, prepare +from prompty.core import Prompty +from ._utils import remove_leading_empty_space + + +class PromptTemplate: + """The helper class which takes variant of inputs, e.g. Prompty format or string, and returns the parsed prompt in an array. + Prompty library is required to be installed to use this class. + """ + + @classmethod + def from_prompty(cls, file_path: str) -> Self: + """Initialize a PromptTemplate object from a prompty file. + + :param file_path: The path to the prompty file. + :type file_path: str + :return: The PromptTemplate object. + :rtype: PromptTemplate + """ + if not file_path: + raise ValueError("Please provide file_path") + + # Get the absolute path of the file by `traceback.extract_stack()`, it's "-2" because: + # In the stack, the last function is the current function. + # The second last function is the caller function, which is the root of the file_path. + stack = traceback.extract_stack() + caller = Path(stack[-2].filename) + abs_file_path = Path(caller.parent / Path(file_path)).resolve().absolute() + + prompty = load(str(abs_file_path)) + prompty.template.type = "mustache" # For Azure, default to mustache instead of Jinja2 + return cls(prompty=prompty) + + @classmethod + def from_string(cls, prompt_template: str, api: str = "chat", model_name: Optional[str] = None) -> Self: + """Initialize a PromptTemplate object from a message template. + + :param prompt_template: The prompt template string. + :type prompt_template: str + :param api: The API type, e.g. "chat" or "completion". 
+ :type api: str + :param model_name: The model name, e.g. "gpt-4o-mini". + :type model_name: str + :return: The PromptTemplate object. + :rtype: PromptTemplate + """ + prompt_template = remove_leading_empty_space(prompt_template) + prompty = headless(api=api, content=prompt_template) + prompty.template.type = "mustache" # For Azure, default to mustache instead of Jinja2 + prompty.template.parser = "prompty" + return cls( + api=api, + model_name=model_name, + prompty=prompty, + ) + + def __init__( + self, + *, + api: str = "chat", + prompty: Optional[Prompty] = None, + prompt_template: Optional[str] = None, + model_name: Optional[str] = None, + ) -> None: + self.prompty = prompty + if self.prompty is not None: + self.model_name = ( + self.prompty.model.configuration["azure_deployment"] + if "azure_deployment" in self.prompty.model.configuration + else None + ) + self.parameters = self.prompty.model.parameters + self._config = {} + elif prompt_template is not None: + self.model_name = model_name + self.parameters = {} + # _config is a dict to hold the internal configuration + self._config = { + "api": api if api is not None else "chat", + "prompt_template": prompt_template, + } + else: + raise ValueError("Please pass valid arguments for PromptTemplate") + + def create_messages(self, data: Optional[Dict[str, Any]] = None, **kwargs) -> List[Dict[str, Any]]: + """Render the prompt template with the given data. + + :param data: The data to render the prompt template with. + :type data: Optional[Dict[str, Any]] + :return: The rendered prompt template. + :rtype: List[Dict[str, Any]] + """ + if data is None: + data = kwargs + + if self.prompty is not None: + parsed = prepare(self.prompty, data) + return parsed # type: ignore + else: + raise ValueError("Please provide valid prompt template") + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_utils.py b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_utils.py new file mode 100644 index 000000000000..a85e193322e5 --- /dev/null +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_utils.py @@ -0,0 +1,39 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +import sys + + +def remove_leading_empty_space(multiline_str: str) -> str: + """ + Processes a multiline string by: + 1. Removing empty lines + 2. Finding the minimum leading spaces + 3. Indenting all lines to the minimum level + + :param multiline_str: The input multiline string. + :type multiline_str: str + :return: The processed multiline string. 
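A brief sketch of how the new prompts helpers fit together (the template text and variable name are invented for illustration; the mustache syntax follows the default set above):

    from azure.ai.projects.prompts import PromptTemplate

    prompt_template = PromptTemplate.from_string(
        api="chat",
        prompt_template="""
        system:
        You are a helpful writing assistant.
        The user's first name is {{first_name}}.
        """,
    )
    messages = prompt_template.create_messages(first_name="Jane")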
+ :rtype: str + """ + lines = multiline_str.splitlines() + start_index = 0 + while start_index < len(lines) and lines[start_index].strip() == "": + start_index += 1 + + # Find the minimum number of leading spaces + min_spaces = sys.maxsize + for line in lines[start_index:]: + if len(line.strip()) == 0: + continue + spaces = len(line) - len(line.lstrip()) + spaces += line.lstrip().count("\t") * 2 # Count tabs as 2 spaces + min_spaces = min(min_spaces, spaces) + + # Remove leading spaces and indent to the minimum level + processed_lines = [] + for line in lines[start_index:]: + processed_lines.append(line[min_spaces:]) + + return "\n".join(processed_lines) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_trace_function.py b/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_trace_function.py index 1890a6f1e88d..7a1284e88af5 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_trace_function.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_trace_function.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. # ------------------------------------ import functools -import asyncio +import asyncio # pylint: disable=do-not-import-asyncio from typing import Any, Callable, Optional, Dict try: diff --git a/sdk/ai/azure-ai-projects/dev_requirements.txt b/sdk/ai/azure-ai-projects/dev_requirements.txt index 0b28efcde9bc..89df890b9973 100644 --- a/sdk/ai/azure-ai-projects/dev_requirements.txt +++ b/sdk/ai/azure-ai-projects/dev_requirements.txt @@ -8,3 +8,4 @@ openai opentelemetry-sdk opentelemetry-exporter-otlp-proto-grpc azure-ai-ml +prompty diff --git a/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_run_with_toolset_async.py b/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_run_with_toolset_async.py index a57cb6cb93fc..2e2f33a6fad0 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_run_with_toolset_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_run_with_toolset_async.py @@ -40,6 +40,7 @@ async def main() -> None: toolset = AsyncToolSet() toolset.add(functions) + project_client.agents.enable_auto_function_calls(toolset=toolset) agent = await project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], diff --git a/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_stream_eventhandler_with_toolset_async.py b/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_stream_eventhandler_with_toolset_async.py index 781c9fe4b190..041bc8fcd499 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_stream_eventhandler_with_toolset_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_stream_eventhandler_with_toolset_async.py @@ -70,6 +70,7 @@ async def main() -> None: toolset = AsyncToolSet() toolset.add(functions) + project_client.agents.enable_auto_function_calls(functions=user_async_functions) agent = await project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], name="my-assistant", diff --git a/sdk/ai/azure-ai-projects/samples/agents/image_file.png b/sdk/ai/azure-ai-projects/samples/agents/image_file.png new file mode 100644 index 000000000000..50ae6c65367a Binary files /dev/null and b/sdk/ai/azure-ai-projects/samples/agents/image_file.png differ diff --git a/sdk/ai/azure-ai-projects/samples/agents/multiagent/agent_team.py b/sdk/ai/azure-ai-projects/samples/agents/multiagent/agent_team.py index 6f1b99c28915..a01d7d9060e4 100644 --- 
a/sdk/ai/azure-ai-projects/samples/agents/multiagent/agent_team.py +++ b/sdk/ai/azure-ai-projects/samples/agents/multiagent/agent_team.py @@ -332,6 +332,7 @@ def process_request(self, request: str) -> None: ) print(f"Created and processed run for agent '{agent.name}', run ID: {run.id}") messages = self._project_client.agents.list_messages(thread_id=self._agent_thread.id) + print(messages) text_message = messages.get_last_text_message_by_role(role=MessageRole.AGENT) if text_message and text_message.text: print(f"Agent '{agent.name}' completed task. " f"Outcome: {text_message.text.value}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team.py b/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team.py index 184a7ef20ca5..034a7d7acf94 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team.py +++ b/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team.py @@ -22,7 +22,7 @@ import os from azure.ai.projects import AIProjectClient from azure.identity import DefaultAzureCredential -from agent_team import AgentTeam +from agent_team import AgentTeam, _create_task from agent_trace_configurator import AgentTraceConfigurator project_client = AIProjectClient.from_connection_string( @@ -35,6 +35,7 @@ if model_deployment_name is not None: AgentTraceConfigurator(project_client=project_client).setup_tracing() with project_client: + project_client.agents.enable_auto_function_calls(functions={_create_task}) agent_team = AgentTeam("test_team", project_client=project_client) agent_team.add_agent( model=model_deployment_name, diff --git a/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team_custom_team_leader.py b/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team_custom_team_leader.py index 62a270799eee..039ca3071543 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team_custom_team_leader.py +++ b/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team_custom_team_leader.py @@ -73,6 +73,8 @@ def create_task(team_name: str, recipient: str, request: str, requestor: str) -> agent_team = AgentTeam("test_team", project_client=project_client) toolset = ToolSet() toolset.add(default_function_tool) + + project_client.agents.enable_auto_function_calls(toolset=toolset) agent_team.set_team_leader( model=model_deployment_name, name="TeamLeader", diff --git a/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_multi_agent_team.py b/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_multi_agent_team.py index ae9c05323d22..0f10a464e6ba 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_multi_agent_team.py +++ b/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_multi_agent_team.py @@ -42,6 +42,12 @@ model_deployment_name = os.getenv("MODEL_DEPLOYMENT_NAME") +project_client.agents.enable_auto_function_calls( + function_tool=FunctionTool( + {fetch_current_datetime, fetch_weather, send_email_using_recipient_name, convert_temperature} + ) +) + if model_deployment_name is not None: AgentTraceConfigurator(project_client=project_client).setup_tracing() with project_client: diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_azure_ai_search.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_azure_ai_search.py index 0d76e18a0a8c..5f81c81c6423 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_azure_ai_search.py +++ 
b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_azure_ai_search.py @@ -52,11 +52,7 @@ # Initialize agent AI search tool and add the search index connection id ai_search = AzureAISearchTool( - index_connection_id=conn_id, - index_name="sample_index", - query_type=AzureAISearchQueryType.SIMPLE, - top_k=3, - filter="" + index_connection_id=conn_id, index_name="sample_index", query_type=AzureAISearchQueryType.SIMPLE, top_k=3, filter="" ) # Create agent with AI search tool and process assistant run diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_basics.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_basics.py index f18f1425a031..7a3d260ff623 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_basics.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_basics.py @@ -50,6 +50,11 @@ # [END create_thread] print(f"Created thread, thread ID: {thread.id}") + # List all threads for the agent + # [START list_threads] + threads = project_client.agents.list_threads() + # [END list_threads] + # [START create_message] message = project_client.agents.create_message(thread_id=thread.id, role="user", content="Hello, tell me a joke") # [END create_message] @@ -80,4 +85,3 @@ print(f"{data_point.role}: {last_message_content.text.value}") # [END list_messages] - print(f"Messages: {messages}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_bing_custom_search.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_bing_custom_search.py new file mode 100644 index 000000000000..aed44c7747f0 --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_bing_custom_search.py @@ -0,0 +1,92 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + This sample demonstrates how to use agent operations with the Bing Custom Search tool from + the Azure Agents service using a synchronous client. + For more information on the Bing Custom Search tool, see: https://aka.ms/AgentCustomSearchDoc + +USAGE: + python sample_agents_bing_custom_search.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + 1) PROJECT_CONNECTION_STRING - The project connection string, as found in the overview page of your + Azure AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in + the "Models + endpoints" tab in your Azure AI Foundry project. + 3) BING_CUSTOM_CONNECTION_NAME - The connection name of the Bing Custom Search connection, as found in the + "Connected resources" tab in your Azure AI Foundry project. 
+""" + +import os +from azure.ai.projects import AIProjectClient +from azure.ai.projects.models import MessageRole, BingCustomSearchTool +from azure.identity import DefaultAzureCredential + + +project_client = AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=os.environ["PROJECT_CONNECTION_STRING"], +) + +# [START create_agent_with_bing_custom_search_tool] +bing_custom_connection = project_client.connections.get(connection_name=os.environ["BING_CUSTOM_CONNECTION_NAME"]) +conn_id = bing_custom_connection.id + +print(conn_id) + +# Initialize agent bing custom search tool and add the connection id +bing_custom_tool = BingCustomSearchTool(connection_id=conn_id, instance_name="") + +# Create agent with the bing custom search tool and process assistant run +with project_client: + agent = project_client.agents.create_agent( + model=os.environ["MODEL_DEPLOYMENT_NAME"], + name="my-assistant", + instructions="You are a helpful assistant", + tools=bing_custom_tool.definitions, + headers={"x-ms-enable-preview": "true"}, + ) + # [END create_agent_with_bing_custom_search_tool] + + print(f"Created agent, ID: {agent.id}") + + # Create thread for communication + thread = project_client.agents.create_thread() + print(f"Created thread, ID: {thread.id}") + + # Create message to thread + message = project_client.agents.create_message( + thread_id=thread.id, + role=MessageRole.USER, + content="How many medals did the USA win in the 2024 summer olympics?", + ) + print(f"Created message, ID: {message.id}") + + # Create and process agent run in thread with tools + run = project_client.agents.create_and_process_run(thread_id=thread.id, agent_id=agent.id) + print(f"Run finished with status: {run.status}") + + if run.status == "failed": + print(f"Run failed: {run.last_error}") + + # Delete the assistant when done + project_client.agents.delete_agent(agent.id) + print("Deleted agent") + + # Print the Agent's response message with optional citation + response_message = project_client.agents.list_messages(thread_id=thread.id).get_last_message_by_role( + MessageRole.AGENT + ) + if response_message: + for text_message in response_message.text_messages: + print(f"Agent response: {text_message.text.value}") + for annotation in response_message.url_citation_annotations: + print(f"URL Citation: [{annotation.url_citation.title}]({annotation.url_citation.url})") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_base64.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_base64.py new file mode 100644 index 000000000000..aba75b1fd064 --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_base64.py @@ -0,0 +1,110 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + This sample demonstrates how to use basic agent operations using image file input for the + the Azure Agents service using a synchronous client. + +USAGE: + python sample_agents_image_input_base64.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + 1) PROJECT_CONNECTION_STRING - The project connection string, as found in the overview page of your + Azure AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in + the "Models + endpoints" tab in your Azure AI Foundry project. 
+""" + +import os, time, base64 +from typing import List +from azure.ai.projects import AIProjectClient +from azure.identity import DefaultAzureCredential +from azure.ai.projects.models import ( + MessageTextContent, + MessageInputContentBlock, + MessageImageUrlParam, + MessageInputTextBlock, + MessageInputImageUrlBlock, +) + + +def image_to_base64(image_path: str) -> str: + """ + Convert an image file to a Base64-encoded string. + + :param image_path: The path to the image file (e.g. 'image_file.png') + :return: A Base64-encoded string representing the image. + :raises FileNotFoundError: If the provided file path does not exist. + :raises OSError: If there's an error reading the file. + """ + if not os.path.isfile(image_path): + raise FileNotFoundError(f"File not found at: {image_path}") + + try: + with open(image_path, "rb") as image_file: + file_data = image_file.read() + return base64.b64encode(file_data).decode("utf-8") + except Exception as exc: + raise OSError(f"Error reading file '{image_path}'") from exc + + +project_client = AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=os.environ["PROJECT_CONNECTION_STRING"], +) + +with project_client: + + agent = project_client.agents.create_agent( + model=os.environ["MODEL_DEPLOYMENT_NAME"], + name="my-assistant", + instructions="You are helpful assistant", + ) + print(f"Created agent, agent ID: {agent.id}") + + thread = project_client.agents.create_thread() + print(f"Created thread, thread ID: {thread.id}") + + input_message = "Hello, what is in the image ?" + image_base64 = image_to_base64("image_file.png") + img_url = f"data:image/png;base64,{image_base64}" + url_param = MessageImageUrlParam(url=img_url, detail="high") + content_blocks: List[MessageInputContentBlock] = [ + MessageInputTextBlock(text=input_message), + MessageInputImageUrlBlock(image_url=url_param), + ] + message = project_client.agents.create_message(thread_id=thread.id, role="user", content=content_blocks) + print(f"Created message, message ID: {message.id}") + + run = project_client.agents.create_run(thread_id=thread.id, agent_id=agent.id) + + # Poll the run as long as run status is queued or in progress + while run.status in ["queued", "in_progress", "requires_action"]: + # Wait for a second + time.sleep(1) + run = project_client.agents.get_run(thread_id=thread.id, run_id=run.id) + print(f"Run status: {run.status}") + + if run.status == "failed": + print(f"Run failed: {run.last_error}") + + project_client.agents.delete_agent(agent.id) + print("Deleted agent") + + messages = project_client.agents.list_messages(thread_id=thread.id) + + # The messages are following in the reverse order, + # we will iterate them and output only text contents. + for data_point in reversed(messages.data): + last_message_content = data_point.content[-1] + if isinstance(last_message_content, MessageTextContent): + print(f"{data_point.role}: {last_message_content.text.value}") + + print(f"Messages: {messages}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_file.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_file.py new file mode 100644 index 000000000000..2ba4a1ba1c78 --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_file.py @@ -0,0 +1,91 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +""" +DESCRIPTION: + This sample demonstrates how to use basic agent operations using image file input for the + the Azure Agents service using a synchronous client. + +USAGE: + python sample_agents_image_input_file.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + 1) PROJECT_CONNECTION_STRING - The project connection string, as found in the overview page of your + Azure AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in + the "Models + endpoints" tab in your Azure AI Foundry project. +""" + +import os, time +from typing import List +from azure.ai.projects import AIProjectClient +from azure.identity import DefaultAzureCredential +from azure.ai.projects.models import ( + MessageTextContent, + MessageInputContentBlock, + MessageImageFileParam, + MessageInputTextBlock, + MessageInputImageFileBlock, +) + + +project_client = AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=os.environ["PROJECT_CONNECTION_STRING"], +) + +with project_client: + + agent = project_client.agents.create_agent( + model=os.environ["MODEL_DEPLOYMENT_NAME"], + name="my-assistant", + instructions="You are helpful assistant", + ) + print(f"Created agent, agent ID: {agent.id}") + + thread = project_client.agents.create_thread() + print(f"Created thread, thread ID: {thread.id}") + + image_file = project_client.agents.upload_file_and_poll(file_path="image_file.png", purpose="assistants") + print(f"Uploaded file, file ID: {image_file.id}") + + input_message = "Hello, what is in the image ?" + file_param = MessageImageFileParam(file_id=image_file.id, detail="high") + content_blocks: List[MessageInputContentBlock] = [ + MessageInputTextBlock(text=input_message), + MessageInputImageFileBlock(image_file=file_param), + ] + message = project_client.agents.create_message(thread_id=thread.id, role="user", content=content_blocks) + print(f"Created message, message ID: {message.id}") + + run = project_client.agents.create_run(thread_id=thread.id, agent_id=agent.id) + + # Poll the run as long as run status is queued or in progress + while run.status in ["queued", "in_progress", "requires_action"]: + # Wait for a second + time.sleep(1) + run = project_client.agents.get_run(thread_id=thread.id, run_id=run.id) + print(f"Run status: {run.status}") + + if run.status == "failed": + print(f"Run failed: {run.last_error}") + + project_client.agents.delete_agent(agent.id) + print("Deleted agent") + + messages = project_client.agents.list_messages(thread_id=thread.id) + + # The messages are following in the reverse order, + # we will iterate them and output only text contents. + for data_point in reversed(messages.data): + last_message_content = data_point.content[-1] + if isinstance(last_message_content, MessageTextContent): + print(f"{data_point.role}: {last_message_content.text.value}") + + print(f"Messages: {messages}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_url.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_url.py new file mode 100644 index 000000000000..fd09bd81ce21 --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_url.py @@ -0,0 +1,90 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +""" +DESCRIPTION: + This sample demonstrates how to use basic agent operations using image url input for the + the Azure Agents service using a synchronous client. + +USAGE: + python sample_agents_image_input_url.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + 1) PROJECT_CONNECTION_STRING - The project connection string, as found in the overview page of your + Azure AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in + the "Models + endpoints" tab in your Azure AI Foundry project. +""" + +import os, time +from typing import List +from azure.ai.projects import AIProjectClient +from azure.identity import DefaultAzureCredential +from azure.ai.projects.models import ( + MessageTextContent, + MessageInputContentBlock, + MessageImageUrlParam, + MessageInputTextBlock, + MessageInputImageUrlBlock, +) + + +project_client = AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=os.environ["PROJECT_CONNECTION_STRING"], +) + +with project_client: + + agent = project_client.agents.create_agent( + model=os.environ["MODEL_DEPLOYMENT_NAME"], + name="my-assistant", + instructions="You are helpful assistant", + ) + print(f"Created agent, agent ID: {agent.id}") + + thread = project_client.agents.create_thread() + print(f"Created thread, thread ID: {thread.id}") + + image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg" + input_message = "Hello, what is in the image ?" + url_param = MessageImageUrlParam(url=image_url, detail="high") + content_blocks: List[MessageInputContentBlock] = [ + MessageInputTextBlock(text=input_message), + MessageInputImageUrlBlock(image_url=url_param), + ] + message = project_client.agents.create_message(thread_id=thread.id, role="user", content=content_blocks) + print(f"Created message, message ID: {message.id}") + + run = project_client.agents.create_run(thread_id=thread.id, agent_id=agent.id) + + # Poll the run as long as run status is queued or in progress + while run.status in ["queued", "in_progress", "requires_action"]: + # Wait for a second + time.sleep(1) + run = project_client.agents.get_run(thread_id=thread.id, run_id=run.id) + print(f"Run status: {run.status}") + + if run.status == "failed": + print(f"Run failed: {run.last_error}") + + project_client.agents.delete_agent(agent.id) + print("Deleted agent") + + messages = project_client.agents.list_messages(thread_id=thread.id) + + # The messages are following in the reverse order, + # we will iterate them and output only text contents. 
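+    # Each message can hold multiple content blocks; only the last block of each message is
+    # inspected here, and it is printed only when it is a MessageTextContent.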
+ for data_point in reversed(messages.data): + last_message_content = data_point.content[-1] + if isinstance(last_message_content, MessageTextContent): + print(f"{data_point.role}: {last_message_content.text.value}") + + print(f"Messages: {messages}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py index efb0691e8de5..e3edf7243c26 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py @@ -48,10 +48,17 @@ # Initialize agent OpenApi tool using the read in OpenAPI spec openapi_tool = OpenApiTool( - name="get_weather", spec=openapi_weather, description="Retrieve weather information for a location", auth=auth + name="get_weather", + spec=openapi_weather, + description="Retrieve weather information for a location", + auth=auth, + default_parameters=["format"], ) openapi_tool.add_definition( - name="get_countries", spec=openapi_countries, description="Retrieve a list of countries", auth=auth + name="get_countries", + spec=openapi_countries, + description="Retrieve a list of countries", + auth=auth, ) # Create agent with OpenApi tool and process assistant run @@ -62,7 +69,6 @@ instructions="You are a helpful assistant", tools=openapi_tool.definitions, ) - # [END create_agent_with_openapi] print(f"Created agent, ID: {agent.id}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py index 7a231513bbd6..84e496023a30 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@@ -36,7 +37,7 @@ import jsonref from azure.ai.projects import AIProjectClient from azure.identity import DefaultAzureCredential -from azure.ai.projects.models import OpenApiTool, OpenApiConnectionAuthDetails, OpenApiConnectionSecurityScheme +from azure.ai.projects.models import OpenApiTool, OpenApiConnectionAuthDetails, OpenApiConnectionSecurityScheme project_client = AIProjectClient.from_connection_string( @@ -50,22 +51,21 @@ print(connection.id) -with open('./tripadvisor_openapi.json', 'r') as f: +with open("./tripadvisor_openapi.json", "r") as f: openapi_spec = jsonref.loads(f.read()) # Create Auth object for the OpenApiTool (note that connection or managed identity auth setup requires additional setup in Azure) auth = OpenApiConnectionAuthDetails(security_scheme=OpenApiConnectionSecurityScheme(connection_id=connection.id)) # Initialize an Agent OpenApi tool using the read in OpenAPI spec -openapi = OpenApiTool(name="get_weather", spec=openapi_spec, description="Retrieve weather information for a location", auth=auth) +openapi = OpenApiTool( + name="get_location_reviews", spec=openapi_spec, description="Retrieve reviews for a given location", auth=auth +) # Create an Agent with OpenApi tool and process Agent run with project_client: agent = project_client.agents.create_agent( - model=model_name, - name="my-agent", - instructions="You are a helpful agent", - tools=openapi.definitions + model=model_name, name="my-agent", instructions="You are a helpful agent", tools=openapi.definitions ) print(f"Created agent, ID: {agent.id}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_run_with_toolset.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_run_with_toolset.py index 5f734cefa31b..51c3f2a90993 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_run_with_toolset.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_run_with_toolset.py @@ -44,6 +44,9 @@ toolset.add(functions) toolset.add(code_interpreter) + # To enable tool calls executed automatically + project_client.agents.enable_auto_function_calls(toolset=toolset) + agent = project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], name="my-assistant", diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_bing_grounding.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_bing_grounding.py index c727d11f5913..1b61f348b4c5 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_bing_grounding.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_bing_grounding.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_toolset.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_toolset.py index a42b1949d31e..ad709cf9c1ad 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_toolset.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_toolset.py @@ -77,6 +77,7 @@ def on_unhandled_event(self, event_type: str, event_data: Any) -> None: functions = FunctionTool(user_functions) toolset = ToolSet() toolset.add(functions) + project_client.agents.enable_auto_function_calls(toolset=toolset) agent = project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_bing_grounding.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_bing_grounding.py index 3b3248cb204f..a3bcc4663ca1 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_bing_grounding.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_bing_grounding.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_toolset.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_toolset.py index 52c8151a7d12..ee9010a02054 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_toolset.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_toolset.py @@ -44,6 +44,7 @@ toolset.add(functions) with project_client: + project_client.agents.enable_auto_function_calls(toolset=toolset) agent = project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], name="my-assistant", diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample1.prompty b/sdk/ai/azure-ai-projects/samples/inference/sample1.prompty new file mode 100644 index 000000000000..6dbcbf40bc6f --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/inference/sample1.prompty @@ -0,0 +1,30 @@ +--- +name: Basic Prompt +description: A basic prompt that uses the GPT-3 chat API to answer questions +authors: + - author_1 + - author_2 +model: + api: chat + configuration: + azure_deployment: gpt-4o-mini + parameters: + temperature: 1 + frequency_penalty: 0.5 + presence_penalty: 0.5 +--- +system: +You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services. + +# context +{{#rules}} +{{rule}} +{{/rules}} + +{{#chat_history}} +{{role}}: +{{content}} +{{/chat_history}} + +user: +{{input}} diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py new file mode 100644 index 000000000000..46064dab76df --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py @@ -0,0 +1,74 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------
+
+"""
+DESCRIPTION:
+    Given an AIProjectClient, this sample demonstrates how to get an authenticated
+    ChatCompletionsClient from the azure.ai.inference package, and then work with a prompt string.
+    For more information on the azure.ai.inference package see https://pypi.org/project/azure-ai-inference/.
+
+USAGE:
+    python sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py
+
+    Before running the sample:
+
+    pip install azure-ai-projects azure-identity
+
+    Set these environment variables with your own values:
+    * PROJECT_CONNECTION_STRING - The Azure AI Project connection string, as found in your AI Foundry project.
+    * MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
+"""
+
+import os
+from azure.ai.projects import AIProjectClient
+from azure.ai.projects.prompts import PromptTemplate
+from azure.ai.inference.models import UserMessage
+from azure.identity import DefaultAzureCredential
+
+project_connection_string = os.environ["PROJECT_CONNECTION_STRING"]
+model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
+
+with AIProjectClient.from_connection_string(
+    credential=DefaultAzureCredential(),
+    conn_str=project_connection_string,
+) as project_client:
+
+    with project_client.inference.get_chat_completions_client() as client:
+
+        prompt_template_str = """
+            system:
+            You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services.
+
+            # context
+            {{#rules}}
+            {{rule}}
+            {{/rules}}
+
+            {{#chat_history}}
+            {{role}}:
+            {{content}}
+            {{/chat_history}}
+
+            user:
+            {{input}}
+            """
+        prompt_template = PromptTemplate.from_string(api="chat", prompt_template=prompt_template_str)
+
+        input = "When I arrived, can I still have breakfast?"
+        rules = [
+            {"rule": "The check-in time is 3pm"},
+            {"rule": "The check-out time is 11am"},
+            {"rule": "Breakfast is served from 7am to 10am"},
+        ]
+        chat_history = [
+            {"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"},
+            {"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."},
+        ]
+        messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history)
+
+        response = client.complete(model=model_deployment_name, messages=messages)
+
+        print(response.choices[0].message.content)
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty.py
new file mode 100644
index 000000000000..c30393246f5f
--- /dev/null
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty.py
@@ -0,0 +1,57 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+
+"""
+DESCRIPTION:
+    Given an AIProjectClient, this sample demonstrates how to get an authenticated
+    ChatCompletionsClient from the azure.ai.inference package, and then work with Prompty.
+    For more information on the azure.ai.inference package see https://pypi.org/project/azure-ai-inference/.
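+    The template itself is defined in the adjacent "sample1.prompty" file, which this sample
+    loads with PromptTemplate.from_prompty.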
+ +USAGE: + python sample_chat_completions_with_azure_ai_inference_client_and_prompty.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + * PROJECT_CONNECTION_STRING - The Azure AI Project connection string, as found in your AI Foundry project. + * MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project. +""" + +import os +from azure.ai.projects import AIProjectClient +from azure.ai.projects.prompts import PromptTemplate +from azure.ai.inference.models import UserMessage +from azure.identity import DefaultAzureCredential + +project_connection_string = os.environ["PROJECT_CONNECTION_STRING"] +model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + +with AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=project_connection_string, +) as project_client: + + with project_client.inference.get_chat_completions_client() as client: + + path = "./sample1.prompty" + prompt_template = PromptTemplate.from_prompty(file_path=path) + + input = "When I arrived, can I still have breakfast?" + rules = [ + {"rule": "The check-in time is 3pm"}, + {"rule": "The check-out time is 11am"}, + {"rule": "Breakfast is served from 7am to 10am"}, + ] + chat_history = [ + {"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"}, + {"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."}, + ] + messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history) + + response = client.complete(model=model_deployment_name, messages=messages) + + print(response.choices[0].message.content) diff --git a/sdk/ai/azure-ai-projects/setup.py b/sdk/ai/azure-ai-projects/setup.py index 06b311209988..28b6a92413c5 100644 --- a/sdk/ai/azure-ai-projects/setup.py +++ b/sdk/ai/azure-ai-projects/setup.py @@ -100,4 +100,7 @@ "typing-extensions>=4.12.2", ], python_requires=">=3.8", + extras_require={ + "prompts": ["prompty"], + }, ) diff --git a/sdk/ai/azure-ai-projects/tests/agents/overload_assert_utils.py b/sdk/ai/azure-ai-projects/tests/agents/overload_assert_utils.py index 34863ac43717..8686492e1050 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/overload_assert_utils.py +++ b/sdk/ai/azure-ai-projects/tests/agents/overload_assert_utils.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression import io import json import unittest diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agent_models.py b/sdk/ai/azure-ai-projects/tests/agents/test_agent_models.py index fd68eeb3d427..d03d934b4679 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agent_models.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agent_models.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression from typing import Iterator, List from unittest.mock import Mock, patch import pytest diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agent_models_async.py b/sdk/ai/azure-ai-projects/tests/agents/test_agent_models_async.py index f721502f5b5d..d48d6a2e20f7 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agent_models_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agent_models_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression from typing import Any, AsyncIterator, List from unittest.mock import AsyncMock, patch import pytest diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations.py 
b/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations.py index 4a1b8e00217a..76675ef5a884 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations.py @@ -213,12 +213,20 @@ def _assert_tool_call(self, submit_tool_mock: MagicMock, run_id: str, tool_set: else: submit_tool_mock.assert_not_called() - def _assert_toolset_dict(self, project_client: AIProjectClient, agent_id: str, toolset: Optional[ToolSet]): - """Check that the tool set dictionary state is as expected.""" - if toolset is None: - assert agent_id not in project_client.agents._toolset - else: - assert project_client.agents._toolset.get(agent_id) is not None + def _set_toolcalls( + self, project_client: AgentsOperations, toolset1: Optional[ToolSet], toolset2: Optional[ToolSet] + ) -> None: + """Set the tool calls for the agent.""" + if toolset1 and toolset2: + function_in_toolset1 = set(toolset1.get_tool(tool_type=FunctionTool)._functions.values()) + function_in_toolset2 = set(toolset2.get_tool(tool_type=FunctionTool)._functions.values()) + function_tool = FunctionTool(function_in_toolset1) + function_tool.add_functions(function_in_toolset2) + project_client.enable_auto_function_calls(function_tool=function_tool) + elif toolset1: + project_client.enable_auto_function_calls(toolset=toolset1) + elif toolset2: + project_client.enable_auto_function_calls(toolset=toolset2) @patch("azure.ai.projects._patch.PipelineClient") @pytest.mark.parametrize( @@ -261,6 +269,7 @@ def test_multiple_agents_create( mock_pipeline_client_gen.return_value = mock_pipeline project_client = self.get_mock_client() with project_client: + self._set_toolcalls(project_client.agents, toolset1, toolset2) # Check that pipelines are created as expected. agent1 = project_client.agents.create_agent( model="gpt-4-1106-preview", @@ -283,16 +292,9 @@ def test_multiple_agents_create( project_client.agents.create_and_process_run(thread_id="some_thread_id", agent_id=agent2.id) self._assert_tool_call(project_client.agents.submit_tool_outputs_to_run, "run456", toolset2) - # Check the contents of a toolset - self._assert_toolset_dict(project_client, agent1.id, toolset1) - self._assert_toolset_dict(project_client, agent2.id, toolset2) # Check that we cleanup tools after deleting agent. project_client.agents.delete_agent(agent1.id) - self._assert_toolset_dict(project_client, agent1.id, None) - self._assert_toolset_dict(project_client, agent2.id, toolset2) project_client.agents.delete_agent(agent2.id) - self._assert_toolset_dict(project_client, agent1.id, None) - self._assert_toolset_dict(project_client, agent2.id, None) @patch("azure.ai.projects._patch.PipelineClient") @pytest.mark.parametrize( @@ -333,12 +335,11 @@ def test_update_agent_tools( instructions="You are a helpful assistant", toolset=toolset1, ) - self._assert_toolset_dict(project_client, agent1.id, toolset1) - project_client.agents.update_agent(agent1.id, toolset=toolset2) + agent1 = project_client.agents.update_agent(agent1.id, toolset=toolset2) if toolset2 is None: - self._assert_toolset_dict(project_client, agent1.id, toolset1) + assert agent1.tools == None else: - self._assert_toolset_dict(project_client, agent1.id, toolset2) + assert agent1.tools[0].function.name == function2.__name__ @patch("azure.ai.projects._patch.PipelineClient") @pytest.mark.parametrize( @@ -382,6 +383,7 @@ def test_create_run_tools_override( project_client = self.get_mock_client() with project_client: # Check that pipelines are created as expected. 
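+            # Register the functions from the provided toolsets up front so tool calls are executed
+            # automatically (see _set_toolcalls above, which wraps enable_auto_function_calls).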
+ self._set_toolcalls(project_client.agents, toolset1, toolset2) agent1 = project_client.agents.create_agent( model="gpt-4-1106-preview", name="first", @@ -389,7 +391,6 @@ def test_create_run_tools_override( toolset=toolset1, ) self._assert_pipeline_and_reset(mock_pipeline._pipeline.run, tool_set=toolset1) - self._assert_toolset_dict(project_client, agent1.id, toolset1) # Create run with new tool set, which also can be none. project_client.agents.create_and_process_run( @@ -399,7 +400,6 @@ def test_create_run_tools_override( self._assert_tool_call(project_client.agents.submit_tool_outputs_to_run, "run123", toolset2) else: self._assert_tool_call(project_client.agents.submit_tool_outputs_to_run, "run123", toolset1) - self._assert_toolset_dict(project_client, agent1.id, toolset1) @patch("azure.ai.projects._patch.PipelineClient") @pytest.mark.parametrize( @@ -437,6 +437,7 @@ def test_with_azure_function( project_client = self.get_mock_client() with project_client: # Check that pipelines are created as expected. + self._set_toolcalls(project_client.agents, toolset, None) agent1 = project_client.agents.create_agent( model="gpt-4-1106-preview", name="first", @@ -499,6 +500,7 @@ def test_handle_submit_tool_outputs( project_client = self.get_mock_client() with project_client: # Check that pipelines are created as expected. + self._set_toolcalls(project_client.agents, toolset, None) agent1 = project_client.agents.create_agent( model="gpt-4-1106-preview", name="first", @@ -545,9 +547,8 @@ async def test_create_stream_with_tool_calls(self, mock_submit_tool_outputs_to_r functions = FunctionTool(user_functions) toolset = ToolSet() toolset.add(functions) - operation = AgentsOperations() - operation._toolset = {"asst_01": toolset} + operation.enable_auto_function_calls(toolset=toolset) count = 0 with operation.create_stream(thread_id="thread_id", agent_id="asst_01") as stream: diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations_async.py b/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations_async.py index d3ab2004c423..d45b1612d594 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations_async.py @@ -215,12 +215,20 @@ def _assert_tool_call(self, submit_tool_mock: AsyncMock, run_id: str, tool_set: else: submit_tool_mock.assert_not_called() - def _assert_toolset_dict(self, project_client: AIProjectClient, agent_id: str, toolset: Optional[AsyncToolSet]): - """Check that the tool set dictionary state is as expected.""" - if toolset is None: - assert agent_id not in project_client.agents._toolset - else: - assert project_client.agents._toolset.get(agent_id) is not None + def _set_toolcalls( + self, project_client: AgentsOperations, toolset1: Optional[AsyncToolSet], toolset2: Optional[AsyncToolSet] + ) -> None: + """Set the tool calls for the agent.""" + if toolset1 and toolset2: + function_in_toolset1 = set(toolset1.get_tool(tool_type=AsyncFunctionTool)._functions.values()) + function_in_toolset2 = set(toolset2.get_tool(tool_type=AsyncFunctionTool)._functions.values()) + function_tool = AsyncFunctionTool(function_in_toolset1) + function_tool.add_functions(function_in_toolset2) + project_client.enable_auto_function_calls(function_tool=function_tool) + elif toolset1: + project_client.enable_auto_function_calls(toolset=toolset1) + elif toolset2: + project_client.enable_auto_function_calls(toolset=toolset2) @pytest.mark.asyncio @patch("azure.ai.projects.aio._patch.AsyncPipelineClient") @@ -264,6 
+272,7 @@ async def test_multiple_agents_create( mock_pipeline_client_gen.return_value = mock_pipeline project_client = self.get_mock_client() async with project_client: + self._set_toolcalls(project_client.agents, toolset1, toolset2) # Check that pipelines are created as expected. agent1 = await project_client.agents.create_agent( model="gpt-4-1106-preview", @@ -286,16 +295,6 @@ async def test_multiple_agents_create( await project_client.agents.create_and_process_run(thread_id="some_thread_id", agent_id=agent2.id) self._assert_tool_call(project_client.agents.submit_tool_outputs_to_run, "run456", toolset2) - # Check the contents of a toolset - self._assert_toolset_dict(project_client, agent1.id, toolset1) - self._assert_toolset_dict(project_client, agent2.id, toolset2) - # Check that we cleanup tools after deleting agent. - await project_client.agents.delete_agent(agent1.id) - self._assert_toolset_dict(project_client, agent1.id, None) - self._assert_toolset_dict(project_client, agent2.id, toolset2) - await project_client.agents.delete_agent(agent2.id) - self._assert_toolset_dict(project_client, agent1.id, None) - self._assert_toolset_dict(project_client, agent2.id, None) @pytest.mark.asyncio @patch("azure.ai.projects.aio._patch.AsyncPipelineClient") @@ -337,12 +336,11 @@ async def test_update_agent_tools( instructions="You are a helpful assistant", toolset=toolset1, ) - self._assert_toolset_dict(project_client, agent1.id, toolset1) - await project_client.agents.update_agent(agent1.id, toolset=toolset2) + agent1 = await project_client.agents.update_agent(agent1.id, toolset=toolset2) if toolset2 is None: - self._assert_toolset_dict(project_client, agent1.id, toolset1) + assert agent1.tools == None else: - self._assert_toolset_dict(project_client, agent1.id, toolset2) + assert agent1.tools[0].function.name == function2.__name__ @pytest.mark.asyncio @patch("azure.ai.projects.aio._patch.AsyncPipelineClient") @@ -394,8 +392,8 @@ async def test_create_run_tools_override( toolset=toolset1, ) self._assert_pipeline_and_reset(mock_pipeline._pipeline.run, tool_set=toolset1) - self._assert_toolset_dict(project_client, agent1.id, toolset1) + self._set_toolcalls(project_client.agents, toolset1, toolset2) # Create run with new tool set, which also can be none. await project_client.agents.create_and_process_run( thread_id="some_thread_id", agent_id=agent1.id, toolset=toolset2 @@ -404,7 +402,6 @@ async def test_create_run_tools_override( self._assert_tool_call(project_client.agents.submit_tool_outputs_to_run, "run123", toolset2) else: self._assert_tool_call(project_client.agents.submit_tool_outputs_to_run, "run123", toolset1) - self._assert_toolset_dict(project_client, agent1.id, toolset1) @pytest.mark.asyncio @patch("azure.ai.projects.aio._patch.AsyncPipelineClient") @@ -441,8 +438,10 @@ async def test_with_azure_function( mock_pipeline._pipeline.run.return_value = mock_pipeline_response mock_pipeline_client_gen.return_value = mock_pipeline project_client = self.get_mock_client() + async with project_client: # Check that pipelines are created as expected. + self._set_toolcalls(project_client.agents, toolset, None) agent1 = await project_client.agents.create_agent( model="gpt-4-1106-preview", name="first", @@ -505,6 +504,7 @@ async def test_handle_submit_tool_outputs( mock_pipeline_client_gen.return_value = mock_pipeline project_client = self.get_mock_client() async with project_client: + self._set_toolcalls(project_client.agents, toolset, None) # Check that pipelines are created as expected. 
agent1 = await project_client.agents.create_agent( model="gpt-4-1106-preview", @@ -554,7 +554,7 @@ async def test_create_stream_with_tool_calls(self, mock_submit_tool_outputs_to_r toolset.add(functions) operation = AgentsOperations() - operation._toolset = {"asst_01": toolset} + operation.enable_auto_function_calls(toolset=toolset) count = 0 async with await operation.create_stream(thread_id="thread_id", agent_id="asst_01") as stream: diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agents_client.py b/sdk/ai/azure-ai-projects/tests/agents/test_agents_client.py index 1b60f28d302a..cf0e0ad0ca59 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agents_client.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agents_client.py @@ -33,7 +33,7 @@ AgentEventHandler, AgentStreamEvent, AgentThread, - AzureAISearchTool, + AzureAISearchTool, AzureFunctionStorageQueue, AzureFunctionTool, CodeInterpreterTool, @@ -90,7 +90,7 @@ azure_ai_projects_agents_tests_data_path="azureml://subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rg-resour-cegr-oupfoo1/workspaces/abcd-abcdabcdabcda-abcdefghijklm/datastores/workspaceblobstore/paths/LocalUpload/000000000000/product_info_1.md", azure_ai_projects_agents_tests_storage_queue="https://foobar.queue.core.windows.net", azure_ai_projects_agents_tests_search_index_name="sample_index", - azure_ai_projects_agents_tests_search_connection_name="search_connection_name" + azure_ai_projects_agents_tests_search_connection_name="search_connection_name", ) @@ -2817,50 +2817,50 @@ def test_azure_ai_search_tool(self, **kwargs): with self.create_client(**kwargs) as client: assert isinstance(client, AIProjectClient) - # Create AzureAISearchTool - connection_name = kwargs.pop("azure_ai_projects_agents_tests_search_connection_name", "my-search-connection-name") + # Create AzureAISearchTool + connection_name = kwargs.pop( + "azure_ai_projects_agents_tests_search_connection_name", "my-search-connection-name" + ) connection = client.connections.get(connection_name=connection_name) conn_id = connection.id index_name = kwargs.pop("azure_ai_projects_agents_tests_search_index_name", "my-search-index") - + azure_search_tool = AzureAISearchTool( index_connection_id=conn_id, - index_name=index_name, + index_name=index_name, ) - + # Create agent with the search tool agent = client.agents.create_agent( model="gpt-4o", name="search-agent", instructions="You are a helpful assistant that can search for information using Azure AI Search.", tools=azure_search_tool.definitions, - tool_resources=azure_search_tool.resources + tool_resources=azure_search_tool.resources, ) assert agent.id print(f"Created agent with ID: {agent.id}") - + # Create thread thread = client.agents.create_thread() assert thread.id print(f"Created thread with ID: {thread.id}") - + # Create message message = client.agents.create_message( - thread_id=thread.id, - role="user", - content="Search for information about iPhone prices." + thread_id=thread.id, role="user", content="Search for information about iPhone prices." 
) assert message.id print(f"Created message with ID: {message.id}") - + # Create and process run run = client.agents.create_and_process_run(thread_id=thread.id, agent_id=agent.id) assert run.status == RunStatus.COMPLETED, run.last_error.message - + # List messages to verify tool was used messages = client.agents.list_messages(thread_id=thread.id) assert len(messages.data) > 0 - + # Clean up client.agents.delete_agent(agent.id) print("Deleted agent") diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_overload_assert.py b/sdk/ai/azure-ai-projects/tests/agents/test_overload_assert.py index 422e2b7b1595..c460957fb54d 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_overload_assert.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_overload_assert.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression import unittest import pytest from azure.ai.projects.operations import AgentsOperations diff --git a/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor.py b/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor.py index 2eca8bd339ab..db43e4536d71 100644 --- a/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor.py +++ b/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor.py @@ -54,16 +54,6 @@ _utils._span_impl_type = settings.tracing_implementation() -# TODO - remove when https://github.com/Azure/azure-sdk-for-python/issues/40086 is fixed -class FakeToolSetDict(dict): - def __init__(self, toolset=None, *args, **kwargs): - super().__init__(*args, **kwargs) - self.toolset = toolset - - def get(self, k, default=None): - return self.toolset - - class TestAiAgentsInstrumentor(AzureRecordedTestCase): """Tests for AI agents instrumentor.""" @@ -501,7 +491,7 @@ def fetch_weather(location: str) -> str: ) # workaround for https://github.com/Azure/azure-sdk-for-python/issues/40086 - client.agents._toolset = FakeToolSetDict(toolset=toolset) + client.agents.enable_auto_function_calls(toolset=toolset) thread = client.agents.create_thread() message = client.agents.create_message( @@ -724,7 +714,7 @@ def fetch_weather(location: str) -> str: ) # workaround for https://github.com/Azure/azure-sdk-for-python/issues/40086 - client.agents._toolset = FakeToolSetDict(toolset=toolset) + client.agents.enable_auto_function_calls(toolset=toolset) thread = client.agents.create_thread() message = client.agents.create_message(thread_id=thread.id, role="user", content="Времето в София?") @@ -826,12 +816,14 @@ def fetch_weather(location: str) -> str: toolset.add(functions) client = self.create_client(**kwargs) + client.agents.enable_auto_function_calls(toolset=toolset) + agent = client.agents.create_agent( model="gpt-4o", name="my-agent", instructions="You are helpful agent", toolset=toolset ) # workaround for https://github.com/Azure/azure-sdk-for-python/issues/40086 - client.agents._toolset = FakeToolSetDict(toolset=toolset) + client.agents.enable_auto_function_calls(toolset=toolset) thread = client.agents.create_thread() message = client.agents.create_message( thread_id=thread.id, role="user", content="What is the weather in New York?" 
diff --git a/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor_async.py b/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor_async.py index 804a4fc287fb..4e89c66ade13 100644 --- a/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor_async.py +++ b/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor_async.py @@ -53,16 +53,6 @@ content_tracing_initial_value = os.getenv(CONTENT_TRACING_ENV_VARIABLE) -# TODO - remove when https://github.com/Azure/azure-sdk-for-python/issues/40086 is fixed -class FakeToolSetDict(dict): - def __init__(self, toolset=None, *args, **kwargs): - super().__init__(*args, **kwargs) - self.toolset = toolset - - def get(self, k, default=None): - return self.toolset - - class TestAiAgentsInstrumentor(AzureRecordedTestCase): """Tests for AI agents instrumentor.""" @@ -437,12 +427,14 @@ def fetch_weather(location: str) -> str: toolset.add(functions) client = self.create_client(**kwargs) + client.agents.enable_auto_function_calls(toolset=toolset) + agent = await client.agents.create_agent( model="gpt-4o", name="my-agent", instructions="You are helpful agent", toolset=toolset ) # workaround for https://github.com/Azure/azure-sdk-for-python/issues/40086 - client.agents._toolset = FakeToolSetDict(toolset=toolset) + client.agents.enable_auto_function_calls(toolset=toolset) thread = await client.agents.create_thread() message = await client.agents.create_message( @@ -662,7 +654,7 @@ def fetch_weather(location: str) -> str: ) # workaround for https://github.com/Azure/azure-sdk-for-python/issues/40086 - client.agents._toolset = FakeToolSetDict(toolset=toolset) + client.agents.enable_auto_function_calls(toolset=toolset) thread = await client.agents.create_thread() message = await client.agents.create_message( diff --git a/sdk/ai/azure-ai-projects/tsp-location.yaml b/sdk/ai/azure-ai-projects/tsp-location.yaml index 08082bba7677..95a2db669486 100644 --- a/sdk/ai/azure-ai-projects/tsp-location.yaml +++ b/sdk/ai/azure-ai-projects/tsp-location.yaml @@ -1,4 +1,4 @@ directory: specification/ai/Azure.AI.Projects -commit: 47ce1b47aef1e5351de994508e32e6ac39128011 +commit: 66f3f5a1184215abf25d93f185b55dfbc75b0050 repo: Azure/azure-rest-api-specs additionalDirectories: diff --git a/shared_requirements.txt b/shared_requirements.txt index 0cf1400ece52..46cecc1c7e82 100644 --- a/shared_requirements.txt +++ b/shared_requirements.txt @@ -75,4 +75,5 @@ httpx pandas nltk azure-monitor-opentelemetry -pyrit \ No newline at end of file +pyrit +prompty
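Note: the prompt template utilities exercised by the inference samples above are published behind the
new optional "prompts" extra declared in setup.py (extras_require={"prompts": ["prompty"]}), which can
be installed with: pip install "azure-ai-projects[prompts]". A minimal sketch of the pattern, assuming
the sample1.prompty file added in this change sits in the working directory:

    from azure.ai.projects.prompts import PromptTemplate

    # Load the Prompty file and render chat messages from its template.
    prompt_template = PromptTemplate.from_prompty(file_path="./sample1.prompty")
    messages = prompt_template.create_messages(
        input="When I arrive, can I still have breakfast?",
        rules=[{"rule": "Breakfast is served from 7am to 10am"}],
        chat_history=[{"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"}],
    )
    # The rendered messages can then be passed to ChatCompletionsClient.complete(), as the two
    # inference samples above do.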