From da4921d7de2670c9bf27869d235fc3a7a3823bdd Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Fri, 28 Mar 2025 14:01:30 -0700 Subject: [PATCH 01/13] Set version --- sdk/ai/azure-ai-projects/CHANGELOG.md | 9 +++++++++ sdk/ai/azure-ai-projects/azure/ai/projects/_version.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/sdk/ai/azure-ai-projects/CHANGELOG.md b/sdk/ai/azure-ai-projects/CHANGELOG.md index e6f3442fb358..0b966e20fa07 100644 --- a/sdk/ai/azure-ai-projects/CHANGELOG.md +++ b/sdk/ai/azure-ai-projects/CHANGELOG.md @@ -1,5 +1,14 @@ # Release History +## 1.0.0b9 (Unreleased) + +### Features added + +### Sample updates + +### Bugs Fixed + + ## 1.0.0b8 (2025-03-28) ### Features added diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_version.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_version.py index ca67f288ad6b..b1c2836b6921 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_version.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "1.0.0b8" +VERSION = "1.0.0b9" From 75ae5da02aabbef7350a38204913ee098de6ac79 Mon Sep 17 00:00:00 2001 From: David Wu Date: Tue, 1 Apr 2025 00:50:10 -0700 Subject: [PATCH 02/13] Utilities to load prompt template strings and Prompty (#40287) * Add Prompty as dependency * Ignore linters * Fix linter issues * Address PR review comments --- .vscode/cspell.json | 1 + sdk/ai/azure-ai-projects/CHANGELOG.md | 1 + .../azure/ai/projects/prompts/__init__.py | 15 +++ .../azure/ai/projects/prompts/_patch.py | 123 ++++++++++++++++++ .../azure/ai/projects/prompts/_utils.py | 39 ++++++ sdk/ai/azure-ai-projects/dev_requirements.txt | 1 + .../samples/inference/sample1.prompty | 30 +++++ ...e_ai_inference_client_and_prompt_string.py | 75 +++++++++++ ...h_azure_ai_inference_client_and_prompty.py | 59 +++++++++ sdk/ai/azure-ai-projects/setup.py | 3 + shared_requirements.txt | 3 +- 11 files changed, 349 insertions(+), 1 deletion(-) create mode 100644 sdk/ai/azure-ai-projects/azure/ai/projects/prompts/__init__.py create mode 100644 sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_patch.py create mode 100644 sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_utils.py create mode 100644 sdk/ai/azure-ai-projects/samples/inference/sample1.prompty create mode 100644 sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py create mode 100644 sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty.py diff --git a/.vscode/cspell.json b/.vscode/cspell.json index 404795164d2e..f78f612efa2f 100644 --- a/.vscode/cspell.json +++ b/.vscode/cspell.json @@ -377,6 +377,7 @@ "prebuilts", "premf", "prevsnapshot", + "prompty", "pschema", "PSECRET", "pydantic", diff --git a/sdk/ai/azure-ai-projects/CHANGELOG.md b/sdk/ai/azure-ai-projects/CHANGELOG.md index 0b966e20fa07..f1742d8d10e3 100644 --- 
a/sdk/ai/azure-ai-projects/CHANGELOG.md +++ b/sdk/ai/azure-ai-projects/CHANGELOG.md @@ -3,6 +3,7 @@ ## 1.0.0b9 (Unreleased) ### Features added +* Utilities to load prompt template strings and Prompty file content ### Sample updates diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/__init__.py new file mode 100644 index 000000000000..0b712abea098 --- /dev/null +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/__init__.py @@ -0,0 +1,15 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +# pylint: disable=unused-import +try: + import prompty # pylint: disable=unused-import +except ImportError as exc: + raise ImportError( + "The 'prompty' package is required to use the 'azure.ai.projects.prompts' module. " + "Please install it by running 'pip install prompty'." + ) from exc + +from ._patch import patch_sdk as _patch_sdk, PromptTemplate +_patch_sdk() diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_patch.py new file mode 100644 index 000000000000..166fcceb2f03 --- /dev/null +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_patch.py @@ -0,0 +1,123 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +# pylint: disable=line-too-long,R,no-member +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + +import traceback +from pathlib import Path +from typing import Any, Dict, List, Optional +from typing_extensions import Self +from prompty import headless, load, prepare +from prompty.core import Prompty +from ._utils import remove_leading_empty_space + + +class PromptTemplate: + """The helper class which takes variant of inputs, e.g. Prompty format or string, and returns the parsed prompt in an array. + Prompty library is required to be installed to use this class. + """ + + @classmethod + def from_prompty(cls, file_path: str) -> Self: + """Initialize a PromptTemplate object from a prompty file. + + :param file_path: The path to the prompty file. + :type file_path: str + :return: The PromptTemplate object. + :rtype: PromptTemplate + """ + if not file_path: + raise ValueError("Please provide file_path") + + # Get the absolute path of the file by `traceback.extract_stack()`, it's "-2" because: + # In the stack, the last function is the current function. + # The second last function is the caller function, which is the root of the file_path. + stack = traceback.extract_stack() + caller = Path(stack[-2].filename) + abs_file_path = Path(caller.parent / Path(file_path)).resolve().absolute() + + prompty = load(str(abs_file_path)) + prompty.template.type = "mustache" # For Azure, default to mustache instead of Jinja2 + return cls(prompty=prompty) + + @classmethod + def from_string(cls, prompt_template: str, api: str = "chat", model_name: Optional[str] = None) -> Self: + """Initialize a PromptTemplate object from a message template. + + :param prompt_template: The prompt template string. + :type prompt_template: str + :param api: The API type, e.g. "chat" or "completion". + :type api: str + :param model_name: The model name, e.g. "gpt-4o-mini". + :type model_name: str + :return: The PromptTemplate object. 
+ :rtype: PromptTemplate + """ + prompt_template = remove_leading_empty_space(prompt_template) + prompty = headless(api=api, content=prompt_template) + prompty.template.type = "mustache" # For Azure, default to mustache instead of Jinja2 + prompty.template.parser = "prompty" + return cls( + api=api, + model_name=model_name, + prompty=prompty, + ) + + def __init__( + self, + *, + api: str = "chat", + prompty: Optional[Prompty] = None, + prompt_template: Optional[str] = None, + model_name: Optional[str] = None, + ) -> None: + self.prompty = prompty + if self.prompty is not None: + self.model_name = ( + self.prompty.model.configuration["azure_deployment"] + if "azure_deployment" in self.prompty.model.configuration + else None + ) + self.parameters = self.prompty.model.parameters + self._config = {} + elif prompt_template is not None: + self.model_name = model_name + self.parameters = {} + # _config is a dict to hold the internal configuration + self._config = { + "api": api if api is not None else "chat", + "prompt_template": prompt_template, + } + else: + raise ValueError("Please pass valid arguments for PromptTemplate") + + def create_messages(self, data: Optional[Dict[str, Any]] = None, **kwargs) -> List[Dict[str, Any]]: + """Render the prompt template with the given data. + + :param data: The data to render the prompt template with. + :type data: Optional[Dict[str, Any]] + :return: The rendered prompt template. + :rtype: List[Dict[str, Any]] + """ + if data is None: + data = kwargs + + if self.prompty is not None: + parsed = prepare(self.prompty, data) + return parsed # type: ignore + else: + raise ValueError("Please provide valid prompt template") + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_utils.py b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_utils.py new file mode 100644 index 000000000000..a85e193322e5 --- /dev/null +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_utils.py @@ -0,0 +1,39 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +import sys + + +def remove_leading_empty_space(multiline_str: str) -> str: + """ + Processes a multiline string by: + 1. Removing empty lines + 2. Finding the minimum leading spaces + 3. Indenting all lines to the minimum level + + :param multiline_str: The input multiline string. + :type multiline_str: str + :return: The processed multiline string. + :rtype: str + """ + lines = multiline_str.splitlines() + start_index = 0 + while start_index < len(lines) and lines[start_index].strip() == "": + start_index += 1 + + # Find the minimum number of leading spaces + min_spaces = sys.maxsize + for line in lines[start_index:]: + if len(line.strip()) == 0: + continue + spaces = len(line) - len(line.lstrip()) + spaces += line.lstrip().count("\t") * 2 # Count tabs as 2 spaces + min_spaces = min(min_spaces, spaces) + + # Remove leading spaces and indent to the minimum level + processed_lines = [] + for line in lines[start_index:]: + processed_lines.append(line[min_spaces:]) + + return "\n".join(processed_lines) diff --git a/sdk/ai/azure-ai-projects/dev_requirements.txt b/sdk/ai/azure-ai-projects/dev_requirements.txt index 0b28efcde9bc..89df890b9973 100644 --- a/sdk/ai/azure-ai-projects/dev_requirements.txt +++ b/sdk/ai/azure-ai-projects/dev_requirements.txt @@ -8,3 +8,4 @@ openai opentelemetry-sdk opentelemetry-exporter-otlp-proto-grpc 
azure-ai-ml +prompty diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample1.prompty b/sdk/ai/azure-ai-projects/samples/inference/sample1.prompty new file mode 100644 index 000000000000..6dbcbf40bc6f --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/inference/sample1.prompty @@ -0,0 +1,30 @@ +--- +name: Basic Prompt +description: A basic prompt that uses the GPT-3 chat API to answer questions +authors: + - author_1 + - author_2 +model: + api: chat + configuration: + azure_deployment: gpt-4o-mini + parameters: + temperature: 1 + frequency_penalty: 0.5 + presence_penalty: 0.5 +--- +system: +You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services. + +# context +{{#rules}} +{{rule}} +{{/rules}} + +{{#chat_history}} +{{role}}: +{{content}} +{{/chat_history}} + +user: +{{input}} diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py new file mode 100644 index 000000000000..355daa72f0e6 --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py @@ -0,0 +1,75 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to get an authenticated + async ChatCompletionsClient from the azure.ai.inference package, and then work with a prompt string. + For more information on the azure.ai.inference package see https://pypi.org/project/azure-ai-inference/. 
+ +USAGE: + python sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + * PROJECT_CONNECTION_STRING - The Azure AI Project connection string, as found in your AI Foundry project. + * MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project. +""" + +import os +from azure.ai.projects import AIProjectClient +from azure.ai.projects.prompts import PromptTemplate +from azure.ai.inference.models import UserMessage +from azure.identity import DefaultAzureCredential + +project_connection_string = os.environ["PROJECT_CONNECTION_STRING"] +model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + +with AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=project_connection_string, +) as project_client: + + with project_client.inference.get_chat_completions_client() as client: + + prompt_template_str = """ + system: + You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services. + + # context + {{#rules}} + {{rule}} + {{/rules}} + + {{#chat_history}} + {{role}}: + {{content}} + {{/chat_history}} + + user: + {{input}} + """ + prompt_template = PromptTemplate.from_string(api="chat", prompt_template=prompt_template_str) + + input = "When I arrived, can I still have breakfast?" + rules = [ + {"rule": "The check-in time is 3pm"}, + {"rule": "The check-out time is 11am"}, + {"rule": "Breakfast is served from 7am to 10am"}, + ] + chat_history = [ + {"role": "user", "content": "I'll arrive at 2pm. 
What's the check-in and check-out time?"}, + {"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."}, + ] + messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history) + + response = client.complete( + model=model_deployment_name, messages=messages + ) + + print(response.choices[0].message.content) diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty.py new file mode 100644 index 000000000000..8c9a0f3a34a8 --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty.py @@ -0,0 +1,59 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + Given an AIProjectClient, this sample demonstrates how to get an authenticated + async ChatCompletionsClient from the azure.ai.inference package, and then work with Prompty. + For more information on the azure.ai.inference package see https://pypi.org/project/azure-ai-inference/. + +USAGE: + python sample_chat_completions_with_azure_ai_inference_client_and_prompty.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + * PROJECT_CONNECTION_STRING - The Azure AI Project connection string, as found in your AI Foundry project. + * MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project. 
+""" + +import os +from azure.ai.projects import AIProjectClient +from azure.ai.projects.prompts import PromptTemplate +from azure.ai.inference.models import UserMessage +from azure.identity import DefaultAzureCredential + +project_connection_string = os.environ["PROJECT_CONNECTION_STRING"] +model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] + +with AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=project_connection_string, +) as project_client: + + with project_client.inference.get_chat_completions_client() as client: + + path = "./sample1.prompty" + prompt_template = PromptTemplate.from_prompty(file_path=path) + + input = "When I arrived, can I still have breakfast?" + rules = [ + {"rule": "The check-in time is 3pm"}, + {"rule": "The check-out time is 11am"}, + {"rule": "Breakfast is served from 7am to 10am"}, + ] + chat_history = [ + {"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"}, + {"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."}, + ] + messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history) + + response = client.complete( + model=model_deployment_name, messages=messages + ) + + print(response.choices[0].message.content) diff --git a/sdk/ai/azure-ai-projects/setup.py b/sdk/ai/azure-ai-projects/setup.py index 06b311209988..28b6a92413c5 100644 --- a/sdk/ai/azure-ai-projects/setup.py +++ b/sdk/ai/azure-ai-projects/setup.py @@ -100,4 +100,7 @@ "typing-extensions>=4.12.2", ], python_requires=">=3.8", + extras_require={ + "prompts": ["prompty"], + }, ) diff --git a/shared_requirements.txt b/shared_requirements.txt index 8aed1a7b0ae3..4d3e6f59a637 100644 --- a/shared_requirements.txt +++ b/shared_requirements.txt @@ -72,4 +72,5 @@ promptflow-core promptflow-devkit nltk azure-monitor-opentelemetry -pyrit \ No newline at end of file +pyrit +prompty From 47eea99f863dfc0ba8186980cd15a3043bb6ae83 
Mon Sep 17 00:00:00 2001 From: Glenn Harper Date: Wed, 2 Apr 2025 13:21:07 -0700 Subject: [PATCH 03/13] auto-genned code --- .../azure/ai/projects/models/__init__.py | 12 + .../azure/ai/projects/models/_enums.py | 2 + .../azure/ai/projects/models/_models.py | 258 +++++++++++++++++- .../sample_agents_openapi_connection_auth.py | 9 +- 4 files changed, 270 insertions(+), 11 deletions(-) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py index a44d43c67e3b..a3e6b9287cd0 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py @@ -29,6 +29,7 @@ AzureFunctionDefinition, AzureFunctionStorageQueue, AzureFunctionToolDefinition, + BingCustomSearchToolDefinition, BingGroundingToolDefinition, CodeInterpreterToolDefinition, CodeInterpreterToolResource, @@ -115,6 +116,7 @@ RunStepCodeInterpreterToolCallDetails, RunStepCodeInterpreterToolCallOutput, RunStepCompletionUsage, + RunStepCustomSearchToolCall, RunStepDelta, RunStepDeltaChunk, RunStepDeltaCodeInterpreterDetailItemObject, @@ -141,9 +143,12 @@ RunStepMessageCreationDetails, RunStepMessageCreationReference, RunStepMicrosoftFabricToolCall, + RunStepOpenAPIToolCall, RunStepSharepointToolCall, RunStepToolCall, RunStepToolCallDetails, + SearchConfiguration, + SearchConfigurationList, SharepointToolDefinition, SubmitToolOutputsAction, SubmitToolOutputsDetails, @@ -163,6 +168,7 @@ UpdateCodeInterpreterToolResourceOptions, UpdateFileSearchToolResourceOptions, UpdateToolResourcesOptions, + UploadFileRequest, VectorStore, VectorStoreAutoChunkingStrategyRequest, VectorStoreAutoChunkingStrategyResponse, @@ -244,6 +250,7 @@ "AzureFunctionDefinition", "AzureFunctionStorageQueue", "AzureFunctionToolDefinition", + "BingCustomSearchToolDefinition", "BingGroundingToolDefinition", "CodeInterpreterToolDefinition", "CodeInterpreterToolResource", @@ -330,6 +337,7 @@ 
"RunStepCodeInterpreterToolCallDetails", "RunStepCodeInterpreterToolCallOutput", "RunStepCompletionUsage", + "RunStepCustomSearchToolCall", "RunStepDelta", "RunStepDeltaChunk", "RunStepDeltaCodeInterpreterDetailItemObject", @@ -356,9 +364,12 @@ "RunStepMessageCreationDetails", "RunStepMessageCreationReference", "RunStepMicrosoftFabricToolCall", + "RunStepOpenAPIToolCall", "RunStepSharepointToolCall", "RunStepToolCall", "RunStepToolCallDetails", + "SearchConfiguration", + "SearchConfigurationList", "SharepointToolDefinition", "SubmitToolOutputsAction", "SubmitToolOutputsDetails", @@ -378,6 +389,7 @@ "UpdateCodeInterpreterToolResourceOptions", "UpdateFileSearchToolResourceOptions", "UpdateToolResourcesOptions", + "UploadFileRequest", "VectorStore", "VectorStoreAutoChunkingStrategyRequest", "VectorStoreAutoChunkingStrategyResponse", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py index 70c4fd3daa6a..94643ebef31c 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py @@ -45,6 +45,8 @@ class AgentsNamedToolChoiceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Tool type ``sharepoint_grounding``""" AZURE_AI_SEARCH = "azure_ai_search" """Tool type ``azure_ai_search``""" + BING_CUSTOM_SEARCH = "bing_custom_search" + """Tool type ``bing_custom_search``""" class AgentStreamEvent(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py index aefa9d474688..db0044be06be 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py @@ -13,6 +13,7 @@ from .. 
import _model_base from .._model_base import rest_discriminator, rest_field +from .._vendor import FileType from ._enums import ( AuthenticationType, OpenApiAuthType, @@ -225,7 +226,7 @@ class AgentsNamedToolChoice(_model_base.Model): :ivar type: the type of tool. If type is ``function``\\, the function name must be set. Required. Known values are: "function", "code_interpreter", "file_search", "bing_grounding", - "fabric_dataagent", "sharepoint_grounding", and "azure_ai_search". + "fabric_dataagent", "sharepoint_grounding", "azure_ai_search", and "bing_custom_search". :vartype type: str or ~azure.ai.projects.models.AgentsNamedToolChoiceType :ivar function: The name of the function to call. :vartype function: ~azure.ai.projects.models.FunctionName @@ -236,7 +237,8 @@ class AgentsNamedToolChoice(_model_base.Model): ) """the type of tool. If type is \"function\" , the function name must be set. Required. Known values are: \"function\", \"code_interpreter\", \"file_search\", \"bing_grounding\", - \"fabric_dataagent\", \"sharepoint_grounding\", and \"azure_ai_search\".""" + \"fabric_dataagent\", \"sharepoint_grounding\", \"azure_ai_search\", and + \"bing_custom_search\".""" function: Optional["_models.FunctionName"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the function to call.""" @@ -656,9 +658,10 @@ class ToolDefinition(_model_base.Model): """An abstract representation of an input tool definition that an agent can use. You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - AzureAISearchToolDefinition, AzureFunctionToolDefinition, BingGroundingToolDefinition, - CodeInterpreterToolDefinition, MicrosoftFabricToolDefinition, FileSearchToolDefinition, - FunctionToolDefinition, OpenApiToolDefinition, SharepointToolDefinition + AzureAISearchToolDefinition, AzureFunctionToolDefinition, BingCustomSearchToolDefinition, + BingGroundingToolDefinition, CodeInterpreterToolDefinition, MicrosoftFabricToolDefinition, + FileSearchToolDefinition, FunctionToolDefinition, OpenApiToolDefinition, + SharepointToolDefinition :ivar type: The object type. Required. Default value is None. :vartype type: str @@ -870,6 +873,43 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, type="azure_function", **kwargs) +class BingCustomSearchToolDefinition(ToolDefinition, discriminator="bing_custom_search"): + """The input definition information for a Bing custom search tool as used to configure an agent. + + :ivar type: The object type, which is always 'bing_custom_search'. Required. Default value is + "bing_custom_search". + :vartype type: str + :ivar bing_custom_search: The list of search configurations used by the bing custom search + tool. Required. + :vartype bing_custom_search: ~azure.ai.projects.models.SearchConfigurationList + """ + + type: Literal["bing_custom_search"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'bing_custom_search'. Required. Default value is + \"bing_custom_search\".""" + bing_custom_search: "_models.SearchConfigurationList" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The list of search configurations used by the bing custom search tool. Required.""" + + @overload + def __init__( + self, + *, + bing_custom_search: "_models.SearchConfigurationList", + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type="bing_custom_search", **kwargs) + + class BingGroundingToolDefinition(ToolDefinition, discriminator="bing_grounding"): """The input definition information for a bing grounding search tool as used to configure an agent. @@ -3805,6 +3845,10 @@ class OpenApiFunctionDefinition(_model_base.Model): :vartype spec: any :ivar auth: Open API authentication details. Required. :vartype auth: ~azure.ai.projects.models.OpenApiAuthDetails + :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults. + :vartype default_params: list[str] + :ivar functions: List of functions returned in response. + :vartype functions: list[~azure.ai.projects.models.FunctionDefinition] """ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3816,6 +3860,12 @@ class OpenApiFunctionDefinition(_model_base.Model): """The openapi function shape, described as a JSON Schema object. Required.""" auth: "_models.OpenApiAuthDetails" = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Open API authentication details. Required.""" + default_params: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of OpenAPI spec parameters that will use user-provided defaults.""" + functions: Optional[List["_models.FunctionDefinition"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of functions returned in response.""" @overload def __init__( @@ -3825,6 +3875,8 @@ def __init__( spec: Any, auth: "_models.OpenApiAuthDetails", description: Optional[str] = None, + default_params: Optional[List[str]] = None, + functions: Optional[List["_models.FunctionDefinition"]] = None, ) -> None: ... 
@overload @@ -4461,9 +4513,9 @@ class RunStepToolCall(_model_base.Model): existing run. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - RunStepAzureAISearchToolCall, RunStepBingGroundingToolCall, RunStepCodeInterpreterToolCall, - RunStepMicrosoftFabricToolCall, RunStepFileSearchToolCall, RunStepFunctionToolCall, - RunStepSharepointToolCall + RunStepAzureAISearchToolCall, RunStepCustomSearchToolCall, RunStepBingGroundingToolCall, + RunStepCodeInterpreterToolCall, RunStepMicrosoftFabricToolCall, RunStepFileSearchToolCall, + RunStepFunctionToolCall, RunStepOpenAPIToolCall, RunStepSharepointToolCall :ivar type: The object type. Required. Default value is None. :vartype type: str @@ -4823,6 +4875,46 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class RunStepCustomSearchToolCall(RunStepToolCall, discriminator="bing_custom_search"): + """A record of a call to a bing custom search tool, issued by the model in evaluation of a defined + tool, that represents + executed search with bing custom search. + + :ivar id: The ID of the tool call. This ID must be referenced when you submit tool outputs. + Required. + :vartype id: str + :ivar type: The object type, which is always 'bing_custom_search'. Required. Default value is + "bing_custom_search". + :vartype type: str + :ivar bing_custom_search: Reserved for future use. Required. + :vartype bing_custom_search: dict[str, str] + """ + + type: Literal["bing_custom_search"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'bing_custom_search'. Required. Default value is + \"bing_custom_search\".""" + bing_custom_search: Dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Reserved for future use. 
Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + bing_custom_search: Dict[str, str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type="bing_custom_search", **kwargs) + + class RunStepDelta(_model_base.Model): """Represents the delta payload in a streaming run step delta chunk. @@ -5802,6 +5894,44 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, type="fabric_dataagent", **kwargs) +class RunStepOpenAPIToolCall(RunStepToolCall, discriminator="openapi"): + """A record of a call to an OpenAPI tool, issued by the model in evaluation of a defined tool, + that represents + executed OpenAPI operations. + + :ivar id: The ID of the tool call. This ID must be referenced when you submit tool outputs. + Required. + :vartype id: str + :ivar type: The object type, which is always 'openapi'. Required. Default value is "openapi". + :vartype type: str + :ivar open_api: Reserved for future use. Required. + :vartype open_api: dict[str, str] + """ + + type: Literal["openapi"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'openapi'. Required. Default value is \"openapi\".""" + open_api: Dict[str, str] = rest_field(name="openapi", visibility=["read", "create", "update", "delete", "query"]) + """Reserved for future use. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + open_api: Dict[str, str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type="openapi", **kwargs) + + class RunStepSharepointToolCall(RunStepToolCall, discriminator="sharepoint_grounding"): """A record of a call to a SharePoint tool, issued by the model in evaluation of a defined tool, that represents @@ -5878,6 +6008,72 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, type=RunStepType.TOOL_CALLS, **kwargs) +class SearchConfiguration(_model_base.Model): + """A custom search configuration. + + :ivar connection_id: A connection in a ToolConnectionList attached to this tool. Required. + :vartype connection_id: str + :ivar instance_name: Name of the custom configuration instance given to config. Required. + :vartype instance_name: str + """ + + connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A connection in a ToolConnectionList attached to this tool. Required.""" + instance_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the custom configuration instance given to config. Required.""" + + @overload + def __init__( + self, + *, + connection_id: str, + instance_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SearchConfigurationList(_model_base.Model): + """A list of search configurations currently used by the ``bing_custom_search`` tool. + + :ivar search_configurations: The connections attached to this tool. There can be a maximum of 1 + connection + resource attached to the tool. Required. 
+ :vartype search_configurations: list[~azure.ai.projects.models.SearchConfiguration] + """ + + search_configurations: List["_models.SearchConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool. Required.""" + + @overload + def __init__( + self, + *, + search_configurations: List["_models.SearchConfiguration"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + class SharepointToolDefinition(ToolDefinition, discriminator="sharepoint_grounding"): """The input definition information for a sharepoint tool as used to configure an agent. @@ -6767,6 +6963,52 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class UploadFileRequest(_model_base.Model): + """UploadFileRequest. + + :ivar file: The file data, in bytes. Required. + :vartype file: ~azure.ai.projects._vendor.FileType + :ivar purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and + Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and + ``fine-tune`` for Fine-tuning. Required. Known values are: "fine-tune", "fine-tune-results", + "assistants", "assistants_output", "batch", "batch_output", and "vision". + :vartype purpose: str or ~azure.ai.projects.models.FilePurpose + :ivar filename: The name of the file. + :vartype filename: str + """ + + file: FileType = rest_field( + visibility=["read", "create", "update", "delete", "query"], is_multipart_file_input=True + ) + """The file data, in bytes. 
Required.""" + purpose: Union[str, "_models.FilePurpose"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The intended purpose of the uploaded file. Use ``assistants`` for Agents and Message files, + ``vision`` for Agents image file inputs, ``batch`` for Batch API, and ``fine-tune`` for + Fine-tuning. Required. Known values are: \"fine-tune\", \"fine-tune-results\", \"assistants\", + \"assistants_output\", \"batch\", \"batch_output\", and \"vision\".""" + filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the file.""" + + @overload + def __init__( + self, + *, + file: FileType, + purpose: Union[str, "_models.FilePurpose"], + filename: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + class VectorStore(_model_base.Model): """A vector store is a collection of processed files can be used by the ``file_search`` tool. diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py index 7a231513bbd6..3c0f6e5a6987 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@@ -36,7 +37,7 @@ import jsonref from azure.ai.projects import AIProjectClient from azure.identity import DefaultAzureCredential -from azure.ai.projects.models import OpenApiTool, OpenApiConnectionAuthDetails, OpenApiConnectionSecurityScheme +from azure.ai.projects.models import OpenApiTool, OpenApiConnectionAuthDetails, OpenApiConnectionSecurityScheme project_client = AIProjectClient.from_connection_string( @@ -50,14 +51,16 @@ print(connection.id) -with open('./tripadvisor_openapi.json', 'r') as f: +with open("./tripadvisor_openapi.json", "r") as f: openapi_spec = jsonref.loads(f.read()) # Create Auth object for the OpenApiTool (note that connection or managed identity auth setup requires additional setup in Azure) auth = OpenApiConnectionAuthDetails(security_scheme=OpenApiConnectionSecurityScheme(connection_id=connection.id)) # Initialize an Agent OpenApi tool using the read in OpenAPI spec -openapi = OpenApiTool(name="get_weather", spec=openapi_spec, description="Retrieve weather information for a location", auth=auth) +openapi = OpenApiTool( + name="get_weather", spec=openapi_spec, description="Retrieve weather information for a location", auth=auth +) # Create an Agent with OpenApi tool and process Agent run with project_client: From 5e094fed2c4af30492fb64515aa3d65ccbb13978 Mon Sep 17 00:00:00 2001 From: Glenn Harper Date: Wed, 2 Apr 2025 13:24:11 -0700 Subject: [PATCH 04/13] Revert "auto-genned code" This reverts commit 47eea99f863dfc0ba8186980cd15a3043bb6ae83. 
--- .../azure/ai/projects/models/__init__.py | 12 - .../azure/ai/projects/models/_enums.py | 2 - .../azure/ai/projects/models/_models.py | 258 +----------------- .../sample_agents_openapi_connection_auth.py | 9 +- 4 files changed, 11 insertions(+), 270 deletions(-) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py index a3e6b9287cd0..a44d43c67e3b 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py @@ -29,7 +29,6 @@ AzureFunctionDefinition, AzureFunctionStorageQueue, AzureFunctionToolDefinition, - BingCustomSearchToolDefinition, BingGroundingToolDefinition, CodeInterpreterToolDefinition, CodeInterpreterToolResource, @@ -116,7 +115,6 @@ RunStepCodeInterpreterToolCallDetails, RunStepCodeInterpreterToolCallOutput, RunStepCompletionUsage, - RunStepCustomSearchToolCall, RunStepDelta, RunStepDeltaChunk, RunStepDeltaCodeInterpreterDetailItemObject, @@ -143,12 +141,9 @@ RunStepMessageCreationDetails, RunStepMessageCreationReference, RunStepMicrosoftFabricToolCall, - RunStepOpenAPIToolCall, RunStepSharepointToolCall, RunStepToolCall, RunStepToolCallDetails, - SearchConfiguration, - SearchConfigurationList, SharepointToolDefinition, SubmitToolOutputsAction, SubmitToolOutputsDetails, @@ -168,7 +163,6 @@ UpdateCodeInterpreterToolResourceOptions, UpdateFileSearchToolResourceOptions, UpdateToolResourcesOptions, - UploadFileRequest, VectorStore, VectorStoreAutoChunkingStrategyRequest, VectorStoreAutoChunkingStrategyResponse, @@ -250,7 +244,6 @@ "AzureFunctionDefinition", "AzureFunctionStorageQueue", "AzureFunctionToolDefinition", - "BingCustomSearchToolDefinition", "BingGroundingToolDefinition", "CodeInterpreterToolDefinition", "CodeInterpreterToolResource", @@ -337,7 +330,6 @@ "RunStepCodeInterpreterToolCallDetails", "RunStepCodeInterpreterToolCallOutput", "RunStepCompletionUsage", - 
"RunStepCustomSearchToolCall", "RunStepDelta", "RunStepDeltaChunk", "RunStepDeltaCodeInterpreterDetailItemObject", @@ -364,12 +356,9 @@ "RunStepMessageCreationDetails", "RunStepMessageCreationReference", "RunStepMicrosoftFabricToolCall", - "RunStepOpenAPIToolCall", "RunStepSharepointToolCall", "RunStepToolCall", "RunStepToolCallDetails", - "SearchConfiguration", - "SearchConfigurationList", "SharepointToolDefinition", "SubmitToolOutputsAction", "SubmitToolOutputsDetails", @@ -389,7 +378,6 @@ "UpdateCodeInterpreterToolResourceOptions", "UpdateFileSearchToolResourceOptions", "UpdateToolResourcesOptions", - "UploadFileRequest", "VectorStore", "VectorStoreAutoChunkingStrategyRequest", "VectorStoreAutoChunkingStrategyResponse", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py index 94643ebef31c..70c4fd3daa6a 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py @@ -45,8 +45,6 @@ class AgentsNamedToolChoiceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Tool type ``sharepoint_grounding``""" AZURE_AI_SEARCH = "azure_ai_search" """Tool type ``azure_ai_search``""" - BING_CUSTOM_SEARCH = "bing_custom_search" - """Tool type ``bing_custom_search``""" class AgentStreamEvent(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py index db0044be06be..aefa9d474688 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py @@ -13,7 +13,6 @@ from .. 
import _model_base from .._model_base import rest_discriminator, rest_field -from .._vendor import FileType from ._enums import ( AuthenticationType, OpenApiAuthType, @@ -226,7 +225,7 @@ class AgentsNamedToolChoice(_model_base.Model): :ivar type: the type of tool. If type is ``function``\\, the function name must be set. Required. Known values are: "function", "code_interpreter", "file_search", "bing_grounding", - "fabric_dataagent", "sharepoint_grounding", "azure_ai_search", and "bing_custom_search". + "fabric_dataagent", "sharepoint_grounding", and "azure_ai_search". :vartype type: str or ~azure.ai.projects.models.AgentsNamedToolChoiceType :ivar function: The name of the function to call. :vartype function: ~azure.ai.projects.models.FunctionName @@ -237,8 +236,7 @@ class AgentsNamedToolChoice(_model_base.Model): ) """the type of tool. If type is \"function\" , the function name must be set. Required. Known values are: \"function\", \"code_interpreter\", \"file_search\", \"bing_grounding\", - \"fabric_dataagent\", \"sharepoint_grounding\", \"azure_ai_search\", and - \"bing_custom_search\".""" + \"fabric_dataagent\", \"sharepoint_grounding\", and \"azure_ai_search\".""" function: Optional["_models.FunctionName"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the function to call.""" @@ -658,10 +656,9 @@ class ToolDefinition(_model_base.Model): """An abstract representation of an input tool definition that an agent can use. You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - AzureAISearchToolDefinition, AzureFunctionToolDefinition, BingCustomSearchToolDefinition, - BingGroundingToolDefinition, CodeInterpreterToolDefinition, MicrosoftFabricToolDefinition, - FileSearchToolDefinition, FunctionToolDefinition, OpenApiToolDefinition, - SharepointToolDefinition + AzureAISearchToolDefinition, AzureFunctionToolDefinition, BingGroundingToolDefinition, + CodeInterpreterToolDefinition, MicrosoftFabricToolDefinition, FileSearchToolDefinition, + FunctionToolDefinition, OpenApiToolDefinition, SharepointToolDefinition :ivar type: The object type. Required. Default value is None. :vartype type: str @@ -873,43 +870,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, type="azure_function", **kwargs) -class BingCustomSearchToolDefinition(ToolDefinition, discriminator="bing_custom_search"): - """The input definition information for a Bing custom search tool as used to configure an agent. - - :ivar type: The object type, which is always 'bing_custom_search'. Required. Default value is - "bing_custom_search". - :vartype type: str - :ivar bing_custom_search: The list of search configurations used by the bing custom search - tool. Required. - :vartype bing_custom_search: ~azure.ai.projects.models.SearchConfigurationList - """ - - type: Literal["bing_custom_search"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'bing_custom_search'. Required. Default value is - \"bing_custom_search\".""" - bing_custom_search: "_models.SearchConfigurationList" = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The list of search configurations used by the bing custom search tool. Required.""" - - @overload - def __init__( - self, - *, - bing_custom_search: "_models.SearchConfigurationList", - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, type="bing_custom_search", **kwargs) - - class BingGroundingToolDefinition(ToolDefinition, discriminator="bing_grounding"): """The input definition information for a bing grounding search tool as used to configure an agent. @@ -3845,10 +3805,6 @@ class OpenApiFunctionDefinition(_model_base.Model): :vartype spec: any :ivar auth: Open API authentication details. Required. :vartype auth: ~azure.ai.projects.models.OpenApiAuthDetails - :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults. - :vartype default_params: list[str] - :ivar functions: List of functions returned in response. - :vartype functions: list[~azure.ai.projects.models.FunctionDefinition] """ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3860,12 +3816,6 @@ class OpenApiFunctionDefinition(_model_base.Model): """The openapi function shape, described as a JSON Schema object. Required.""" auth: "_models.OpenApiAuthDetails" = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Open API authentication details. Required.""" - default_params: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """List of OpenAPI spec parameters that will use user-provided defaults.""" - functions: Optional[List["_models.FunctionDefinition"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """List of functions returned in response.""" @overload def __init__( @@ -3875,8 +3825,6 @@ def __init__( spec: Any, auth: "_models.OpenApiAuthDetails", description: Optional[str] = None, - default_params: Optional[List[str]] = None, - functions: Optional[List["_models.FunctionDefinition"]] = None, ) -> None: ... 
@overload @@ -4513,9 +4461,9 @@ class RunStepToolCall(_model_base.Model): existing run. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - RunStepAzureAISearchToolCall, RunStepCustomSearchToolCall, RunStepBingGroundingToolCall, - RunStepCodeInterpreterToolCall, RunStepMicrosoftFabricToolCall, RunStepFileSearchToolCall, - RunStepFunctionToolCall, RunStepOpenAPIToolCall, RunStepSharepointToolCall + RunStepAzureAISearchToolCall, RunStepBingGroundingToolCall, RunStepCodeInterpreterToolCall, + RunStepMicrosoftFabricToolCall, RunStepFileSearchToolCall, RunStepFunctionToolCall, + RunStepSharepointToolCall :ivar type: The object type. Required. Default value is None. :vartype type: str @@ -4875,46 +4823,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class RunStepCustomSearchToolCall(RunStepToolCall, discriminator="bing_custom_search"): - """A record of a call to a bing custom search tool, issued by the model in evaluation of a defined - tool, that represents - executed search with bing custom search. - - :ivar id: The ID of the tool call. This ID must be referenced when you submit tool outputs. - Required. - :vartype id: str - :ivar type: The object type, which is always 'bing_custom_search'. Required. Default value is - "bing_custom_search". - :vartype type: str - :ivar bing_custom_search: Reserved for future use. Required. - :vartype bing_custom_search: dict[str, str] - """ - - type: Literal["bing_custom_search"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'bing_custom_search'. Required. Default value is - \"bing_custom_search\".""" - bing_custom_search: Dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Reserved for future use. 
Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - bing_custom_search: Dict[str, str], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, type="bing_custom_search", **kwargs) - - class RunStepDelta(_model_base.Model): """Represents the delta payload in a streaming run step delta chunk. @@ -5894,44 +5802,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, type="fabric_dataagent", **kwargs) -class RunStepOpenAPIToolCall(RunStepToolCall, discriminator="openapi"): - """A record of a call to an OpenAPI tool, issued by the model in evaluation of a defined tool, - that represents - executed OpenAPI operations. - - :ivar id: The ID of the tool call. This ID must be referenced when you submit tool outputs. - Required. - :vartype id: str - :ivar type: The object type, which is always 'openapi'. Required. Default value is "openapi". - :vartype type: str - :ivar open_api: Reserved for future use. Required. - :vartype open_api: dict[str, str] - """ - - type: Literal["openapi"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The object type, which is always 'openapi'. Required. Default value is \"openapi\".""" - open_api: Dict[str, str] = rest_field(name="openapi", visibility=["read", "create", "update", "delete", "query"]) - """Reserved for future use. Required.""" - - @overload - def __init__( - self, - *, - id: str, # pylint: disable=redefined-builtin - open_api: Dict[str, str], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, type="openapi", **kwargs) - - class RunStepSharepointToolCall(RunStepToolCall, discriminator="sharepoint_grounding"): """A record of a call to a SharePoint tool, issued by the model in evaluation of a defined tool, that represents @@ -6008,72 +5878,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, type=RunStepType.TOOL_CALLS, **kwargs) -class SearchConfiguration(_model_base.Model): - """A custom search configuration. - - :ivar connection_id: A connection in a ToolConnectionList attached to this tool. Required. - :vartype connection_id: str - :ivar instance_name: Name of the custom configuration instance given to config. Required. - :vartype instance_name: str - """ - - connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """A connection in a ToolConnectionList attached to this tool. Required.""" - instance_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """Name of the custom configuration instance given to config. Required.""" - - @overload - def __init__( - self, - *, - connection_id: str, - instance_name: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class SearchConfigurationList(_model_base.Model): - """A list of search configurations currently used by the ``bing_custom_search`` tool. - - :ivar search_configurations: The connections attached to this tool. There can be a maximum of 1 - connection - resource attached to the tool. Required. 
- :vartype search_configurations: list[~azure.ai.projects.models.SearchConfiguration] - """ - - search_configurations: List["_models.SearchConfiguration"] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """The connections attached to this tool. There can be a maximum of 1 connection - resource attached to the tool. Required.""" - - @overload - def __init__( - self, - *, - search_configurations: List["_models.SearchConfiguration"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - class SharepointToolDefinition(ToolDefinition, discriminator="sharepoint_grounding"): """The input definition information for a sharepoint tool as used to configure an agent. @@ -6963,52 +6767,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class UploadFileRequest(_model_base.Model): - """UploadFileRequest. - - :ivar file: The file data, in bytes. Required. - :vartype file: ~azure.ai.projects._vendor.FileType - :ivar purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Required. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". - :vartype purpose: str or ~azure.ai.projects.models.FilePurpose - :ivar filename: The name of the file. - :vartype filename: str - """ - - file: FileType = rest_field( - visibility=["read", "create", "update", "delete", "query"], is_multipart_file_input=True - ) - """The file data, in bytes. 
Required.""" - purpose: Union[str, "_models.FilePurpose"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The intended purpose of the uploaded file. Use ``assistants`` for Agents and Message files, - ``vision`` for Agents image file inputs, ``batch`` for Batch API, and ``fine-tune`` for - Fine-tuning. Required. Known values are: \"fine-tune\", \"fine-tune-results\", \"assistants\", - \"assistants_output\", \"batch\", \"batch_output\", and \"vision\".""" - filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The name of the file.""" - - @overload - def __init__( - self, - *, - file: FileType, - purpose: Union[str, "_models.FilePurpose"], - filename: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - class VectorStore(_model_base.Model): """A vector store is a collection of processed files can be used by the ``file_search`` tool. diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py index 3c0f6e5a6987..7a231513bbd6 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py @@ -1,4 +1,3 @@ -# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@@ -37,7 +36,7 @@ import jsonref from azure.ai.projects import AIProjectClient from azure.identity import DefaultAzureCredential -from azure.ai.projects.models import OpenApiTool, OpenApiConnectionAuthDetails, OpenApiConnectionSecurityScheme +from azure.ai.projects.models import OpenApiTool, OpenApiConnectionAuthDetails, OpenApiConnectionSecurityScheme project_client = AIProjectClient.from_connection_string( @@ -51,16 +50,14 @@ print(connection.id) -with open("./tripadvisor_openapi.json", "r") as f: +with open('./tripadvisor_openapi.json', 'r') as f: openapi_spec = jsonref.loads(f.read()) # Create Auth object for the OpenApiTool (note that connection or managed identity auth setup requires additional setup in Azure) auth = OpenApiConnectionAuthDetails(security_scheme=OpenApiConnectionSecurityScheme(connection_id=connection.id)) # Initialize an Agent OpenApi tool using the read in OpenAPI spec -openapi = OpenApiTool( - name="get_weather", spec=openapi_spec, description="Retrieve weather information for a location", auth=auth -) +openapi = OpenApiTool(name="get_weather", spec=openapi_spec, description="Retrieve weather information for a location", auth=auth) # Create an Agent with OpenApi tool and process Agent run with project_client: From 828a973975f3b0f98e3065964db41460af61c4b5 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Thu, 3 Apr 2025 10:41:47 -0700 Subject: [PATCH 05/13] Re-emit from latest TypeSpec --- sdk/ai/azure-ai-projects/MANIFEST.in | 2 +- .../azure-ai-projects/apiview-properties.json | 264 ++++++ .../azure/ai/projects/_client.py | 18 +- .../azure/ai/projects/_configuration.py | 6 +- .../azure/ai/projects/aio/_client.py | 18 +- .../azure/ai/projects/aio/_configuration.py | 6 +- .../ai/projects/aio/operations/_operations.py | 781 +++++++++++------- .../azure/ai/projects/models/__init__.py | 12 + .../azure/ai/projects/models/_enums.py | 4 +- .../azure/ai/projects/models/_models.py | 295 ++++++- 
.../azure/ai/projects/models/_patch.py | 2 +- .../ai/projects/operations/_operations.py | 770 ++++++++++------- .../azure/ai/projects/prompts/__init__.py | 1 + .../azure/ai/projects/prompts/_patch.py | 3 +- .../agents/sample_agents_azure_ai_search.py | 6 +- .../sample_agents_openapi_connection_auth.py | 14 +- ...stream_eventhandler_with_bing_grounding.py | 1 + ...ts_stream_iteration_with_bing_grounding.py | 1 + ...e_ai_inference_client_and_prompt_string.py | 5 +- ...h_azure_ai_inference_client_and_prompty.py | 4 +- .../tests/agents/overload_assert_utils.py | 1 + .../tests/agents/test_agent_models.py | 1 + .../tests/agents/test_agent_models_async.py | 1 + .../tests/agents/test_agents_client.py | 32 +- .../tests/agents/test_overload_assert.py | 1 + sdk/ai/azure-ai-projects/tsp-location.yaml | 2 +- 26 files changed, 1577 insertions(+), 674 deletions(-) create mode 100644 sdk/ai/azure-ai-projects/apiview-properties.json diff --git a/sdk/ai/azure-ai-projects/MANIFEST.in b/sdk/ai/azure-ai-projects/MANIFEST.in index aee9f8c1ccc3..fba50036b227 100644 --- a/sdk/ai/azure-ai-projects/MANIFEST.in +++ b/sdk/ai/azure-ai-projects/MANIFEST.in @@ -4,4 +4,4 @@ include azure/ai/projects/py.typed recursive-include tests *.py recursive-include samples *.py *.md include azure/__init__.py -include azure/ai/__init__.py \ No newline at end of file +include azure/ai/__init__.py diff --git a/sdk/ai/azure-ai-projects/apiview-properties.json b/sdk/ai/azure-ai-projects/apiview-properties.json new file mode 100644 index 000000000000..b19333997594 --- /dev/null +++ b/sdk/ai/azure-ai-projects/apiview-properties.json @@ -0,0 +1,264 @@ +{ + "CrossLanguagePackageId": "Azure.AI.Projects", + "CrossLanguageDefinitionId": { + "azure.ai.projects.models.Agent": "Azure.AI.Projects.Agents.Agent", + "azure.ai.projects.models.AgentDeletionStatus": "Azure.AI.Projects.Agents.AgentDeletionStatus", + "azure.ai.projects.models.AgentsApiResponseFormat": "Azure.AI.Projects.Agents.AgentsApiResponseFormat", + 
"azure.ai.projects.models.AgentsNamedToolChoice": "Azure.AI.Projects.Agents.AgentsNamedToolChoice", + "azure.ai.projects.models.AgentThread": "Azure.AI.Projects.Agents.AgentThread", + "azure.ai.projects.models.AgentThreadCreationOptions": "Azure.AI.Projects.Agents.AgentThreadCreationOptions", + "azure.ai.projects.models.AISearchIndexResource": "Azure.AI.Projects.Agents.AISearchIndexResource", + "azure.ai.projects.models.TargetModelConfig": "Azure.AI.Projects.TargetModelConfig", + "azure.ai.projects.models.AOAIModelConfig": "Azure.AI.Projects.AOAIModelConfig", + "azure.ai.projects.models.InputData": "Azure.AI.Projects.InputData", + "azure.ai.projects.models.ApplicationInsightsConfiguration": "Azure.AI.Projects.ApplicationInsightsConfiguration", + "azure.ai.projects.models.AzureAISearchResource": "Azure.AI.Projects.Agents.AzureAISearchResource", + "azure.ai.projects.models.ToolDefinition": "Azure.AI.Projects.Agents.ToolDefinition", + "azure.ai.projects.models.AzureAISearchToolDefinition": "Azure.AI.Projects.Agents.AzureAISearchToolDefinition", + "azure.ai.projects.models.AzureFunctionBinding": "Azure.AI.Projects.Agents.AzureFunctionBinding", + "azure.ai.projects.models.AzureFunctionDefinition": "Azure.AI.Projects.Agents.AzureFunctionDefinition", + "azure.ai.projects.models.AzureFunctionStorageQueue": "Azure.AI.Projects.Agents.AzureFunctionStorageQueue", + "azure.ai.projects.models.AzureFunctionToolDefinition": "Azure.AI.Projects.Agents.AzureFunctionToolDefinition", + "azure.ai.projects.models.BingCustomSearchToolDefinition": "Azure.AI.Projects.Agents.BingCustomSearchToolDefinition", + "azure.ai.projects.models.BingGroundingToolDefinition": "Azure.AI.Projects.Agents.BingGroundingToolDefinition", + "azure.ai.projects.models.CodeInterpreterToolDefinition": "Azure.AI.Projects.Agents.CodeInterpreterToolDefinition", + "azure.ai.projects.models.CodeInterpreterToolResource": "Azure.AI.Projects.Agents.CodeInterpreterToolResource", + "azure.ai.projects.models.Trigger": 
"Azure.AI.Projects.Trigger", + "azure.ai.projects.models.CronTrigger": "Azure.AI.Projects.CronTrigger", + "azure.ai.projects.models.Dataset": "Azure.AI.Projects.Dataset", + "azure.ai.projects.models.Evaluation": "Azure.AI.Projects.Evaluation", + "azure.ai.projects.models.EvaluationSchedule": "Azure.AI.Projects.EvaluationSchedule", + "azure.ai.projects.models.EvaluationTarget": "Azure.AI.Projects.EvaluationTarget", + "azure.ai.projects.models.EvaluatorConfiguration": "Azure.AI.Projects.EvaluatorConfiguration", + "azure.ai.projects.models.FileDeletionStatus": "Azure.AI.Projects.Agents.FileDeletionStatus", + "azure.ai.projects.models.FileListResponse": "Azure.AI.Projects.Agents.FileListResponse", + "azure.ai.projects.models.FileSearchRankingOptions": "Azure.AI.Projects.Agents.FileSearchRankingOptions", + "azure.ai.projects.models.FileSearchToolCallContent": "Azure.AI.Projects.Agents.FileSearchToolCallContent", + "azure.ai.projects.models.FileSearchToolDefinition": "Azure.AI.Projects.Agents.FileSearchToolDefinition", + "azure.ai.projects.models.FileSearchToolDefinitionDetails": "Azure.AI.Projects.Agents.FileSearchToolDefinitionDetails", + "azure.ai.projects.models.FileSearchToolResource": "Azure.AI.Projects.Agents.FileSearchToolResource", + "azure.ai.projects.models.FunctionDefinition": "Azure.AI.Projects.Agents.FunctionDefinition", + "azure.ai.projects.models.FunctionName": "Azure.AI.Projects.Agents.FunctionName", + "azure.ai.projects.models.FunctionToolDefinition": "Azure.AI.Projects.Agents.FunctionToolDefinition", + "azure.ai.projects.models.IncompleteRunDetails": "Azure.AI.Projects.Agents.IncompleteRunDetails", + "azure.ai.projects.models.MAASModelConfig": "Azure.AI.Projects.MAASModelConfig", + "azure.ai.projects.models.MessageAttachment": "Azure.AI.Projects.Agents.MessageAttachment", + "azure.ai.projects.models.MessageContent": "Azure.AI.Projects.Agents.MessageContent", + "azure.ai.projects.models.MessageDelta": "Azure.AI.Projects.Agents.MessageDelta", + 
"azure.ai.projects.models.MessageDeltaChunk": "Azure.AI.Projects.Agents.MessageDeltaChunk", + "azure.ai.projects.models.MessageDeltaContent": "Azure.AI.Projects.Agents.MessageDeltaContent", + "azure.ai.projects.models.MessageDeltaImageFileContent": "Azure.AI.Projects.Agents.MessageDeltaImageFileContent", + "azure.ai.projects.models.MessageDeltaImageFileContentObject": "Azure.AI.Projects.Agents.MessageDeltaImageFileContentObject", + "azure.ai.projects.models.MessageDeltaTextAnnotation": "Azure.AI.Projects.Agents.MessageDeltaTextAnnotation", + "azure.ai.projects.models.MessageDeltaTextContent": "Azure.AI.Projects.Agents.MessageDeltaTextContent", + "azure.ai.projects.models.MessageDeltaTextContentObject": "Azure.AI.Projects.Agents.MessageDeltaTextContentObject", + "azure.ai.projects.models.MessageDeltaTextFileCitationAnnotation": "Azure.AI.Projects.Agents.MessageDeltaTextFileCitationAnnotation", + "azure.ai.projects.models.MessageDeltaTextFileCitationAnnotationObject": "Azure.AI.Projects.Agents.MessageDeltaTextFileCitationAnnotationObject", + "azure.ai.projects.models.MessageDeltaTextFilePathAnnotation": "Azure.AI.Projects.Agents.MessageDeltaTextFilePathAnnotation", + "azure.ai.projects.models.MessageDeltaTextFilePathAnnotationObject": "Azure.AI.Projects.Agents.MessageDeltaTextFilePathAnnotationObject", + "azure.ai.projects.models.MessageDeltaTextUrlCitationAnnotation": "Azure.AI.Projects.Agents.MessageDeltaTextUrlCitationAnnotation", + "azure.ai.projects.models.MessageDeltaTextUrlCitationDetails": "Azure.AI.Projects.Agents.MessageDeltaTextUrlCitationDetails", + "azure.ai.projects.models.MessageImageFileContent": "Azure.AI.Projects.Agents.MessageImageFileContent", + "azure.ai.projects.models.MessageImageFileDetails": "Azure.AI.Projects.Agents.MessageImageFileDetails", + "azure.ai.projects.models.MessageIncompleteDetails": "Azure.AI.Projects.Agents.MessageIncompleteDetails", + "azure.ai.projects.models.MessageTextAnnotation": 
"Azure.AI.Projects.Agents.MessageTextAnnotation", + "azure.ai.projects.models.MessageTextContent": "Azure.AI.Projects.Agents.MessageTextContent", + "azure.ai.projects.models.MessageTextDetails": "Azure.AI.Projects.Agents.MessageTextDetails", + "azure.ai.projects.models.MessageTextFileCitationAnnotation": "Azure.AI.Projects.Agents.MessageTextFileCitationAnnotation", + "azure.ai.projects.models.MessageTextFileCitationDetails": "Azure.AI.Projects.Agents.MessageTextFileCitationDetails", + "azure.ai.projects.models.MessageTextFilePathAnnotation": "Azure.AI.Projects.Agents.MessageTextFilePathAnnotation", + "azure.ai.projects.models.MessageTextFilePathDetails": "Azure.AI.Projects.Agents.MessageTextFilePathDetails", + "azure.ai.projects.models.MessageTextUrlCitationAnnotation": "Azure.AI.Projects.Agents.MessageTextUrlCitationAnnotation", + "azure.ai.projects.models.MessageTextUrlCitationDetails": "Azure.AI.Projects.Agents.MessageTextUrlCitationDetails", + "azure.ai.projects.models.MicrosoftFabricToolDefinition": "Azure.AI.Projects.Agents.MicrosoftFabricToolDefinition", + "azure.ai.projects.models.OpenAIFile": "Azure.AI.Projects.Agents.OpenAIFile", + "azure.ai.projects.models.OpenAIPageableListOfAgent": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenAIPageableListOfRunStep": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenAIPageableListOfThreadMessage": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenAIPageableListOfThreadRun": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenAIPageableListOfVectorStore": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenAIPageableListOfVectorStoreFile": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenApiAuthDetails": "Azure.AI.Projects.Agents.OpenApiAuthDetails", + "azure.ai.projects.models.OpenApiAnonymousAuthDetails": 
"Azure.AI.Projects.Agents.OpenApiAnonymousAuthDetails", + "azure.ai.projects.models.OpenApiConnectionAuthDetails": "Azure.AI.Projects.Agents.OpenApiConnectionAuthDetails", + "azure.ai.projects.models.OpenApiConnectionSecurityScheme": "Azure.AI.Projects.Agents.OpenApiConnectionSecurityScheme", + "azure.ai.projects.models.OpenApiFunctionDefinition": "Azure.AI.Projects.Agents.OpenApiFunctionDefinition", + "azure.ai.projects.models.OpenApiManagedAuthDetails": "Azure.AI.Projects.Agents.OpenApiManagedAuthDetails", + "azure.ai.projects.models.OpenApiManagedSecurityScheme": "Azure.AI.Projects.Agents.OpenApiManagedSecurityScheme", + "azure.ai.projects.models.OpenApiToolDefinition": "Azure.AI.Projects.Agents.OpenApiToolDefinition", + "azure.ai.projects.models.RecurrenceSchedule": "Azure.AI.Projects.RecurrenceSchedule", + "azure.ai.projects.models.RecurrenceTrigger": "Azure.AI.Projects.RecurrenceTrigger", + "azure.ai.projects.models.RequiredAction": "Azure.AI.Projects.Agents.RequiredAction", + "azure.ai.projects.models.RequiredToolCall": "Azure.AI.Projects.Agents.RequiredToolCall", + "azure.ai.projects.models.RequiredFunctionToolCall": "Azure.AI.Projects.Agents.RequiredFunctionToolCall", + "azure.ai.projects.models.RequiredFunctionToolCallDetails": "Azure.AI.Projects.Agents.RequiredFunctionToolCallDetails", + "azure.ai.projects.models.ResponseFormatJsonSchema": "Azure.AI.Projects.Agents.ResponseFormatJsonSchema", + "azure.ai.projects.models.ResponseFormatJsonSchemaType": "Azure.AI.Projects.Agents.ResponseFormatJsonSchemaType", + "azure.ai.projects.models.RunCompletionUsage": "Azure.AI.Projects.Agents.RunCompletionUsage", + "azure.ai.projects.models.RunError": "Azure.AI.Projects.Agents.RunError", + "azure.ai.projects.models.RunStep": "Azure.AI.Projects.Agents.RunStep", + "azure.ai.projects.models.RunStepToolCall": "Azure.AI.Projects.Agents.RunStepToolCall", + "azure.ai.projects.models.RunStepAzureAISearchToolCall": "Azure.AI.Projects.Agents.RunStepAzureAISearchToolCall", + 
"azure.ai.projects.models.RunStepBingGroundingToolCall": "Azure.AI.Projects.Agents.RunStepBingGroundingToolCall", + "azure.ai.projects.models.RunStepCodeInterpreterToolCallOutput": "Azure.AI.Projects.Agents.RunStepCodeInterpreterToolCallOutput", + "azure.ai.projects.models.RunStepCodeInterpreterImageOutput": "Azure.AI.Projects.Agents.RunStepCodeInterpreterImageOutput", + "azure.ai.projects.models.RunStepCodeInterpreterImageReference": "Azure.AI.Projects.Agents.RunStepCodeInterpreterImageReference", + "azure.ai.projects.models.RunStepCodeInterpreterLogOutput": "Azure.AI.Projects.Agents.RunStepCodeInterpreterLogOutput", + "azure.ai.projects.models.RunStepCodeInterpreterToolCall": "Azure.AI.Projects.Agents.RunStepCodeInterpreterToolCall", + "azure.ai.projects.models.RunStepCodeInterpreterToolCallDetails": "Azure.AI.Projects.Agents.RunStepCodeInterpreterToolCallDetails", + "azure.ai.projects.models.RunStepCompletionUsage": "Azure.AI.Projects.Agents.RunStepCompletionUsage", + "azure.ai.projects.models.RunStepCustomSearchToolCall": "Azure.AI.Projects.Agents.RunStepCustomSearchToolCall", + "azure.ai.projects.models.RunStepDelta": "Azure.AI.Projects.Agents.RunStepDelta", + "azure.ai.projects.models.RunStepDeltaChunk": "Azure.AI.Projects.Agents.RunStepDeltaChunk", + "azure.ai.projects.models.RunStepDeltaCodeInterpreterDetailItemObject": "Azure.AI.Projects.Agents.RunStepDeltaCodeInterpreterDetailItemObject", + "azure.ai.projects.models.RunStepDeltaCodeInterpreterOutput": "Azure.AI.Projects.Agents.RunStepDeltaCodeInterpreterOutput", + "azure.ai.projects.models.RunStepDeltaCodeInterpreterImageOutput": "Azure.AI.Projects.Agents.RunStepDeltaCodeInterpreterImageOutput", + "azure.ai.projects.models.RunStepDeltaCodeInterpreterImageOutputObject": "Azure.AI.Projects.Agents.RunStepDeltaCodeInterpreterImageOutputObject", + "azure.ai.projects.models.RunStepDeltaCodeInterpreterLogOutput": "Azure.AI.Projects.Agents.RunStepDeltaCodeInterpreterLogOutput", + 
"azure.ai.projects.models.RunStepDeltaToolCall": "Azure.AI.Projects.Agents.RunStepDeltaToolCall", + "azure.ai.projects.models.RunStepDeltaCodeInterpreterToolCall": "Azure.AI.Projects.Agents.RunStepDeltaCodeInterpreterToolCall", + "azure.ai.projects.models.RunStepDeltaDetail": "Azure.AI.Projects.Agents.RunStepDeltaDetail", + "azure.ai.projects.models.RunStepDeltaFileSearchToolCall": "Azure.AI.Projects.Agents.RunStepDeltaFileSearchToolCall", + "azure.ai.projects.models.RunStepDeltaFunction": "Azure.AI.Projects.Agents.RunStepDeltaFunction", + "azure.ai.projects.models.RunStepDeltaFunctionToolCall": "Azure.AI.Projects.Agents.RunStepDeltaFunctionToolCall", + "azure.ai.projects.models.RunStepDeltaMessageCreation": "Azure.AI.Projects.Agents.RunStepDeltaMessageCreation", + "azure.ai.projects.models.RunStepDeltaMessageCreationObject": "Azure.AI.Projects.Agents.RunStepDeltaMessageCreationObject", + "azure.ai.projects.models.RunStepDeltaToolCallObject": "Azure.AI.Projects.Agents.RunStepDeltaToolCallObject", + "azure.ai.projects.models.RunStepDetails": "Azure.AI.Projects.Agents.RunStepDetails", + "azure.ai.projects.models.RunStepError": "Azure.AI.Projects.Agents.RunStepError", + "azure.ai.projects.models.RunStepFileSearchToolCall": "Azure.AI.Projects.Agents.RunStepFileSearchToolCall", + "azure.ai.projects.models.RunStepFileSearchToolCallResult": "Azure.AI.Projects.Agents.RunStepFileSearchToolCallResult", + "azure.ai.projects.models.RunStepFileSearchToolCallResults": "Azure.AI.Projects.Agents.RunStepFileSearchToolCallResults", + "azure.ai.projects.models.RunStepFunctionToolCall": "Azure.AI.Projects.Agents.RunStepFunctionToolCall", + "azure.ai.projects.models.RunStepFunctionToolCallDetails": "Azure.AI.Projects.Agents.RunStepFunctionToolCallDetails", + "azure.ai.projects.models.RunStepMessageCreationDetails": "Azure.AI.Projects.Agents.RunStepMessageCreationDetails", + "azure.ai.projects.models.RunStepMessageCreationReference": 
"Azure.AI.Projects.Agents.RunStepMessageCreationReference", + "azure.ai.projects.models.RunStepMicrosoftFabricToolCall": "Azure.AI.Projects.Agents.RunStepMicrosoftFabricToolCall", + "azure.ai.projects.models.RunStepOpenAPIToolCall": "Azure.AI.Projects.Agents.RunStepOpenAPIToolCall", + "azure.ai.projects.models.RunStepSharepointToolCall": "Azure.AI.Projects.Agents.RunStepSharepointToolCall", + "azure.ai.projects.models.RunStepToolCallDetails": "Azure.AI.Projects.Agents.RunStepToolCallDetails", + "azure.ai.projects.models.SearchConfiguration": "Azure.AI.Projects.Agents.SearchConfiguration", + "azure.ai.projects.models.SearchConfigurationList": "Azure.AI.Projects.Agents.SearchConfigurationList", + "azure.ai.projects.models.SharepointToolDefinition": "Azure.AI.Projects.Agents.SharepointToolDefinition", + "azure.ai.projects.models.SubmitToolOutputsAction": "Azure.AI.Projects.Agents.SubmitToolOutputsAction", + "azure.ai.projects.models.SubmitToolOutputsDetails": "Azure.AI.Projects.Agents.SubmitToolOutputsDetails", + "azure.ai.projects.models.SystemData": "Azure.AI.Projects.SystemData", + "azure.ai.projects.models.ThreadDeletionStatus": "Azure.AI.Projects.Agents.ThreadDeletionStatus", + "azure.ai.projects.models.ThreadMessage": "Azure.AI.Projects.Agents.ThreadMessage", + "azure.ai.projects.models.ThreadMessageOptions": "Azure.AI.Projects.Agents.ThreadMessageOptions", + "azure.ai.projects.models.ThreadRun": "Azure.AI.Projects.Agents.ThreadRun", + "azure.ai.projects.models.ToolConnection": "Azure.AI.Projects.Agents.ToolConnection", + "azure.ai.projects.models.ToolConnectionList": "Azure.AI.Projects.Agents.ToolConnectionList", + "azure.ai.projects.models.ToolOutput": "Azure.AI.Projects.Agents.ToolOutput", + "azure.ai.projects.models.ToolResources": "Azure.AI.Projects.Agents.ToolResources", + "azure.ai.projects.models.TruncationObject": "Azure.AI.Projects.Agents.TruncationObject", + "azure.ai.projects.models.UpdateCodeInterpreterToolResourceOptions": 
"Azure.AI.Projects.Agents.UpdateCodeInterpreterToolResourceOptions", + "azure.ai.projects.models.UpdateFileSearchToolResourceOptions": "Azure.AI.Projects.Agents.UpdateFileSearchToolResourceOptions", + "azure.ai.projects.models.UpdateToolResourcesOptions": "Azure.AI.Projects.Agents.UpdateToolResourcesOptions", + "azure.ai.projects.models.UploadFileRequest": "Azure.AI.Projects.Agents.uploadFile.Request.anonymous", + "azure.ai.projects.models.VectorStore": "Azure.AI.Projects.Agents.VectorStore", + "azure.ai.projects.models.VectorStoreChunkingStrategyRequest": "Azure.AI.Projects.Agents.VectorStoreChunkingStrategyRequest", + "azure.ai.projects.models.VectorStoreAutoChunkingStrategyRequest": "Azure.AI.Projects.Agents.VectorStoreAutoChunkingStrategyRequest", + "azure.ai.projects.models.VectorStoreChunkingStrategyResponse": "Azure.AI.Projects.Agents.VectorStoreChunkingStrategyResponse", + "azure.ai.projects.models.VectorStoreAutoChunkingStrategyResponse": "Azure.AI.Projects.Agents.VectorStoreAutoChunkingStrategyResponse", + "azure.ai.projects.models.VectorStoreConfiguration": "Azure.AI.Projects.Agents.VectorStoreConfiguration", + "azure.ai.projects.models.VectorStoreConfigurations": "Azure.AI.Projects.Agents.VectorStoreConfigurations", + "azure.ai.projects.models.VectorStoreDataSource": "Azure.AI.Projects.Agents.VectorStoreDataSource", + "azure.ai.projects.models.VectorStoreDeletionStatus": "Azure.AI.Projects.Agents.VectorStoreDeletionStatus", + "azure.ai.projects.models.VectorStoreExpirationPolicy": "Azure.AI.Projects.Agents.VectorStoreExpirationPolicy", + "azure.ai.projects.models.VectorStoreFile": "Azure.AI.Projects.Agents.VectorStoreFile", + "azure.ai.projects.models.VectorStoreFileBatch": "Azure.AI.Projects.Agents.VectorStoreFileBatch", + "azure.ai.projects.models.VectorStoreFileCount": "Azure.AI.Projects.Agents.VectorStoreFileCount", + "azure.ai.projects.models.VectorStoreFileDeletionStatus": "Azure.AI.Projects.Agents.VectorStoreFileDeletionStatus", + 
"azure.ai.projects.models.VectorStoreFileError": "Azure.AI.Projects.Agents.VectorStoreFileError", + "azure.ai.projects.models.VectorStoreStaticChunkingStrategyOptions": "Azure.AI.Projects.Agents.VectorStoreStaticChunkingStrategyOptions", + "azure.ai.projects.models.VectorStoreStaticChunkingStrategyRequest": "Azure.AI.Projects.Agents.VectorStoreStaticChunkingStrategyRequest", + "azure.ai.projects.models.VectorStoreStaticChunkingStrategyResponse": "Azure.AI.Projects.Agents.VectorStoreStaticChunkingStrategyResponse", + "azure.ai.projects.models.OpenApiAuthType": "Azure.AI.Projects.Agents.OpenApiAuthType", + "azure.ai.projects.models.VectorStoreDataSourceAssetType": "Azure.AI.Projects.Agents.VectorStoreDataSourceAssetType", + "azure.ai.projects.models.AzureAISearchQueryType": "Azure.AI.Projects.Agents.AzureAISearchQueryType", + "azure.ai.projects.models.AgentsApiResponseFormatMode": "Azure.AI.Projects.Agents.AgentsApiResponseFormatMode", + "azure.ai.projects.models.ResponseFormat": "Azure.AI.Projects.Agents.ResponseFormat", + "azure.ai.projects.models.ListSortOrder": "Azure.AI.Projects.Agents.ListSortOrder", + "azure.ai.projects.models.MessageRole": "Azure.AI.Projects.Agents.MessageRole", + "azure.ai.projects.models.MessageStatus": "Azure.AI.Projects.Agents.MessageStatus", + "azure.ai.projects.models.MessageIncompleteDetailsReason": "Azure.AI.Projects.Agents.MessageIncompleteDetailsReason", + "azure.ai.projects.models.RunStatus": "Azure.AI.Projects.Agents.RunStatus", + "azure.ai.projects.models.IncompleteDetailsReason": "Azure.AI.Projects.Agents.IncompleteDetailsReason", + "azure.ai.projects.models.TruncationStrategy": "Azure.AI.Projects.Agents.TruncationStrategy", + "azure.ai.projects.models.AgentsApiToolChoiceOptionMode": "Azure.AI.Projects.Agents.AgentsApiToolChoiceOptionMode", + "azure.ai.projects.models.AgentsNamedToolChoiceType": "Azure.AI.Projects.Agents.AgentsNamedToolChoiceType", + "azure.ai.projects.models.RunAdditionalFieldList": 
"Azure.AI.Projects.Agents.RunAdditionalFieldList", + "azure.ai.projects.models.RunStepType": "Azure.AI.Projects.Agents.RunStepType", + "azure.ai.projects.models.RunStepStatus": "Azure.AI.Projects.Agents.RunStepStatus", + "azure.ai.projects.models.RunStepErrorCode": "Azure.AI.Projects.Agents.RunStepErrorCode", + "azure.ai.projects.models.FilePurpose": "Azure.AI.Projects.Agents.FilePurpose", + "azure.ai.projects.models.FileState": "Azure.AI.Projects.Agents.FileState", + "azure.ai.projects.models.VectorStoreStatus": "Azure.AI.Projects.Agents.VectorStoreStatus", + "azure.ai.projects.models.VectorStoreExpirationPolicyAnchor": "Azure.AI.Projects.Agents.VectorStoreExpirationPolicyAnchor", + "azure.ai.projects.models.VectorStoreChunkingStrategyRequestType": "Azure.AI.Projects.Agents.VectorStoreChunkingStrategyRequestType", + "azure.ai.projects.models.VectorStoreFileStatus": "Azure.AI.Projects.Agents.VectorStoreFileStatus", + "azure.ai.projects.models.VectorStoreFileErrorCode": "Azure.AI.Projects.Agents.VectorStoreFileErrorCode", + "azure.ai.projects.models.VectorStoreChunkingStrategyResponseType": "Azure.AI.Projects.Agents.VectorStoreChunkingStrategyResponseType", + "azure.ai.projects.models.VectorStoreFileStatusFilter": "Azure.AI.Projects.Agents.VectorStoreFileStatusFilter", + "azure.ai.projects.models.VectorStoreFileBatchStatus": "Azure.AI.Projects.Agents.VectorStoreFileBatchStatus", + "azure.ai.projects.models.AuthenticationType": "Azure.AI.Projects.AuthenticationType", + "azure.ai.projects.models.ConnectionType": "Azure.AI.Projects.ConnectionType", + "azure.ai.projects.models.Frequency": "Azure.AI.Projects.Frequency", + "azure.ai.projects.models.WeekDays": "Azure.AI.Projects.WeekDays", + "azure.ai.projects.models.ThreadStreamEvent": "Azure.AI.Projects.Agents.ThreadStreamEvent", + "azure.ai.projects.models.RunStreamEvent": "Azure.AI.Projects.Agents.RunStreamEvent", + "azure.ai.projects.models.RunStepStreamEvent": "Azure.AI.Projects.Agents.RunStepStreamEvent", + 
"azure.ai.projects.models.MessageStreamEvent": "Azure.AI.Projects.Agents.MessageStreamEvent", + "azure.ai.projects.models.ErrorEvent": "Azure.AI.Projects.Agents.ErrorEvent", + "azure.ai.projects.models.DoneEvent": "Azure.AI.Projects.Agents.DoneEvent", + "azure.ai.projects.models.AgentStreamEvent": "Azure.AI.Projects.Agents.AgentStreamEvent", + "azure.ai.projects.AIProjectClient.agents.create_agent": "Azure.AI.Projects.Agents.createAgent", + "azure.ai.projects.AIProjectClient.agents.list_agents": "Azure.AI.Projects.Agents.listAgents", + "azure.ai.projects.AIProjectClient.agents.get_agent": "Azure.AI.Projects.Agents.getAgent", + "azure.ai.projects.AIProjectClient.agents.update_agent": "Azure.AI.Projects.Agents.updateAgent", + "azure.ai.projects.AIProjectClient.agents.delete_agent": "Azure.AI.Projects.Agents.deleteAgent", + "azure.ai.projects.AIProjectClient.agents.create_thread": "Azure.AI.Projects.Agents.createThread", + "azure.ai.projects.AIProjectClient.agents.get_thread": "Azure.AI.Projects.Agents.getThread", + "azure.ai.projects.AIProjectClient.agents.update_thread": "Azure.AI.Projects.Agents.updateThread", + "azure.ai.projects.AIProjectClient.agents.delete_thread": "Azure.AI.Projects.Agents.deleteThread", + "azure.ai.projects.AIProjectClient.agents.create_message": "Azure.AI.Projects.Agents.createMessage", + "azure.ai.projects.AIProjectClient.agents.list_messages": "Azure.AI.Projects.Agents.listMessages", + "azure.ai.projects.AIProjectClient.agents.get_message": "Azure.AI.Projects.Agents.getMessage", + "azure.ai.projects.AIProjectClient.agents.update_message": "Azure.AI.Projects.Agents.updateMessage", + "azure.ai.projects.AIProjectClient.agents.create_run": "Azure.AI.Projects.Agents.createRun", + "azure.ai.projects.AIProjectClient.agents.list_runs": "Azure.AI.Projects.Agents.listRuns", + "azure.ai.projects.AIProjectClient.agents.get_run": "Azure.AI.Projects.Agents.getRun", + "azure.ai.projects.AIProjectClient.agents.update_run": 
"Azure.AI.Projects.Agents.updateRun", + "azure.ai.projects.AIProjectClient.agents.submit_tool_outputs_to_run": "Azure.AI.Projects.Agents.submitToolOutputsToRun", + "azure.ai.projects.AIProjectClient.agents.cancel_run": "Azure.AI.Projects.Agents.cancelRun", + "azure.ai.projects.AIProjectClient.agents.create_thread_and_run": "Azure.AI.Projects.Agents.createThreadAndRun", + "azure.ai.projects.AIProjectClient.agents.get_run_step": "Azure.AI.Projects.Agents.getRunStep", + "azure.ai.projects.AIProjectClient.agents.list_run_steps": "Azure.AI.Projects.Agents.listRunSteps", + "azure.ai.projects.AIProjectClient.agents.list_files": "Azure.AI.Projects.Agents.listFiles", + "azure.ai.projects.AIProjectClient.agents.upload_file": "Azure.AI.Projects.Agents.uploadFile", + "azure.ai.projects.AIProjectClient.agents.delete_file": "Azure.AI.Projects.Agents.deleteFile", + "azure.ai.projects.AIProjectClient.agents.get_file": "Azure.AI.Projects.Agents.getFile", + "azure.ai.projects.AIProjectClient.agents.list_vector_stores": "Azure.AI.Projects.Agents.listVectorStores", + "azure.ai.projects.AIProjectClient.agents.create_vector_store": "Azure.AI.Projects.Agents.createVectorStore", + "azure.ai.projects.AIProjectClient.agents.get_vector_store": "Azure.AI.Projects.Agents.getVectorStore", + "azure.ai.projects.AIProjectClient.agents.modify_vector_store": "Azure.AI.Projects.Agents.modifyVectorStore", + "azure.ai.projects.AIProjectClient.agents.delete_vector_store": "Azure.AI.Projects.Agents.deleteVectorStore", + "azure.ai.projects.AIProjectClient.agents.list_vector_store_files": "Azure.AI.Projects.Agents.listVectorStoreFiles", + "azure.ai.projects.AIProjectClient.agents.create_vector_store_file": "Azure.AI.Projects.Agents.createVectorStoreFile", + "azure.ai.projects.AIProjectClient.agents.get_vector_store_file": "Azure.AI.Projects.Agents.getVectorStoreFile", + "azure.ai.projects.AIProjectClient.agents.delete_vector_store_file": "Azure.AI.Projects.Agents.deleteVectorStoreFile", + 
"azure.ai.projects.AIProjectClient.agents.create_vector_store_file_batch": "Azure.AI.Projects.Agents.createVectorStoreFileBatch", + "azure.ai.projects.AIProjectClient.agents.get_vector_store_file_batch": "Azure.AI.Projects.Agents.getVectorStoreFileBatch", + "azure.ai.projects.AIProjectClient.agents.cancel_vector_store_file_batch": "Azure.AI.Projects.Agents.cancelVectorStoreFileBatch", + "azure.ai.projects.AIProjectClient.agents.list_vector_store_file_batch_files": "Azure.AI.Projects.Agents.listVectorStoreFileBatchFiles", + "azure.ai.projects.AIProjectClient.evaluations.get": "Azure.AI.Projects.Evaluations.get", + "azure.ai.projects.AIProjectClient.evaluations.create": "Azure.AI.Projects.Evaluations.create", + "azure.ai.projects.AIProjectClient.evaluations.list": "Azure.AI.Projects.Evaluations.list", + "azure.ai.projects.AIProjectClient.evaluations.update": "Azure.AI.Projects.Evaluations.update", + "azure.ai.projects.AIProjectClient.evaluations.get_schedule": "Azure.AI.Projects.Evaluations.getSchedule", + "azure.ai.projects.AIProjectClient.evaluations.create_or_replace_schedule": "Azure.AI.Projects.Evaluations.createOrReplaceSchedule", + "azure.ai.projects.AIProjectClient.evaluations.list_schedule": "Azure.AI.Projects.Evaluations.listSchedule", + "azure.ai.projects.AIProjectClient.evaluations.disable_schedule": "Azure.AI.Projects.Evaluations.disableSchedule" + } +} \ No newline at end of file diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_client.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_client.py index b3e215c68df7..853feb83403d 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_client.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_client.py @@ -36,9 +36,9 @@ class AIProjectClient: :vartype evaluations: azure.ai.projects.operations.EvaluationsOperations :param endpoint: The Azure AI Foundry project endpoint, in the form ``https://.api.azureml.ms`` or - ``https://..api.azureml.ms``\\\\ , where - :code:`` is the Azure region where the 
project is deployed (e.g. westus) and - :code:`` is the GUID of the Enterprise private link. Required. + ``https://..api.azureml.ms``, where is the + Azure region where the project is deployed (e.g. westus) and is the GUID of + the Enterprise private link. Required. :type endpoint: str :param subscription_id: The Azure subscription ID. Required. :type subscription_id: str @@ -119,12 +119,16 @@ def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: request_copy = deepcopy(request) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_configuration.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_configuration.py index 9b8efcae3c2b..3bbeed37a0c7 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_configuration.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_configuration.py @@ -24,9 +24,9 @@ class AIProjectClientConfiguration: # pylint: disable=too-many-instance-attribu :param endpoint: The Azure AI Foundry project endpoint, in the form 
``https://.api.azureml.ms`` or - ``https://..api.azureml.ms``\\ , where :code:`` - is the Azure region where the project is deployed (e.g. westus) and :code:`` - is the GUID of the Enterprise private link. Required. + ``https://..api.azureml.ms``, where is the + Azure region where the project is deployed (e.g. westus) and is the GUID of + the Enterprise private link. Required. :type endpoint: str :param subscription_id: The Azure subscription ID. Required. :type subscription_id: str diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_client.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_client.py index 1057faa04d88..7bfca0ef6b4a 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_client.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_client.py @@ -36,9 +36,9 @@ class AIProjectClient: :vartype evaluations: azure.ai.projects.aio.operations.EvaluationsOperations :param endpoint: The Azure AI Foundry project endpoint, in the form ``https://.api.azureml.ms`` or - ``https://..api.azureml.ms``\\\\ , where - :code:`` is the Azure region where the project is deployed (e.g. westus) and - :code:`` is the GUID of the Enterprise private link. Required. + ``https://..api.azureml.ms``, where is the + Azure region where the project is deployed (e.g. westus) and is the GUID of + the Enterprise private link. Required. :type endpoint: str :param subscription_id: The Azure subscription ID. Required. 
:type subscription_id: str @@ -121,12 +121,16 @@ def send_request( request_copy = deepcopy(request) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_configuration.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_configuration.py index f9fb99fbc947..48b480a960b7 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_configuration.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_configuration.py @@ -24,9 +24,9 @@ class AIProjectClientConfiguration: # pylint: disable=too-many-instance-attribu :param endpoint: The Azure AI Foundry project endpoint, in the form ``https://.api.azureml.ms`` or - ``https://..api.azureml.ms``\\ , where :code:`` - is the Azure region where the project is deployed (e.g. westus) and :code:`` - is the GUID of the Enterprise private link. Required. + ``https://..api.azureml.ms``, where is the + Azure region where the project is deployed (e.g. westus) and is the GUID of + the Enterprise private link. Required. 
:type endpoint: str :param subscription_id: The Azure subscription ID. Required. :type subscription_id: str diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py index fc32e1aeaf02..ddb915d67b2a 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py @@ -9,7 +9,20 @@ from io import IOBase import json import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TYPE_CHECKING, TypeVar, Union, overload +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Callable, + Dict, + IO, + List, + Optional, + TYPE_CHECKING, + TypeVar, + Union, + overload, +) import urllib.parse from azure.core import AsyncPipelineClient @@ -33,7 +46,7 @@ from ... import _model_base, models as _models from ..._model_base import SdkJSONEncoder, _deserialize from ..._serialization import Deserializer, Serializer -from ..._vendor import FileType, prepare_multipart_form_data +from ..._vendor import prepare_multipart_form_data from ...operations._operations import ( build_agents_cancel_run_request, build_agents_cancel_vector_store_file_batch_request, @@ -322,12 +335,16 @@ async def create_agent( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", 
self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -413,12 +430,16 @@ async def list_agents( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -478,12 +499,16 @@ async def get_agent(self, agent_id: str, **kwargs: Any) -> _models.Agent: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), 
"resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -729,12 +754,16 @@ async def update_agent( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -794,12 +823,16 @@ async def delete_agent(self, agent_id: str, **kwargs: Any) -> _models.AgentDelet params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", 
self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -959,12 +992,16 @@ async def create_thread( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1024,12 +1061,16 @@ async def get_thread(self, thread_id: str, **kwargs: Any) -> _models.AgentThread params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - 
"subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1192,12 +1233,16 @@ async def update_thread( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1257,12 +1302,16 @@ async def delete_thread(self, thread_id: str, 
**kwargs: Any) -> _models.ThreadDe params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1310,12 +1359,11 @@ async def create_message( :type thread_id: str :keyword role: The role of the entity that is creating the message. Allowed values include: - - * ``user``\\ : Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + * `user`: Indicates the message is sent by an actual user and should be used in most + cases to represent user-generated messages. + * `assistant`: Indicates the message is generated by the agent. Use this value to insert + messages from the agent into the + conversation. Known values are: "user" and "assistant". Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole :keyword content: The textual content of the initial message. 
Currently, robust input including images and annotated text may only be provided via @@ -1393,12 +1441,11 @@ async def create_message( :type body: JSON or IO[bytes] :keyword role: The role of the entity that is creating the message. Allowed values include: - - * ``user``\\ : Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + * `user`: Indicates the message is sent by an actual user and should be used in most + cases to represent user-generated messages. + * `assistant`: Indicates the message is generated by the agent. Use this value to insert + messages from the agent into the + conversation. Known values are: "user" and "assistant". Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole :keyword content: The textual content of the initial message. 
Currently, robust input including images and annotated text may only be provided via @@ -1453,12 +1500,16 @@ async def create_message( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1552,12 +1603,16 @@ async def list_messages( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", 
skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1620,12 +1675,16 @@ async def get_message(self, thread_id: str, message_id: str, **kwargs: Any) -> _ params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1785,12 +1844,16 @@ async def update_message( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + 
"self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1875,8 +1938,8 @@ async def create_run( :keyword tools: The overridden list of enabled tools that the agent should use to run the thread. Default value is None. :paramtype tools: list[~azure.ai.projects.models.ToolDefinition] - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. :paramtype stream_parameter: bool @@ -2045,8 +2108,8 @@ async def create_run( :keyword tools: The overridden list of enabled tools that the agent should use to run the thread. Default value is None. :paramtype tools: list[~azure.ai.projects.models.ToolDefinition] - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. 
:paramtype stream_parameter: bool @@ -2155,12 +2218,16 @@ async def create_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2250,12 +2317,16 @@ async def list_runs( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": 
self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2318,12 +2389,16 @@ async def get_run(self, thread_id: str, run_id: str, **kwargs: Any) -> _models.T params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2483,12 +2558,16 @@ async def update_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, 
"str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2660,12 +2739,16 @@ async def submit_tool_outputs_to_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2728,12 +2811,16 @@ async def cancel_run(self, thread_id: str, run_id: str, **kwargs: Any) -> _model params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), 
"resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2808,8 +2895,8 @@ async def create_thread_and_run( :keyword tool_resources: Override the tools the agent can use for this run. This is useful for modifying the behavior on a per-run basis. Default value is None. :paramtype tool_resources: ~azure.ai.projects.models.UpdateToolResourcesOptions - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. :paramtype stream_parameter: bool @@ -2942,8 +3029,8 @@ async def create_thread_and_run( :keyword tool_resources: Override the tools the agent can use for this run. This is useful for modifying the behavior on a per-run basis. Default value is None. :paramtype tool_resources: ~azure.ai.projects.models.UpdateToolResourcesOptions - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. 
:paramtype stream_parameter: bool @@ -3050,12 +3137,16 @@ async def create_thread_and_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3135,12 +3226,16 @@ async def get_run_step( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": 
self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3241,12 +3336,16 @@ async def list_run_steps( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3310,12 +3409,16 @@ async def list_files( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": 
self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3346,20 +3449,11 @@ async def list_files( return deserialized # type: ignore @overload - async def upload_file( - self, *, file: FileType, purpose: Union[str, _models.FilePurpose], filename: Optional[str] = None, **kwargs: Any - ) -> _models.OpenAIFile: + async def upload_file(self, body: _models.UploadFileRequest, **kwargs: Any) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :keyword file: The file data, in bytes. Required. - :paramtype file: ~azure.ai.projects._vendor.FileType - :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. - :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose - :keyword filename: The name of the file. Default value is None. - :paramtype filename: str + :param body: Multipart body. Required. + :type body: ~azure.ai.projects.models.UploadFileRequest :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile :raises ~azure.core.exceptions.HttpResponseError: @@ -3369,7 +3463,7 @@ async def upload_file( async def upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Required. + :param body: Multipart body. Required. :type body: JSON :return: OpenAIFile. 
The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile @@ -3377,28 +3471,11 @@ async def upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: """ @distributed_trace_async - async def upload_file( - self, - body: JSON = _Unset, - *, - file: FileType = _Unset, - purpose: Union[str, _models.FilePurpose] = _Unset, - filename: Optional[str] = None, - **kwargs: Any - ) -> _models.OpenAIFile: + async def upload_file(self, body: Union[_models.UploadFileRequest, JSON], **kwargs: Any) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Is one of the following types: JSON Required. - :type body: JSON - :keyword file: The file data, in bytes. Required. - :paramtype file: ~azure.ai.projects._vendor.FileType - :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. - :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose - :keyword filename: The name of the file. Default value is None. - :paramtype filename: str + :param body: Multipart body. Is either a UploadFileRequest type or a JSON type. Required. + :type body: ~azure.ai.projects.models.UploadFileRequest or JSON :return: OpenAIFile. 
The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile :raises ~azure.core.exceptions.HttpResponseError: @@ -3416,13 +3493,6 @@ async def upload_file( cls: ClsType[_models.OpenAIFile] = kwargs.pop("cls", None) - if body is _Unset: - if file is _Unset: - raise TypeError("missing required argument: file") - if purpose is _Unset: - raise TypeError("missing required argument: purpose") - body = {"file": file, "filename": filename, "purpose": purpose} - body = {k: v for k, v in body.items() if v is not None} _body = body.as_dict() if isinstance(body, _model_base.Model) else body _file_fields: List[str] = ["file"] _data_fields: List[str] = ["purpose", "filename"] @@ -3436,12 +3506,16 @@ async def upload_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3501,12 +3575,16 @@ async def delete_file(self, file_id: str, **kwargs: Any) -> _models.FileDeletion params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - 
"subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3566,12 +3644,16 @@ async def get_file(self, file_id: str, **kwargs: Any) -> _models.OpenAIFile: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3602,13 +3684,13 
@@ async def get_file(self, file_id: str, **kwargs: Any) -> _models.OpenAIFile: return deserialized # type: ignore @distributed_trace_async - async def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: + async def _get_file_content(self, file_id: str, **kwargs: Any) -> AsyncIterator[bytes]: """Retrieves the raw content of a specific file. :param file_id: The ID of the file to retrieve. Required. :type file_id: str - :return: bytes - :rtype: bytes + :return: AsyncIterator[bytes] + :rtype: AsyncIterator[bytes] :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -3622,7 +3704,7 @@ async def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[bytes] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_agents_get_file_content_request( file_id=file_id, @@ -3631,16 +3713,20 @@ async def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } 
_request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = kwargs.pop("stream", False) + _stream = kwargs.pop("stream", True) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -3656,10 +3742,7 @@ async def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(bytes, response.json(), format="base64") + deserialized = response.iter_bytes() if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -3722,12 +3805,16 @@ async def list_vector_stores( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3908,12 +3995,16 @@ async def create_vector_store( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", 
self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3973,12 +4064,16 @@ async def get_vector_store(self, vector_store_id: str, **kwargs: Any) -> _models params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, 
**path_format_arguments) @@ -4139,12 +4234,16 @@ async def modify_vector_store( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4205,12 +4304,16 @@ async def delete_vector_store(self, vector_store_id: str, **kwargs: Any) -> _mod params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", 
skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4305,12 +4408,16 @@ async def list_vector_store_files( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4467,12 +4574,16 @@ async def create_vector_store_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", 
self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4535,12 +4646,16 @@ async def get_vector_store_file(self, vector_store_id: str, file_id: str, **kwar params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4608,12 +4723,16 @@ async def delete_vector_store_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", 
skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4770,12 +4889,16 @@ async def create_vector_store_file_batch( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4840,12 +4963,16 @@ async def get_vector_store_file_batch( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", 
self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4911,12 +5038,16 @@ async def cancel_vector_store_file_batch( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5015,12 +5146,16 @@ async def list_vector_store_file_batch_files( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - 
"subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5095,12 +5230,16 @@ async def _get_workspace(self, **kwargs: Any) -> _models._models.GetWorkspaceRes params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5178,12 
+5317,16 @@ async def _list_connections( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5245,12 +5388,16 @@ async def _get_connection(self, connection_name: str, **kwargs: Any) -> _models. 
params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5346,12 +5493,16 @@ async def _get_connection_with_secrets( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } 
_request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5436,12 +5587,16 @@ async def _get_app_insights( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5521,12 +5676,16 @@ async def get(self, id: str, **kwargs: Any) -> _models.Evaluation: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + 
"self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5649,12 +5808,16 @@ async def create(self, evaluation: Union[_models.Evaluation, JSON, IO[bytes]], * params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5724,14 +5887,18 @@ def prepare_request(next_link=None): params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, 
"str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5749,14 +5916,18 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5764,7 +5935,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Evaluation], deserialized["value"]) + list_of_elem = _deserialize(List[_models.Evaluation], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -5890,12 +6061,16 @@ async def 
update( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5961,12 +6136,16 @@ async def get_schedule(self, name: str, **kwargs: Any) -> _models.EvaluationSche params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": 
self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6104,12 +6283,16 @@ async def create_or_replace_schedule( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6184,14 +6367,18 @@ def prepare_request(next_link=None): params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + 
"self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6209,14 +6396,18 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6224,7 +6415,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.EvaluationSchedule], deserialized["value"]) + list_of_elem = _deserialize(List[_models.EvaluationSchedule], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -6276,12 +6467,16 @@ async def disable_schedule(self, name: str, **kwargs: Any) -> None: params=_params, ) path_format_arguments = { - "endpoint": 
self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py index a44d43c67e3b..a3e6b9287cd0 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py @@ -29,6 +29,7 @@ AzureFunctionDefinition, AzureFunctionStorageQueue, AzureFunctionToolDefinition, + BingCustomSearchToolDefinition, BingGroundingToolDefinition, CodeInterpreterToolDefinition, CodeInterpreterToolResource, @@ -115,6 +116,7 @@ RunStepCodeInterpreterToolCallDetails, RunStepCodeInterpreterToolCallOutput, RunStepCompletionUsage, + RunStepCustomSearchToolCall, RunStepDelta, RunStepDeltaChunk, RunStepDeltaCodeInterpreterDetailItemObject, @@ -141,9 +143,12 @@ RunStepMessageCreationDetails, RunStepMessageCreationReference, RunStepMicrosoftFabricToolCall, + RunStepOpenAPIToolCall, RunStepSharepointToolCall, RunStepToolCall, RunStepToolCallDetails, + SearchConfiguration, + SearchConfigurationList, 
SharepointToolDefinition, SubmitToolOutputsAction, SubmitToolOutputsDetails, @@ -163,6 +168,7 @@ UpdateCodeInterpreterToolResourceOptions, UpdateFileSearchToolResourceOptions, UpdateToolResourcesOptions, + UploadFileRequest, VectorStore, VectorStoreAutoChunkingStrategyRequest, VectorStoreAutoChunkingStrategyResponse, @@ -244,6 +250,7 @@ "AzureFunctionDefinition", "AzureFunctionStorageQueue", "AzureFunctionToolDefinition", + "BingCustomSearchToolDefinition", "BingGroundingToolDefinition", "CodeInterpreterToolDefinition", "CodeInterpreterToolResource", @@ -330,6 +337,7 @@ "RunStepCodeInterpreterToolCallDetails", "RunStepCodeInterpreterToolCallOutput", "RunStepCompletionUsage", + "RunStepCustomSearchToolCall", "RunStepDelta", "RunStepDeltaChunk", "RunStepDeltaCodeInterpreterDetailItemObject", @@ -356,9 +364,12 @@ "RunStepMessageCreationDetails", "RunStepMessageCreationReference", "RunStepMicrosoftFabricToolCall", + "RunStepOpenAPIToolCall", "RunStepSharepointToolCall", "RunStepToolCall", "RunStepToolCallDetails", + "SearchConfiguration", + "SearchConfigurationList", "SharepointToolDefinition", "SubmitToolOutputsAction", "SubmitToolOutputsDetails", @@ -378,6 +389,7 @@ "UpdateCodeInterpreterToolResourceOptions", "UpdateFileSearchToolResourceOptions", "UpdateToolResourcesOptions", + "UploadFileRequest", "VectorStore", "VectorStoreAutoChunkingStrategyRequest", "VectorStoreAutoChunkingStrategyResponse", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py index 70c4fd3daa6a..9e780544fea8 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py @@ -16,7 +16,7 @@ class AgentsApiResponseFormatMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): AUTO = "auto" """Default value. 
Let the model handle the return format.""" NONE = "none" - """Setting the value to ``none``\\ , will result in a 400 Bad request.""" + """Setting the value to ``none``, will result in a 400 Bad request.""" class AgentsApiToolChoiceOptionMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -45,6 +45,8 @@ class AgentsNamedToolChoiceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Tool type ``sharepoint_grounding``""" AZURE_AI_SEARCH = "azure_ai_search" """Tool type ``azure_ai_search``""" + BING_CUSTOM_SEARCH = "bing_custom_search" + """Tool type ``bing_custom_search``""" class AgentStreamEvent(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py index aefa9d474688..9a866cc9d1bd 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py @@ -13,6 +13,7 @@ from .. import _model_base from .._model_base import rest_discriminator, rest_field +from .._vendor import FileType from ._enums import ( AuthenticationType, OpenApiAuthType, @@ -223,9 +224,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class AgentsNamedToolChoice(_model_base.Model): """Specifies a tool the model should use. Use to force the model to call a specific tool. - :ivar type: the type of tool. If type is ``function``\\, the function name must be set. Required. + :ivar type: the type of tool. If type is ``function``, the function name must be set. Required. Known values are: "function", "code_interpreter", "file_search", "bing_grounding", - "fabric_dataagent", "sharepoint_grounding", and "azure_ai_search". + "fabric_dataagent", "sharepoint_grounding", "azure_ai_search", and "bing_custom_search". :vartype type: str or ~azure.ai.projects.models.AgentsNamedToolChoiceType :ivar function: The name of the function to call. 
:vartype function: ~azure.ai.projects.models.FunctionName @@ -234,9 +235,10 @@ class AgentsNamedToolChoice(_model_base.Model): type: Union[str, "_models.AgentsNamedToolChoiceType"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """the type of tool. If type is \"function\" , the function name must be set. Required. Known + """the type of tool. If type is ``function``, the function name must be set. Required. Known values are: \"function\", \"code_interpreter\", \"file_search\", \"bing_grounding\", - \"fabric_dataagent\", \"sharepoint_grounding\", and \"azure_ai_search\".""" + \"fabric_dataagent\", \"sharepoint_grounding\", \"azure_ai_search\", and + \"bing_custom_search\".""" function: Optional["_models.FunctionName"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the function to call.""" @@ -656,9 +658,10 @@ class ToolDefinition(_model_base.Model): """An abstract representation of an input tool definition that an agent can use. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AzureAISearchToolDefinition, AzureFunctionToolDefinition, BingGroundingToolDefinition, - CodeInterpreterToolDefinition, MicrosoftFabricToolDefinition, FileSearchToolDefinition, - FunctionToolDefinition, OpenApiToolDefinition, SharepointToolDefinition + AzureAISearchToolDefinition, AzureFunctionToolDefinition, BingCustomSearchToolDefinition, + BingGroundingToolDefinition, CodeInterpreterToolDefinition, MicrosoftFabricToolDefinition, + FileSearchToolDefinition, FunctionToolDefinition, OpenApiToolDefinition, + SharepointToolDefinition :ivar type: The object type. Required. Default value is None. 
:vartype type: str @@ -870,6 +873,43 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, type="azure_function", **kwargs) +class BingCustomSearchToolDefinition(ToolDefinition, discriminator="bing_custom_search"): + """The input definition information for a Bing custom search tool as used to configure an agent. + + :ivar type: The object type, which is always 'bing_custom_search'. Required. Default value is + "bing_custom_search". + :vartype type: str + :ivar bing_custom_search: The list of search configurations used by the bing custom search + tool. Required. + :vartype bing_custom_search: ~azure.ai.projects.models.SearchConfigurationList + """ + + type: Literal["bing_custom_search"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'bing_custom_search'. Required. Default value is + \"bing_custom_search\".""" + bing_custom_search: "_models.SearchConfigurationList" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The list of search configurations used by the bing custom search tool. Required.""" + + @overload + def __init__( + self, + *, + bing_custom_search: "_models.SearchConfigurationList", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type="bing_custom_search", **kwargs) + + class BingGroundingToolDefinition(ToolDefinition, discriminator="bing_grounding"): """The input definition information for a bing grounding search tool as used to configure an agent. @@ -3805,6 +3845,10 @@ class OpenApiFunctionDefinition(_model_base.Model): :vartype spec: any :ivar auth: Open API authentication details. Required. 
:vartype auth: ~azure.ai.projects.models.OpenApiAuthDetails + :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults. + :vartype default_params: list[str] + :ivar functions: List of functions returned in response. + :vartype functions: list[~azure.ai.projects.models.FunctionDefinition] """ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3816,6 +3860,12 @@ class OpenApiFunctionDefinition(_model_base.Model): """The openapi function shape, described as a JSON Schema object. Required.""" auth: "_models.OpenApiAuthDetails" = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Open API authentication details. Required.""" + default_params: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of OpenAPI spec parameters that will use user-provided defaults.""" + functions: Optional[List["_models.FunctionDefinition"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of functions returned in response.""" @overload def __init__( @@ -3825,6 +3875,8 @@ def __init__( spec: Any, auth: "_models.OpenApiAuthDetails", description: Optional[str] = None, + default_params: Optional[List[str]] = None, + functions: Optional[List["_models.FunctionDefinition"]] = None, ) -> None: ... @overload @@ -4255,7 +4307,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class RunCompletionUsage(_model_base.Model): """Usage statistics related to the run. This value will be ``null`` if the run is not in a - terminal state (i.e. ``in_progress``\\ , ``queued``\\ , etc.). + terminal state (i.e. ``in_progress``, ``queued``, etc.). :ivar completion_tokens: Number of completion tokens used over the course of the run. Required. :vartype completion_tokens: int @@ -4461,9 +4513,9 @@ class RunStepToolCall(_model_base.Model): existing run. You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - RunStepAzureAISearchToolCall, RunStepBingGroundingToolCall, RunStepCodeInterpreterToolCall, - RunStepMicrosoftFabricToolCall, RunStepFileSearchToolCall, RunStepFunctionToolCall, - RunStepSharepointToolCall + RunStepAzureAISearchToolCall, RunStepCustomSearchToolCall, RunStepBingGroundingToolCall, + RunStepCodeInterpreterToolCall, RunStepMicrosoftFabricToolCall, RunStepFileSearchToolCall, + RunStepFunctionToolCall, RunStepOpenAPIToolCall, RunStepSharepointToolCall :ivar type: The object type. Required. Default value is None. :vartype type: str @@ -4823,6 +4875,46 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class RunStepCustomSearchToolCall(RunStepToolCall, discriminator="bing_custom_search"): + """A record of a call to a bing custom search tool, issued by the model in evaluation of a defined + tool, that represents + executed search with bing custom search. + + :ivar id: The ID of the tool call. This ID must be referenced when you submit tool outputs. + Required. + :vartype id: str + :ivar type: The object type, which is always 'bing_custom_search'. Required. Default value is + "bing_custom_search". + :vartype type: str + :ivar bing_custom_search: Reserved for future use. Required. + :vartype bing_custom_search: dict[str, str] + """ + + type: Literal["bing_custom_search"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'bing_custom_search'. Required. Default value is + \"bing_custom_search\".""" + bing_custom_search: Dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Reserved for future use. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + bing_custom_search: Dict[str, str], + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type="bing_custom_search", **kwargs) + + class RunStepDelta(_model_base.Model): """Represents the delta payload in a streaming run step delta chunk. @@ -5802,6 +5894,44 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, type="fabric_dataagent", **kwargs) +class RunStepOpenAPIToolCall(RunStepToolCall, discriminator="openapi"): + """A record of a call to an OpenAPI tool, issued by the model in evaluation of a defined tool, + that represents + executed OpenAPI operations. + + :ivar id: The ID of the tool call. This ID must be referenced when you submit tool outputs. + Required. + :vartype id: str + :ivar type: The object type, which is always 'openapi'. Required. Default value is "openapi". + :vartype type: str + :ivar open_api: Reserved for future use. Required. + :vartype open_api: dict[str, str] + """ + + type: Literal["openapi"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'openapi'. Required. Default value is \"openapi\".""" + open_api: Dict[str, str] = rest_field(name="openapi", visibility=["read", "create", "update", "delete", "query"]) + """Reserved for future use. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + open_api: Dict[str, str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type="openapi", **kwargs) + + class RunStepSharepointToolCall(RunStepToolCall, discriminator="sharepoint_grounding"): """A record of a call to a SharePoint tool, issued by the model in evaluation of a defined tool, that represents @@ -5878,6 +6008,72 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, type=RunStepType.TOOL_CALLS, **kwargs) +class SearchConfiguration(_model_base.Model): + """A custom search configuration. + + :ivar connection_id: A connection in a ToolConnectionList attached to this tool. Required. + :vartype connection_id: str + :ivar instance_name: Name of the custom configuration instance given to config. Required. + :vartype instance_name: str + """ + + connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A connection in a ToolConnectionList attached to this tool. Required.""" + instance_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the custom configuration instance given to config. Required.""" + + @overload + def __init__( + self, + *, + connection_id: str, + instance_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SearchConfigurationList(_model_base.Model): + """A list of search configurations currently used by the ``bing_custom_search`` tool. + + :ivar search_configurations: The connections attached to this tool. There can be a maximum of 1 + connection + resource attached to the tool. Required. 
+ :vartype search_configurations: list[~azure.ai.projects.models.SearchConfiguration] + """ + + search_configurations: List["_models.SearchConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool. Required.""" + + @overload + def __init__( + self, + *, + search_configurations: List["_models.SearchConfiguration"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + class SharepointToolDefinition(ToolDefinition, discriminator="sharepoint_grounding"): """The input definition information for a sharepoint tool as used to configure an agent. @@ -6172,12 +6368,11 @@ class ThreadMessageOptions(_model_base.Model): :ivar role: The role of the entity that is creating the message. Allowed values include: - * ``user``: Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``: Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the conversation. - - Required. Known values are: "user" and "assistant". + * `user`: Indicates the message is sent by an actual user and should be used in most + cases to represent user-generated messages. + * `assistant`: Indicates the message is generated by the agent. Use this value to insert + messages from the agent into the + conversation. Required. Known values are: "user" and "assistant". :vartype role: str or ~azure.ai.projects.models.MessageRole :ivar content: The textual content of the initial message. 
Currently, robust input including images and annotated text may only be provided via @@ -6195,13 +6390,11 @@ class ThreadMessageOptions(_model_base.Model): role: Union[str, "_models.MessageRole"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The role of the entity that is creating the message. Allowed values include: - * ``user``: Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``: Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the conversation. - - Required. Known values are: \"user\" and \"assistant\". - """ + * `user`: Indicates the message is sent by an actual user and should be used in most + cases to represent user-generated messages. + * `assistant`: Indicates the message is generated by the agent. Use this value to insert + messages from the agent into the + conversation. Required. Known values are: \"user\" and \"assistant\".""" content: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The textual content of the initial message. Currently, robust input including images and annotated text may only be provided via @@ -6284,7 +6477,7 @@ class ThreadRun(_model_base.Model): not incomplete. Required. :vartype incomplete_details: ~azure.ai.projects.models.IncompleteRunDetails :ivar usage: Usage statistics related to the run. This value will be ``null`` if the run is not - in a terminal state (i.e. ``in_progress``\\ , ``queued``\\ , etc.). Required. + in a terminal state (i.e. ``in_progress``, ``queued``, etc.). Required. :vartype usage: ~azure.ai.projects.models.RunCompletionUsage :ivar temperature: The sampling temperature used for this run. If not set, defaults to 1. :vartype temperature: float @@ -6613,9 +6806,9 @@ class TruncationObject(_model_base.Model): context window of the run. :ivar type: The truncation strategy to use for the thread. 
The default is ``auto``. If set to - ``last_messages``\\ , the thread will + ``last_messages``, the thread will be truncated to the ``lastMessages`` count most recent messages in the thread. When set to - ``auto``\\ , messages in the middle of the thread + ``auto``, messages in the middle of the thread will be dropped to fit the context length of the model, ``max_prompt_tokens``. Required. Known values are: "auto" and "last_messages". :vartype type: str or ~azure.ai.projects.models.TruncationStrategy @@ -6630,7 +6823,7 @@ class TruncationObject(_model_base.Model): """The truncation strategy to use for the thread. The default is ``auto``. If set to ``last_messages``, the thread will be truncated to the ``lastMessages`` count most recent messages in the thread. When set to - ``auto`` , messages in the middle of the thread + ``auto``, messages in the middle of the thread will be dropped to fit the context length of the model, ``max_prompt_tokens``. Required. Known values are: \"auto\" and \"last_messages\".""" last_messages: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -6767,6 +6960,52 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class UploadFileRequest(_model_base.Model): + """UploadFileRequest. + + :ivar file: The file data, in bytes. Required. + :vartype file: ~azure.ai.projects._vendor.FileType + :ivar purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and + Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and + ``fine-tune`` for Fine-tuning. Required. Known values are: "fine-tune", "fine-tune-results", + "assistants", "assistants_output", "batch", "batch_output", and "vision". + :vartype purpose: str or ~azure.ai.projects.models.FilePurpose + :ivar filename: The name of the file. 
+ :vartype filename: str + """ + + file: FileType = rest_field( + visibility=["read", "create", "update", "delete", "query"], is_multipart_file_input=True + ) + """The file data, in bytes. Required.""" + purpose: Union[str, "_models.FilePurpose"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The intended purpose of the uploaded file. Use ``assistants`` for Agents and Message files, + ``vision`` for Agents image file inputs, ``batch`` for Batch API, and ``fine-tune`` for + Fine-tuning. Required. Known values are: \"fine-tune\", \"fine-tune-results\", \"assistants\", + \"assistants_output\", \"batch\", \"batch_output\", and \"vision\".""" + filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the file.""" + + @overload + def __init__( + self, + *, + file: FileType, + purpose: Union[str, "_models.FilePurpose"], + filename: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + class VectorStore(_model_base.Model): """A vector store is a collection of processed files can be used by the ``file_search`` tool. diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index 205b2ff5ff70..8de30b3222cb 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -781,7 +781,7 @@ def __init__( :type index_connection_id: str :param index_name: Name of Index in search resource to be used by tool. :type index_name: str - :param query_type: Type of query in an AIIndexResource attached to this agent. + :param query_type: Type of query in an AIIndexResource attached to this agent. 
Default value is AzureAISearchQueryType.SIMPLE. :type query_type: AzureAISearchQueryType :param filter: Odata filter string for search resource. diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py index bacb9513b236..7df55ab6d2d6 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py @@ -9,7 +9,7 @@ from io import IOBase import json import sys -from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TYPE_CHECKING, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, List, Optional, TYPE_CHECKING, TypeVar, Union, overload import urllib.parse from azure.core import PipelineClient @@ -33,7 +33,7 @@ from .._configuration import AIProjectClientConfiguration from .._model_base import SdkJSONEncoder, _deserialize from .._serialization import Deserializer, Serializer -from .._vendor import FileType, prepare_multipart_form_data +from .._vendor import prepare_multipart_form_data if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -778,7 +778,7 @@ def build_agents_get_file_content_request(file_id: str, **kwargs: Any) -> HttpRe _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01-preview")) - accept = _headers.pop("Accept", "application/json") + accept = _headers.pop("Accept", "application/octet-stream") # Construct URL _url = "/files/{fileId}/content" @@ -1731,12 +1731,16 @@ def create_agent( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", 
self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1822,12 +1826,16 @@ def list_agents( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1887,12 +1895,16 @@ def get_agent(self, agent_id: str, **kwargs: Any) -> _models.Agent: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": 
self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2138,12 +2150,16 @@ def update_agent( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2203,12 +2219,16 @@ def delete_agent(self, agent_id: str, **kwargs: Any) -> 
_models.AgentDeletionSta params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2368,12 +2388,16 @@ def create_thread( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, 
"str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2433,12 +2457,16 @@ def get_thread(self, thread_id: str, **kwargs: Any) -> _models.AgentThread: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2601,12 +2629,16 @@ def update_thread( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + 
"self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2666,12 +2698,16 @@ def delete_thread(self, thread_id: str, **kwargs: Any) -> _models.ThreadDeletion params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2719,12 +2755,11 @@ def create_message( :type thread_id: str :keyword role: The role of the entity that is creating the message. Allowed values include: - - * ``user``\\ : Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. 
+ * `user`: Indicates the message is sent by an actual user and should be used in most + cases to represent user-generated messages. + * `assistant`: Indicates the message is generated by the agent. Use this value to insert + messages from the agent into the + conversation. Known values are: "user" and "assistant". Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole :keyword content: The textual content of the initial message. Currently, robust input including images and annotated text may only be provided via @@ -2802,12 +2837,11 @@ def create_message( :type body: JSON or IO[bytes] :keyword role: The role of the entity that is creating the message. Allowed values include: - - * ``user``\\ : Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + * `user`: Indicates the message is sent by an actual user and should be used in most + cases to represent user-generated messages. + * `assistant`: Indicates the message is generated by the agent. Use this value to insert + messages from the agent into the + conversation. Known values are: "user" and "assistant". Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole :keyword content: The textual content of the initial message. 
Currently, robust input including images and annotated text may only be provided via @@ -2862,12 +2896,16 @@ def create_message( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -2961,12 +2999,16 @@ def list_messages( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", 
skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3029,12 +3071,16 @@ def get_message(self, thread_id: str, message_id: str, **kwargs: Any) -> _models params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3194,12 +3240,16 @@ def update_message( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", 
self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3284,8 +3334,8 @@ def create_run( :keyword tools: The overridden list of enabled tools that the agent should use to run the thread. Default value is None. :paramtype tools: list[~azure.ai.projects.models.ToolDefinition] - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. :paramtype stream_parameter: bool @@ -3454,8 +3504,8 @@ def create_run( :keyword tools: The overridden list of enabled tools that the agent should use to run the thread. Default value is None. :paramtype tools: list[~azure.ai.projects.models.ToolDefinition] - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. 
:paramtype stream_parameter: bool @@ -3564,12 +3614,16 @@ def create_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3659,12 +3713,16 @@ def list_runs( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": 
self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3727,12 +3785,16 @@ def get_run(self, thread_id: str, run_id: str, **kwargs: Any) -> _models.ThreadR params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -3892,12 +3954,16 @@ def update_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", 
skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4069,12 +4135,16 @@ def submit_tool_outputs_to_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4137,12 +4207,16 @@ def cancel_run(self, thread_id: str, run_id: str, **kwargs: Any) -> _models.Thre params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": 
self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4217,8 +4291,8 @@ def create_thread_and_run( :keyword tool_resources: Override the tools the agent can use for this run. This is useful for modifying the behavior on a per-run basis. Default value is None. :paramtype tool_resources: ~azure.ai.projects.models.UpdateToolResourcesOptions - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. :paramtype stream_parameter: bool @@ -4351,8 +4425,8 @@ def create_thread_and_run( :keyword tool_resources: Override the tools the agent can use for this run. This is useful for modifying the behavior on a per-run basis. Default value is None. :paramtype tool_resources: ~azure.ai.projects.models.UpdateToolResourcesOptions - :keyword stream_parameter: If ``true``\\ , returns a stream of events that happen during the - Run as server-sent events, + :keyword stream_parameter: If ``true``, returns a stream of events that happen during the Run + as server-sent events, terminating when the Run enters a terminal state with a ``data: [DONE]`` message. Default value is None. 
:paramtype stream_parameter: bool @@ -4459,12 +4533,16 @@ def create_thread_and_run( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4544,12 +4622,16 @@ def get_run_step( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": 
self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4650,12 +4732,16 @@ def list_run_steps( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4719,12 +4805,16 @@ def list_files( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + 
"self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4755,20 +4845,11 @@ def list_files( return deserialized # type: ignore @overload - def upload_file( - self, *, file: FileType, purpose: Union[str, _models.FilePurpose], filename: Optional[str] = None, **kwargs: Any - ) -> _models.OpenAIFile: + def upload_file(self, body: _models.UploadFileRequest, **kwargs: Any) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :keyword file: The file data, in bytes. Required. - :paramtype file: ~azure.ai.projects._vendor.FileType - :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. - :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose - :keyword filename: The name of the file. Default value is None. - :paramtype filename: str + :param body: Multipart body. Required. + :type body: ~azure.ai.projects.models.UploadFileRequest :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile :raises ~azure.core.exceptions.HttpResponseError: @@ -4778,7 +4859,7 @@ def upload_file( def upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Required. + :param body: Multipart body. Required. :type body: JSON :return: OpenAIFile. 
The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile @@ -4786,28 +4867,11 @@ def upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: """ @distributed_trace - def upload_file( - self, - body: JSON = _Unset, - *, - file: FileType = _Unset, - purpose: Union[str, _models.FilePurpose] = _Unset, - filename: Optional[str] = None, - **kwargs: Any - ) -> _models.OpenAIFile: + def upload_file(self, body: Union[_models.UploadFileRequest, JSON], **kwargs: Any) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Is one of the following types: JSON Required. - :type body: JSON - :keyword file: The file data, in bytes. Required. - :paramtype file: ~azure.ai.projects._vendor.FileType - :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. - :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose - :keyword filename: The name of the file. Default value is None. - :paramtype filename: str + :param body: Multipart body. Is either a UploadFileRequest type or a JSON type. Required. + :type body: ~azure.ai.projects.models.UploadFileRequest or JSON :return: OpenAIFile. 
The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile :raises ~azure.core.exceptions.HttpResponseError: @@ -4825,13 +4889,6 @@ def upload_file( cls: ClsType[_models.OpenAIFile] = kwargs.pop("cls", None) - if body is _Unset: - if file is _Unset: - raise TypeError("missing required argument: file") - if purpose is _Unset: - raise TypeError("missing required argument: purpose") - body = {"file": file, "filename": filename, "purpose": purpose} - body = {k: v for k, v in body.items() if v is not None} _body = body.as_dict() if isinstance(body, _model_base.Model) else body _file_fields: List[str] = ["file"] _data_fields: List[str] = ["purpose", "filename"] @@ -4845,12 +4902,16 @@ def upload_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4910,12 +4971,16 @@ def delete_file(self, file_id: str, **kwargs: Any) -> _models.FileDeletionStatus params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - 
"subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -4975,12 +5040,16 @@ def get_file(self, file_id: str, **kwargs: Any) -> _models.OpenAIFile: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5011,13 +5080,13 @@ 
def get_file(self, file_id: str, **kwargs: Any) -> _models.OpenAIFile: return deserialized # type: ignore @distributed_trace - def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: + def _get_file_content(self, file_id: str, **kwargs: Any) -> Iterator[bytes]: """Retrieves the raw content of a specific file. :param file_id: The ID of the file to retrieve. Required. :type file_id: str - :return: bytes - :rtype: bytes + :return: Iterator[bytes] + :rtype: Iterator[bytes] :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -5031,7 +5100,7 @@ def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[bytes] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_agents_get_file_content_request( file_id=file_id, @@ -5040,16 +5109,20 @@ def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, 
**path_format_arguments) - _stream = kwargs.pop("stream", False) + _stream = kwargs.pop("stream", True) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -5065,10 +5138,7 @@ def _get_file_content(self, file_id: str, **kwargs: Any) -> bytes: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(bytes, response.json(), format="base64") + deserialized = response.iter_bytes() if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -5131,12 +5201,16 @@ def list_vector_stores( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5317,12 +5391,16 @@ def create_vector_store( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": 
self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5382,12 +5460,16 @@ def get_vector_store(self, vector_store_id: str, **kwargs: Any) -> _models.Vecto params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5548,12 +5630,16 @@ def 
modify_vector_store( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5614,12 +5700,16 @@ def delete_vector_store(self, vector_store_id: str, **kwargs: Any) -> _models.Ve params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": 
self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5714,12 +5804,16 @@ def list_vector_store_files( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5876,12 +5970,16 @@ def create_vector_store_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": 
self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -5944,12 +6042,16 @@ def get_vector_store_file(self, vector_store_id: str, file_id: str, **kwargs: An params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6017,12 +6119,16 @@ def delete_vector_store_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - 
"self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6179,12 +6285,16 @@ def create_vector_store_file_batch( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6249,12 +6359,16 @@ def get_vector_store_file_batch( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + 
"self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6320,12 +6434,16 @@ def cancel_vector_store_file_batch( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6424,12 +6542,16 @@ def list_vector_store_file_batch_files( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + 
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6504,12 +6626,16 @@ def _get_workspace(self, **kwargs: Any) -> _models._models.GetWorkspaceResponse: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6587,12 +6713,16 @@ def _list_connections( params=_params, ) path_format_arguments = { - "endpoint": 
self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6654,12 +6784,16 @@ def _get_connection(self, connection_name: str, **kwargs: Any) -> _models._model params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = 
self._client.format_url(_request.url, **path_format_arguments) @@ -6755,12 +6889,16 @@ def _get_connection_with_secrets( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6845,12 +6983,16 @@ def _get_app_insights( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True 
), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -6930,12 +7072,16 @@ def get(self, id: str, **kwargs: Any) -> _models.Evaluation: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7056,12 +7202,16 @@ def create(self, evaluation: Union[_models.Evaluation, JSON, IO[bytes]], **kwarg params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + 
"self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7131,14 +7281,18 @@ def prepare_request(next_link=None): params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7156,14 +7310,18 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", 
skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7171,7 +7329,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Evaluation], deserialized["value"]) + list_of_elem = _deserialize(List[_models.Evaluation], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -7297,12 +7455,16 @@ def update( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, 
**path_format_arguments) @@ -7368,12 +7530,16 @@ def get_schedule(self, name: str, **kwargs: Any) -> _models.EvaluationSchedule: params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7511,12 +7677,16 @@ def create_or_replace_schedule( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", 
skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7591,14 +7761,18 @@ def prepare_request(next_link=None): params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7616,14 +7790,18 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str" + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", 
self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -7631,7 +7809,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.EvaluationSchedule], deserialized["value"]) + list_of_elem = _deserialize(List[_models.EvaluationSchedule], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -7683,12 +7861,16 @@ def disable_schedule(self, name: str, **kwargs: Any) -> None: # pylint: disable params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) diff --git 
a/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/__init__.py index 0b712abea098..f1e98bf1be1a 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/__init__.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/__init__.py @@ -12,4 +12,5 @@ ) from exc from ._patch import patch_sdk as _patch_sdk, PromptTemplate + _patch_sdk() diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_patch.py index 166fcceb2f03..13fd07bcac99 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_patch.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -109,7 +110,7 @@ def create_messages(self, data: Optional[Dict[str, Any]] = None, **kwargs) -> Li if self.prompty is not None: parsed = prepare(self.prompty, data) - return parsed # type: ignore + return parsed # type: ignore else: raise ValueError("Please provide valid prompt template") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_azure_ai_search.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_azure_ai_search.py index 0d76e18a0a8c..5f81c81c6423 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_azure_ai_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_azure_ai_search.py @@ -52,11 +52,7 @@ # Initialize agent AI search tool and add the search index connection id ai_search = AzureAISearchTool( - index_connection_id=conn_id, - index_name="sample_index", - query_type=AzureAISearchQueryType.SIMPLE, - top_k=3, - filter="" + index_connection_id=conn_id, index_name="sample_index", query_type=AzureAISearchQueryType.SIMPLE, top_k=3, filter="" ) # Create agent with AI search tool and process assistant run diff --git 
a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py index 7a231513bbd6..895f445f2942 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -36,7 +37,7 @@ import jsonref from azure.ai.projects import AIProjectClient from azure.identity import DefaultAzureCredential -from azure.ai.projects.models import OpenApiTool, OpenApiConnectionAuthDetails, OpenApiConnectionSecurityScheme +from azure.ai.projects.models import OpenApiTool, OpenApiConnectionAuthDetails, OpenApiConnectionSecurityScheme project_client = AIProjectClient.from_connection_string( @@ -50,22 +51,21 @@ print(connection.id) -with open('./tripadvisor_openapi.json', 'r') as f: +with open("./tripadvisor_openapi.json", "r") as f: openapi_spec = jsonref.loads(f.read()) # Create Auth object for the OpenApiTool (note that connection or managed identity auth setup requires additional setup in Azure) auth = OpenApiConnectionAuthDetails(security_scheme=OpenApiConnectionSecurityScheme(connection_id=connection.id)) # Initialize an Agent OpenApi tool using the read in OpenAPI spec -openapi = OpenApiTool(name="get_weather", spec=openapi_spec, description="Retrieve weather information for a location", auth=auth) +openapi = OpenApiTool( + name="get_weather", spec=openapi_spec, description="Retrieve weather information for a location", auth=auth +) # Create an Agent with OpenApi tool and process Agent run with project_client: agent = project_client.agents.create_agent( - model=model_name, - name="my-agent", - instructions="You are a helpful agent", - tools=openapi.definitions + model=model_name, 
name="my-agent", instructions="You are a helpful agent", tools=openapi.definitions ) print(f"Created agent, ID: {agent.id}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_bing_grounding.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_bing_grounding.py index c727d11f5913..1b61f348b4c5 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_bing_grounding.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_bing_grounding.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_bing_grounding.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_bing_grounding.py index 3b3248cb204f..a3bcc4663ca1 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_bing_grounding.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_bing_grounding.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py index 355daa72f0e6..46064dab76df 100644 --- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py +++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -68,8 +69,6 @@ ] messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history) - response = client.complete( - model=model_deployment_name, messages=messages - ) + response = client.complete(model=model_deployment_name, messages=messages) print(response.choices[0].message.content) diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty.py index 8c9a0f3a34a8..c30393246f5f 100644 --- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty.py +++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty.py @@ -52,8 +52,6 @@ ] messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history) - response = client.complete( - model=model_deployment_name, messages=messages - ) + response = client.complete(model=model_deployment_name, messages=messages) print(response.choices[0].message.content) diff --git a/sdk/ai/azure-ai-projects/tests/agents/overload_assert_utils.py 
b/sdk/ai/azure-ai-projects/tests/agents/overload_assert_utils.py index 34863ac43717..8686492e1050 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/overload_assert_utils.py +++ b/sdk/ai/azure-ai-projects/tests/agents/overload_assert_utils.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression import io import json import unittest diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agent_models.py b/sdk/ai/azure-ai-projects/tests/agents/test_agent_models.py index fd68eeb3d427..d03d934b4679 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agent_models.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agent_models.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression from typing import Iterator, List from unittest.mock import Mock, patch import pytest diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agent_models_async.py b/sdk/ai/azure-ai-projects/tests/agents/test_agent_models_async.py index f721502f5b5d..d48d6a2e20f7 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agent_models_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agent_models_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression from typing import Any, AsyncIterator, List from unittest.mock import AsyncMock, patch import pytest diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agents_client.py b/sdk/ai/azure-ai-projects/tests/agents/test_agents_client.py index 1b60f28d302a..cf0e0ad0ca59 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agents_client.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agents_client.py @@ -33,7 +33,7 @@ AgentEventHandler, AgentStreamEvent, AgentThread, - AzureAISearchTool, + AzureAISearchTool, AzureFunctionStorageQueue, AzureFunctionTool, CodeInterpreterTool, @@ -90,7 +90,7 @@ 
azure_ai_projects_agents_tests_data_path="azureml://subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/rg-resour-cegr-oupfoo1/workspaces/abcd-abcdabcdabcda-abcdefghijklm/datastores/workspaceblobstore/paths/LocalUpload/000000000000/product_info_1.md", azure_ai_projects_agents_tests_storage_queue="https://foobar.queue.core.windows.net", azure_ai_projects_agents_tests_search_index_name="sample_index", - azure_ai_projects_agents_tests_search_connection_name="search_connection_name" + azure_ai_projects_agents_tests_search_connection_name="search_connection_name", ) @@ -2817,50 +2817,50 @@ def test_azure_ai_search_tool(self, **kwargs): with self.create_client(**kwargs) as client: assert isinstance(client, AIProjectClient) - # Create AzureAISearchTool - connection_name = kwargs.pop("azure_ai_projects_agents_tests_search_connection_name", "my-search-connection-name") + # Create AzureAISearchTool + connection_name = kwargs.pop( + "azure_ai_projects_agents_tests_search_connection_name", "my-search-connection-name" + ) connection = client.connections.get(connection_name=connection_name) conn_id = connection.id index_name = kwargs.pop("azure_ai_projects_agents_tests_search_index_name", "my-search-index") - + azure_search_tool = AzureAISearchTool( index_connection_id=conn_id, - index_name=index_name, + index_name=index_name, ) - + # Create agent with the search tool agent = client.agents.create_agent( model="gpt-4o", name="search-agent", instructions="You are a helpful assistant that can search for information using Azure AI Search.", tools=azure_search_tool.definitions, - tool_resources=azure_search_tool.resources + tool_resources=azure_search_tool.resources, ) assert agent.id print(f"Created agent with ID: {agent.id}") - + # Create thread thread = client.agents.create_thread() assert thread.id print(f"Created thread with ID: {thread.id}") - + # Create message message = client.agents.create_message( - thread_id=thread.id, - role="user", - content="Search for 
information about iPhone prices." + thread_id=thread.id, role="user", content="Search for information about iPhone prices." ) assert message.id print(f"Created message with ID: {message.id}") - + # Create and process run run = client.agents.create_and_process_run(thread_id=thread.id, agent_id=agent.id) assert run.status == RunStatus.COMPLETED, run.last_error.message - + # List messages to verify tool was used messages = client.agents.list_messages(thread_id=thread.id) assert len(messages.data) > 0 - + # Clean up client.agents.delete_agent(agent.id) print("Deleted agent") diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_overload_assert.py b/sdk/ai/azure-ai-projects/tests/agents/test_overload_assert.py index 422e2b7b1595..c460957fb54d 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_overload_assert.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_overload_assert.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression import unittest import pytest from azure.ai.projects.operations import AgentsOperations diff --git a/sdk/ai/azure-ai-projects/tsp-location.yaml b/sdk/ai/azure-ai-projects/tsp-location.yaml index 08082bba7677..7d212665e707 100644 --- a/sdk/ai/azure-ai-projects/tsp-location.yaml +++ b/sdk/ai/azure-ai-projects/tsp-location.yaml @@ -1,4 +1,4 @@ directory: specification/ai/Azure.AI.Projects -commit: 47ce1b47aef1e5351de994508e32e6ac39128011 +commit: 7ec688b43e7357c29735039d1ebe6aeac84a9e05 repo: Azure/azure-rest-api-specs additionalDirectories: From 8f74eb968fe0b7f092ac034fe6a602a20f403ed9 Mon Sep 17 00:00:00 2001 From: Glenn Harper <64209257+glharper@users.noreply.github.com> Date: Fri, 4 Apr 2025 20:49:55 -0400 Subject: [PATCH 06/13] [AI] [Projects] OpenApi and Custom Bing Search changes (#40345) * [AI] [Projects] add latest TypeSpec commit * auto-genned code in PR branch * update custom code with openapi optional parameter * update openapi sample to use default_parameters param * add bing custom search tool definition * add 
bing custom search sample * mypy error * revert bad doc strings * fix tox sphinx * add content * revert operations upload_file methods * fix one lint issue * make default_parameter an optional * fix lint error - line too long * add type annotation to default_params * update snippets --- sdk/ai/azure-ai-projects/README.md | 10 +- .../ai/projects/aio/operations/_operations.py | 82 +++++++++++------ .../azure/ai/projects/models/_models.py | 28 +++--- .../azure/ai/projects/models/_patch.py | 85 ++++++++++++++++- .../ai/projects/operations/_operations.py | 85 +++++++++++------ .../sample_agents_bing_custom_search.py | 91 +++++++++++++++++++ .../samples/agents/sample_agents_openapi.py | 4 +- 7 files changed, 308 insertions(+), 77 deletions(-) create mode 100644 sdk/ai/azure-ai-projects/samples/agents/sample_agents_bing_custom_search.py diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md index 1d2eb4b90ca5..c668b8ea19d0 100644 --- a/sdk/ai/azure-ai-projects/README.md +++ b/sdk/ai/azure-ai-projects/README.md @@ -463,11 +463,7 @@ print(conn_id) # Initialize agent AI search tool and add the search index connection id ai_search = AzureAISearchTool( - index_connection_id=conn_id, - index_name="sample_index", - query_type=AzureAISearchQueryType.SIMPLE, - top_k=3, - filter="" + index_connection_id=conn_id, index_name="sample_index", query_type=AzureAISearchQueryType.SIMPLE, top_k=3, filter="" ) # Create agent with AI search tool and process assistant run @@ -793,10 +789,10 @@ auth = OpenApiAnonymousAuthDetails() # Initialize agent OpenApi tool using the read in OpenAPI spec openapi_tool = OpenApiTool( - name="get_weather", spec=openapi_weather, description="Retrieve weather information for a location", auth=auth + name="get_weather", spec=openapi_weather, description="Retrieve weather information for a location", auth=auth, default_parameters=["format"] ) openapi_tool.add_definition( - name="get_countries", spec=openapi_countries, 
description="Retrieve a list of countries", auth=auth + name="get_countries", spec=openapi_countries, description="Retrieve a list of countries", auth=auth, ) # Create agent with OpenApi tool and process assistant run diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py index ddb915d67b2a..ef48b4456527 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py @@ -46,7 +46,7 @@ from ... import _model_base, models as _models from ..._model_base import SdkJSONEncoder, _deserialize from ..._serialization import Deserializer, Serializer -from ..._vendor import prepare_multipart_form_data +from ..._vendor import FileType, prepare_multipart_form_data from ...operations._operations import ( build_agents_cancel_run_request, build_agents_cancel_vector_store_file_batch_request, @@ -1359,11 +1359,12 @@ async def create_message( :type thread_id: str :keyword role: The role of the entity that is creating the message. Allowed values include: - * `user`: Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * `assistant`: Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + + * ``user``\\ : Indicates the message is sent by an actual user and should be used in most + cases to represent user-generated messages. + * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert + messages from the agent into the + conversation. Known values are: "user" and "assistant". Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole :keyword content: The textual content of the initial message. 
Currently, robust input including images and annotated text may only be provided via @@ -1441,11 +1442,11 @@ async def create_message( :type body: JSON or IO[bytes] :keyword role: The role of the entity that is creating the message. Allowed values include: - * `user`: Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * `assistant`: Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + * ``user``\\ : Indicates the message is sent by an actual user and should be used in most + cases to represent user-generated messages. + * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert + messages from the agent into the + conversation. Known values are: "user" and "assistant". Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole :keyword content: The textual content of the initial message. Currently, robust input including images and annotated text may only be provided via @@ -3449,11 +3450,20 @@ async def list_files( return deserialized # type: ignore @overload - async def upload_file(self, body: _models.UploadFileRequest, **kwargs: Any) -> _models.OpenAIFile: + async def upload_file( + self, *, file: FileType, purpose: Union[str, _models.FilePurpose], filename: Optional[str] = None, **kwargs: Any + ) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Multipart body. Required. - :type body: ~azure.ai.projects.models.UploadFileRequest + :keyword file: The file data, in bytes. Required. + :paramtype file: ~azure.ai.projects._vendor.FileType + :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and + Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and + ``fine-tune`` for Fine-tuning. 
Known values are: "fine-tune", "fine-tune-results", + "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. + :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose + :keyword filename: The name of the file. Default value is None. + :paramtype filename: str :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile :raises ~azure.core.exceptions.HttpResponseError: @@ -3463,7 +3473,7 @@ async def upload_file(self, body: _models.UploadFileRequest, **kwargs: Any) -> _ async def upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Multipart body. Required. + :param body: Required. :type body: JSON :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile @@ -3471,11 +3481,28 @@ async def upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: """ @distributed_trace_async - async def upload_file(self, body: Union[_models.UploadFileRequest, JSON], **kwargs: Any) -> _models.OpenAIFile: + async def upload_file( + self, + body: JSON = _Unset, + *, + file: FileType = _Unset, + purpose: Union[str, _models.FilePurpose] = _Unset, + filename: Optional[str] = None, + **kwargs: Any + ) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Multipart body. Is either a UploadFileRequest type or a JSON type. Required. - :type body: ~azure.ai.projects.models.UploadFileRequest or JSON + :param body: Is one of the following types: JSON Required. + :type body: JSON + :keyword file: The file data, in bytes. Required. + :paramtype file: ~azure.ai.projects._vendor.FileType + :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and + Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and + ``fine-tune`` for Fine-tuning. 
Known values are: "fine-tune", "fine-tune-results", + "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. + :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose + :keyword filename: The name of the file. Default value is None. + :paramtype filename: str :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile :raises ~azure.core.exceptions.HttpResponseError: @@ -3493,6 +3520,13 @@ async def upload_file(self, body: Union[_models.UploadFileRequest, JSON], **kwar cls: ClsType[_models.OpenAIFile] = kwargs.pop("cls", None) + if body is _Unset: + if file is _Unset: + raise TypeError("missing required argument: file") + if purpose is _Unset: + raise TypeError("missing required argument: purpose") + body = {"file": file, "filename": filename, "purpose": purpose} + body = {k: v for k, v in body.items() if v is not None} _body = body.as_dict() if isinstance(body, _model_base.Model) else body _file_fields: List[str] = ["file"] _data_fields: List[str] = ["purpose", "filename"] @@ -3506,16 +3540,12 @@ async def upload_file(self, body: Union[_models.UploadFileRequest, JSON], **kwar params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True - ), - "projectName": self._serialize.url( - "self._config.project_name", self._config.project_name, "str", skip_quote=True + "self._config.resource_group_name", 
self._config.resource_group_name, "str" ), + "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py index 9a866cc9d1bd..8d8ceb0c68e8 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py @@ -224,7 +224,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class AgentsNamedToolChoice(_model_base.Model): """Specifies a tool the model should use. Use to force the model to call a specific tool. - :ivar type: the type of tool. If type is ``function``, the function name must be set. Required. + :ivar type: the type of tool. If type is ``function``\\, the function name must be set. Required. Known values are: "function", "code_interpreter", "file_search", "bing_grounding", "fabric_dataagent", "sharepoint_grounding", "azure_ai_search", and "bing_custom_search". :vartype type: str or ~azure.ai.projects.models.AgentsNamedToolChoiceType @@ -235,7 +235,7 @@ class AgentsNamedToolChoice(_model_base.Model): type: Union[str, "_models.AgentsNamedToolChoiceType"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """the type of tool. If type is ``function``, the function name must be set. Required. Known + """the type of tool. If type is \"function\" , the function name must be set. Required. Known values are: \"function\", \"code_interpreter\", \"file_search\", \"bing_grounding\", \"fabric_dataagent\", \"sharepoint_grounding\", \"azure_ai_search\", and \"bing_custom_search\".""" @@ -6368,11 +6368,13 @@ class ThreadMessageOptions(_model_base.Model): :ivar role: The role of the entity that is creating the message. 
Allowed values include: - * `user`: Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * `assistant`: Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Required. Known values are: "user" and "assistant". + + * ``user``: Indicates the message is sent by an actual user and should be used in most + cases to represent user-generated messages. + * ``assistant``: Indicates the message is generated by the agent. Use this value to insert + messages from the agent into the conversation. + + Required. Known values are: "user" and "assistant". :vartype role: str or ~azure.ai.projects.models.MessageRole :ivar content: The textual content of the initial message. Currently, robust input including images and annotated text may only be provided via @@ -6390,11 +6392,13 @@ class ThreadMessageOptions(_model_base.Model): role: Union[str, "_models.MessageRole"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The role of the entity that is creating the message. Allowed values include: - * `user`: Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * `assistant`: Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Required. Known values are: \"user\" and \"assistant\".""" + * ``user``: Indicates the message is sent by an actual user and should be used in most + cases to represent user-generated messages. + * ``assistant``: Indicates the message is generated by the agent. Use this value to insert + messages from the agent into the conversation. + + Required. Known values are: \"user\" and \"assistant\". + """ content: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The textual content of the initial message. 
Currently, robust input including images and annotated text may only be provided via diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index 8de30b3222cb..bafc8e4faaa4 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -51,6 +51,7 @@ AzureFunctionStorageQueue, AzureFunctionToolDefinition, AzureFunctionBinding, + BingCustomSearchToolDefinition, BingGroundingToolDefinition, CodeInterpreterToolDefinition, CodeInterpreterToolResource, @@ -71,6 +72,8 @@ RequiredFunctionToolCall, RunStep, RunStepDeltaChunk, + SearchConfiguration, + SearchConfigurationList, SharepointToolDefinition, SubmitToolOutputsAction, ThreadRun, @@ -834,7 +837,13 @@ class OpenApiTool(Tool[OpenApiToolDefinition]): this class also supports adding and removing additional API definitions dynamically. """ - def __init__(self, name: str, description: str, spec: Any, auth: OpenApiAuthDetails): + def __init__( + self, name: str, + description: str, + spec: Any, + auth: OpenApiAuthDetails, + default_parameters: Optional[List[str]] = None + ) -> None: """ Constructor initializes the tool with a primary API definition. @@ -843,11 +852,20 @@ def __init__(self, name: str, description: str, spec: Any, auth: OpenApiAuthDeta :param spec: The API specification. :param auth: Authentication details for the API. :type auth: OpenApiAuthDetails + :param default_parameters: List of OpenAPI spec parameters that will use user-provided defaults. 
+ :type default_parameters: OpenApiAuthDetails """ + default_params: List[str] = [] if default_parameters is None else default_parameters self._default_auth = auth self._definitions: List[OpenApiToolDefinition] = [ OpenApiToolDefinition( - openapi=OpenApiFunctionDefinition(name=name, description=description, spec=spec, auth=auth) + openapi=OpenApiFunctionDefinition( + name=name, + description=description, + spec=spec, + auth=auth, + default_params=default_params + ) ) ] @@ -861,7 +879,14 @@ def definitions(self) -> List[OpenApiToolDefinition]: """ return self._definitions - def add_definition(self, name: str, description: str, spec: Any, auth: Optional[OpenApiAuthDetails] = None) -> None: + def add_definition( + self, + name: str, + description: str, + spec: Any, + auth: Optional[OpenApiAuthDetails] = None, + default_parameters: Optional[List[str]] = None + ) -> None: """ Adds a new API definition dynamically. Raises a ValueError if a definition with the same name already exists. @@ -875,8 +900,12 @@ def add_definition(self, name: str, description: str, spec: Any, auth: Optional[ :param auth: Optional authentication details for this particular API definition. If not provided, the tool's default authentication details will be used. :type auth: Optional[OpenApiAuthDetails] + :param default_parameters: List of OpenAPI spec parameters that will use user-provided defaults. + :type default_parameters: List[str] :raises ValueError: If a definition with the same name exists. """ + default_params: List[str] = [] if default_parameters is None else default_parameters + # Check if a definition with the same name exists. 
if any(definition.openapi.name == name for definition in self._definitions): raise ValueError(f"Definition '{name}' already exists and cannot be added again.") @@ -885,7 +914,13 @@ def add_definition(self, name: str, description: str, spec: Any, auth: Optional[ auth_to_use = auth if auth is not None else self._default_auth new_definition = OpenApiToolDefinition( - openapi=OpenApiFunctionDefinition(name=name, description=description, spec=spec, auth=auth_to_use) + openapi=OpenApiFunctionDefinition( + name=name, + description=description, + spec=spec, + auth=auth_to_use, + default_params=default_params + ) ) self._definitions.append(new_definition) @@ -1019,6 +1054,47 @@ def definitions(self) -> List[BingGroundingToolDefinition]: """ return [BingGroundingToolDefinition(bing_grounding=ToolConnectionList(connection_list=self.connection_ids))] +class BingCustomSearchTool(Tool[BingCustomSearchToolDefinition]): + """ + A tool that searches for information using Bing Custom Search. + """ + + def __init__(self, connection_id: str, instance_name: str): + """ + Initialize Bing Custom Search with a connection_id. + + :param connection_id: Connection ID used by tool. Bing Custom Search tools allow only one connection. + :param instance_name: Config instance name used by tool. + """ + self.connection_ids = [SearchConfiguration(connection_id=connection_id, instance_name=instance_name)] + + @property + def definitions(self) -> List[BingCustomSearchToolDefinition]: + """ + Get the Bing grounding tool definitions. + + :rtype: List[ToolDefinition] + """ + return [ + BingCustomSearchToolDefinition( + bing_custom_search=SearchConfigurationList( + search_configurations=self.connection_ids + ) + ) + ] + + @property + def resources(self) -> ToolResources: + """ + Get the connection tool resources. 
+ + :rtype: ToolResources + """ + return ToolResources() + + def execute(self, tool_call: Any) -> Any: + pass + class FabricTool(ConnectionTool[MicrosoftFabricToolDefinition]): """ @@ -1855,6 +1931,7 @@ def get_last_text_message_by_role(self, role: MessageRole) -> Optional[MessageTe "FileSearchTool", "FunctionTool", "OpenApiTool", + "BingCustomSearchTool", "BingGroundingTool", "StreamEventData", "SharepointTool", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py index 7df55ab6d2d6..22e49bedefc0 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py @@ -33,7 +33,7 @@ from .._configuration import AIProjectClientConfiguration from .._model_base import SdkJSONEncoder, _deserialize from .._serialization import Deserializer, Serializer -from .._vendor import prepare_multipart_form_data +from .._vendor import FileType, prepare_multipart_form_data if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -2755,11 +2755,13 @@ def create_message( :type thread_id: str :keyword role: The role of the entity that is creating the message. Allowed values include: - * `user`: Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * `assistant`: Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + + * ``user``\\ : Indicates the message is sent by an actual user and should be used in most + cases to represent user-generated messages. + * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert + messages from the agent into the + conversation. Known values are: "user" and "assistant". Required. 
+ :paramtype role: str or ~azure.ai.projects.models.MessageRole :keyword content: The textual content of the initial message. Currently, robust input including images and annotated text may only be provided via @@ -2837,11 +2839,12 @@ def create_message( :type body: JSON or IO[bytes] :keyword role: The role of the entity that is creating the message. Allowed values include: - * `user`: Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * `assistant`: Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + + * ``user``\\ : Indicates the message is sent by an actual user and should be used in most + cases to represent user-generated messages. + * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert + messages from the agent into the + conversation. Known values are: "user" and "assistant". Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole :keyword content: The textual content of the initial message. Currently, robust input including images and annotated text may only be provided via @@ -4844,12 +4847,22 @@ def list_files( return deserialized # type: ignore + @overload - def upload_file(self, body: _models.UploadFileRequest, **kwargs: Any) -> _models.OpenAIFile: + def upload_file( + self, *, file: FileType, purpose: Union[str, _models.FilePurpose], filename: Optional[str] = None, **kwargs: Any + ) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Multipart body. Required. - :type body: ~azure.ai.projects.models.UploadFileRequest + :keyword file: The file data, in bytes. Required. + :paramtype file: ~azure.ai.projects._vendor.FileType + :keyword purpose: The intended purpose of the uploaded file. 
Use ``assistants`` for Agents and + Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and + ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", + "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. + :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose + :keyword filename: The name of the file. Default value is None. + :paramtype filename: str :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile :raises ~azure.core.exceptions.HttpResponseError: @@ -4859,7 +4872,7 @@ def upload_file(self, body: _models.UploadFileRequest, **kwargs: Any) -> _models def upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Multipart body. Required. + :param body: Required. :type body: JSON :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile @@ -4867,11 +4880,28 @@ def upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: """ @distributed_trace - def upload_file(self, body: Union[_models.UploadFileRequest, JSON], **kwargs: Any) -> _models.OpenAIFile: + def upload_file( + self, + body: JSON = _Unset, + *, + file: FileType = _Unset, + purpose: Union[str, _models.FilePurpose] = _Unset, + filename: Optional[str] = None, + **kwargs: Any + ) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Multipart body. Is either a UploadFileRequest type or a JSON type. Required. - :type body: ~azure.ai.projects.models.UploadFileRequest or JSON + :param body: Is one of the following types: JSON Required. + :type body: JSON + :keyword file: The file data, in bytes. Required. + :paramtype file: ~azure.ai.projects._vendor.FileType + :keyword purpose: The intended purpose of the uploaded file. 
Use ``assistants`` for Agents and + Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and + ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", + "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. + :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose + :keyword filename: The name of the file. Default value is None. + :paramtype filename: str :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile :raises ~azure.core.exceptions.HttpResponseError: @@ -4889,6 +4919,13 @@ def upload_file(self, body: Union[_models.UploadFileRequest, JSON], **kwargs: An cls: ClsType[_models.OpenAIFile] = kwargs.pop("cls", None) + if body is _Unset: + if file is _Unset: + raise TypeError("missing required argument: file") + if purpose is _Unset: + raise TypeError("missing required argument: purpose") + body = {"file": file, "filename": filename, "purpose": purpose} + body = {k: v for k, v in body.items() if v is not None} _body = body.as_dict() if isinstance(body, _model_base.Model) else body _file_fields: List[str] = ["file"] _data_fields: List[str] = ["purpose", "filename"] @@ -4902,16 +4939,12 @@ def upload_file(self, body: Union[_models.UploadFileRequest, JSON], **kwargs: An params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - "subscriptionId": self._serialize.url( - "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True - ), - "projectName": 
self._serialize.url( - "self._config.project_name", self._config.project_name, "str", skip_quote=True + "self._config.resource_group_name", self._config.resource_group_name, "str" ), + "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_bing_custom_search.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_bing_custom_search.py new file mode 100644 index 000000000000..44825283345b --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_bing_custom_search.py @@ -0,0 +1,91 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +""" +DESCRIPTION: + This sample demonstrates how to use agent operations with the Bing Custom Search tool from + the Azure Agents service using a synchronous client. + +USAGE: + python sample_agents_bing_custom_search.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + 1) PROJECT_CONNECTION_STRING - The project connection string, as found in the overview page of your + Azure AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in + the "Models + endpoints" tab in your Azure AI Foundry project. + 3) BING_CUSTOM_CONNECTION_NAME - The connection name of the Bing Custom Search connection, as found in the + "Connected resources" tab in your Azure AI Foundry project. 
+""" + +import os +from azure.ai.projects import AIProjectClient +from azure.ai.projects.models import MessageRole, BingCustomSearchTool +from azure.identity import DefaultAzureCredential + + +project_client = AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=os.environ["PROJECT_CONNECTION_STRING"], +) + +# [START create_agent_with_bing_custom_search_tool] +bing_custom_connection = project_client.connections.get(connection_name=os.environ["BING_CUSTOM_CONNECTION_NAME"]) +conn_id = bing_custom_connection.id + +print(conn_id) + +# Initialize agent bing custom search tool and add the connection id +bing_custom_tool = BingCustomSearchTool(connection_id=conn_id, instance_name="") + +# Create agent with the bing custom search tool and process assistant run +with project_client: + agent = project_client.agents.create_agent( + model=os.environ["MODEL_DEPLOYMENT_NAME"], + name="my-assistant", + instructions="You are a helpful assistant", + tools=bing_custom_tool.definitions, + headers={"x-ms-enable-preview": "true"}, + ) + # [END create_agent_with_bing_custom_search_tool] + + print(f"Created agent, ID: {agent.id}") + + # Create thread for communication + thread = project_client.agents.create_thread() + print(f"Created thread, ID: {thread.id}") + + # Create message to thread + message = project_client.agents.create_message( + thread_id=thread.id, + role=MessageRole.USER, + content="How many medals did the USA win in the 2024 summer olympics?", + ) + print(f"Created message, ID: {message.id}") + + # Create and process agent run in thread with tools + run = project_client.agents.create_and_process_run(thread_id=thread.id, agent_id=agent.id) + print(f"Run finished with status: {run.status}") + + if run.status == "failed": + print(f"Run failed: {run.last_error}") + + # Delete the assistant when done + project_client.agents.delete_agent(agent.id) + print("Deleted agent") + + # Print the Agent's response message with optional citation + 
response_message = project_client.agents.list_messages(thread_id=thread.id).get_last_message_by_role( + MessageRole.AGENT + ) + if response_message: + for text_message in response_message.text_messages: + print(f"Agent response: {text_message.text.value}") + for annotation in response_message.url_citation_annotations: + print(f"URL Citation: [{annotation.url_citation.title}]({annotation.url_citation.url})") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py index efb0691e8de5..16e1259bca1e 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py @@ -48,10 +48,10 @@ # Initialize agent OpenApi tool using the read in OpenAPI spec openapi_tool = OpenApiTool( - name="get_weather", spec=openapi_weather, description="Retrieve weather information for a location", auth=auth + name="get_weather", spec=openapi_weather, description="Retrieve weather information for a location", auth=auth, default_parameters=["format"] ) openapi_tool.add_definition( - name="get_countries", spec=openapi_countries, description="Retrieve a list of countries", auth=auth + name="get_countries", spec=openapi_countries, description="Retrieve a list of countries", auth=auth, ) # Create agent with OpenApi tool and process assistant run From 2c005be873aaecbff34232fc37bf6a53e1f00a01 Mon Sep 17 00:00:00 2001 From: Glenn Harper <64209257+glharper@users.noreply.github.com> Date: Mon, 14 Apr 2025 11:33:58 -0400 Subject: [PATCH 07/13] [AI] [Projects] add List Threads operation (#40468) * [AI] [Projects] List Threads operation commit from TypeSpec * generated code * show sample usage of list_threads API * add doc link for sample * review feedback --- sdk/ai/azure-ai-projects/CHANGELOG.md | 4 + sdk/ai/azure-ai-projects/README.md | 13 ++ .../azure-ai-projects/apiview-properties.json | 2 + 
.../ai/projects/aio/operations/_operations.py | 96 +++++++++++++ .../azure/ai/projects/models/__init__.py | 2 + .../azure/ai/projects/models/_models.py | 50 +++++++ .../azure/ai/projects/models/_patch.py | 10 +- .../ai/projects/operations/_operations.py | 129 ++++++++++++++++++ .../samples/agents/sample_agents_basics.py | 13 ++ .../sample_agents_bing_custom_search.py | 1 + sdk/ai/azure-ai-projects/tsp-location.yaml | 2 +- 11 files changed, 317 insertions(+), 5 deletions(-) diff --git a/sdk/ai/azure-ai-projects/CHANGELOG.md b/sdk/ai/azure-ai-projects/CHANGELOG.md index f1742d8d10e3..fe79fcb56fa1 100644 --- a/sdk/ai/azure-ai-projects/CHANGELOG.md +++ b/sdk/ai/azure-ai-projects/CHANGELOG.md @@ -4,8 +4,12 @@ ### Features added * Utilities to load prompt template strings and Prompty file content +* Add BingCustomSearchTool class with sample +* Add list_threads API to agents namespace ### Sample updates +* New BingCustomSearchTool sample +* Add list_threads usage to agent basics sample ### Bugs Fixed diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md index c668b8ea19d0..0ff1065b5707 100644 --- a/sdk/ai/azure-ai-projects/README.md +++ b/sdk/ai/azure-ai-projects/README.md @@ -881,6 +881,19 @@ thread = project_client.agents.create_thread(tool_resources=file_search.resource ``` + +#### List Threads + +To list all threads attached to a given agent, use the list_threads API: + + + +```python +threads = project_client.agents.list_threads() +``` + + + #### Create Message To create a message for assistant to process, you pass `user` as `role` and a question as `content`: diff --git a/sdk/ai/azure-ai-projects/apiview-properties.json b/sdk/ai/azure-ai-projects/apiview-properties.json index b19333997594..7164e456ca70 100644 --- a/sdk/ai/azure-ai-projects/apiview-properties.json +++ b/sdk/ai/azure-ai-projects/apiview-properties.json @@ -73,6 +73,7 @@ "azure.ai.projects.models.MicrosoftFabricToolDefinition": 
"Azure.AI.Projects.Agents.MicrosoftFabricToolDefinition", "azure.ai.projects.models.OpenAIFile": "Azure.AI.Projects.Agents.OpenAIFile", "azure.ai.projects.models.OpenAIPageableListOfAgent": "Azure.AI.Projects.Agents.OpenAIPageableListOf", + "azure.ai.projects.models.OpenAIPageableListOfAgentThread": "Azure.AI.Projects.Agents.OpenAIPageableListOf", "azure.ai.projects.models.OpenAIPageableListOfRunStep": "Azure.AI.Projects.Agents.OpenAIPageableListOf", "azure.ai.projects.models.OpenAIPageableListOfThreadMessage": "Azure.AI.Projects.Agents.OpenAIPageableListOf", "azure.ai.projects.models.OpenAIPageableListOfThreadRun": "Azure.AI.Projects.Agents.OpenAIPageableListOf", @@ -222,6 +223,7 @@ "azure.ai.projects.AIProjectClient.agents.get_thread": "Azure.AI.Projects.Agents.getThread", "azure.ai.projects.AIProjectClient.agents.update_thread": "Azure.AI.Projects.Agents.updateThread", "azure.ai.projects.AIProjectClient.agents.delete_thread": "Azure.AI.Projects.Agents.deleteThread", + "azure.ai.projects.AIProjectClient.agents.list_threads": "Azure.AI.Projects.Agents.listThreads", "azure.ai.projects.AIProjectClient.agents.create_message": "Azure.AI.Projects.Agents.createMessage", "azure.ai.projects.AIProjectClient.agents.list_messages": "Azure.AI.Projects.Agents.listMessages", "azure.ai.projects.AIProjectClient.agents.get_message": "Azure.AI.Projects.Agents.getMessage", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py index ef48b4456527..895d5161947f 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py @@ -78,6 +78,7 @@ build_agents_list_messages_request, build_agents_list_run_steps_request, build_agents_list_runs_request, + build_agents_list_threads_request, build_agents_list_vector_store_file_batch_files_request, build_agents_list_vector_store_files_request, 
build_agents_list_vector_stores_request, @@ -1341,6 +1342,101 @@ async def delete_thread(self, thread_id: str, **kwargs: Any) -> _models.ThreadDe return deserialized # type: ignore + @distributed_trace_async + async def list_threads( + self, + *, + limit: Optional[int] = None, + order: Optional[Union[str, _models.ListSortOrder]] = None, + after: Optional[str] = None, + before: Optional[str] = None, + **kwargs: Any + ) -> _models.OpenAIPageableListOfAgentThread: + """Gets a list of threads that were previously created. + + :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. Default value is None. + :paramtype limit: int + :keyword order: Sort order by the created_at timestamp of the objects. asc for ascending order + and desc for descending order. Known values are: "asc" and "desc". Default value is None. + :paramtype order: str or ~azure.ai.projects.models.ListSortOrder + :keyword after: A cursor for use in pagination. after is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, ending with + obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the + list. Default value is None. + :paramtype after: str + :keyword before: A cursor for use in pagination. before is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, ending with + obj_foo, your subsequent call can include before=obj_foo in order to fetch the previous page of + the list. Default value is None. + :paramtype before: str + :return: OpenAIPageableListOfAgentThread. 
The OpenAIPageableListOfAgentThread is compatible + with MutableMapping + :rtype: ~azure.ai.projects.models.OpenAIPageableListOfAgentThread + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.OpenAIPageableListOfAgentThread] = kwargs.pop("cls", None) + + _request = build_agents_list_threads_request( + limit=limit, + order=order, + after=after, + before=before, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), + "resourceGroupName": self._serialize.url( + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = 
response.iter_bytes() + else: + deserialized = _deserialize(_models.OpenAIPageableListOfAgentThread, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + @overload async def create_message( self, diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py index a3e6b9287cd0..23a9fc738fa4 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py @@ -83,6 +83,7 @@ MicrosoftFabricToolDefinition, OpenAIFile, OpenAIPageableListOfAgent, + OpenAIPageableListOfAgentThread, OpenAIPageableListOfRunStep, OpenAIPageableListOfThreadMessage, OpenAIPageableListOfThreadRun, @@ -304,6 +305,7 @@ "MicrosoftFabricToolDefinition", "OpenAIFile", "OpenAIPageableListOfAgent", + "OpenAIPageableListOfAgentThread", "OpenAIPageableListOfRunStep", "OpenAIPageableListOfThreadMessage", "OpenAIPageableListOfThreadRun", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py index 8d8ceb0c68e8..be0fe642c098 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py @@ -3461,6 +3461,56 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.object: Literal["list"] = "list" +class OpenAIPageableListOfAgentThread(_model_base.Model): + """The response data for a requested list of items. + + :ivar object: The object type, which is always list. Required. Default value is "list". + :vartype object: str + :ivar data: The requested list of items. Required. + :vartype data: list[~azure.ai.projects.models.AgentThread] + :ivar first_id: The first ID represented in this list. Required. + :vartype first_id: str + :ivar last_id: The last ID represented in this list. Required. 
+ :vartype last_id: str + :ivar has_more: A value indicating whether there are additional values available not captured + in this list. Required. + :vartype has_more: bool + """ + + object: Literal["list"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type, which is always list. Required. Default value is \"list\".""" + data: List["_models.AgentThread"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The requested list of items. Required.""" + first_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The first ID represented in this list. Required.""" + last_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The last ID represented in this list. Required.""" + has_more: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A value indicating whether there are additional values available not captured in this list. + Required.""" + + @overload + def __init__( + self, + *, + data: List["_models.AgentThread"], + first_id: str, + last_id: str, + has_more: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["list"] = "list" + + class OpenAIPageableListOfRunStep(_model_base.Model): """The response data for a requested list of items. 
diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index bafc8e4faaa4..8f4ad6ab23bc 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -838,11 +838,12 @@ class OpenApiTool(Tool[OpenApiToolDefinition]): """ def __init__( - self, name: str, + self, + name: str, description: str, spec: Any, auth: OpenApiAuthDetails, - default_parameters: Optional[List[str]] = None + default_parameters: Optional[List[str]] = None, ) -> None: """ Constructor initializes the tool with a primary API definition. @@ -885,7 +886,7 @@ def add_definition( description: str, spec: Any, auth: Optional[OpenApiAuthDetails] = None, - default_parameters: Optional[List[str]] = None + default_parameters: Optional[List[str]] = None, ) -> None: """ Adds a new API definition dynamically. @@ -1054,6 +1055,7 @@ def definitions(self) -> List[BingGroundingToolDefinition]: """ return [BingGroundingToolDefinition(bing_grounding=ToolConnectionList(connection_list=self.connection_ids))] + class BingCustomSearchTool(Tool[BingCustomSearchToolDefinition]): """ A tool that searches for information using Bing Custom Search. @@ -1064,7 +1066,7 @@ def __init__(self, connection_id: str, instance_name: str): Initialize Bing Custom Search with a connection_id. :param connection_id: Connection ID used by tool. Bing Custom Search tools allow only one connection. - :param instance_name: Config instance name used by tool. + :param instance_name: Config instance name used by tool. 
""" self.connection_ids = [SearchConfiguration(connection_id=connection_id, instance_name=instance_name)] diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py index 22e49bedefc0..179075fbc124 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py @@ -279,6 +279,40 @@ def build_agents_delete_thread_request(thread_id: str, **kwargs: Any) -> HttpReq return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) +def build_agents_list_threads_request( + *, + limit: Optional[int] = None, + order: Optional[Union[str, _models.ListSortOrder]] = None, + after: Optional[str] = None, + before: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/threads" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if limit is not None: + _params["limit"] = _SERIALIZER.query("limit", limit, "int") + if order is not None: + _params["order"] = _SERIALIZER.query("order", order, "str") + if after is not None: + _params["after"] = _SERIALIZER.query("after", after, "str") + if before is not None: + _params["before"] = _SERIALIZER.query("before", before, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + def build_agents_create_message_request(thread_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -2737,6 +2771,101 @@ def delete_thread(self, thread_id: str, **kwargs: Any) -> _models.ThreadDeletion return deserialized # type: ignore + @distributed_trace + def list_threads( + self, + *, + limit: Optional[int] = None, + order: Optional[Union[str, _models.ListSortOrder]] = None, + after: Optional[str] = None, + before: Optional[str] = None, + **kwargs: Any + ) -> _models.OpenAIPageableListOfAgentThread: + """Gets a list of threads that were previously created. + + :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and + 100, and the default is 20. Default value is None. + :paramtype limit: int + :keyword order: Sort order by the created_at timestamp of the objects. asc for ascending order + and desc for descending order. Known values are: "asc" and "desc". Default value is None. + :paramtype order: str or ~azure.ai.projects.models.ListSortOrder + :keyword after: A cursor for use in pagination. after is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, ending with + obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the + list. Default value is None. + :paramtype after: str + :keyword before: A cursor for use in pagination. before is an object ID that defines your place + in the list. For instance, if you make a list request and receive 100 objects, ending with + obj_foo, your subsequent call can include before=obj_foo in order to fetch the previous page of + the list. Default value is None. + :paramtype before: str + :return: OpenAIPageableListOfAgentThread. 
The OpenAIPageableListOfAgentThread is compatible + with MutableMapping + :rtype: ~azure.ai.projects.models.OpenAIPageableListOfAgentThread + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.OpenAIPageableListOfAgentThread] = kwargs.pop("cls", None) + + _request = build_agents_list_threads_request( + limit=limit, + order=order, + after=after, + before=before, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), + "resourceGroupName": self._serialize.url( + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = 
response.iter_bytes() + else: + deserialized = _deserialize(_models.OpenAIPageableListOfAgentThread, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + @overload def create_message( self, diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_basics.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_basics.py index f18f1425a031..a47bcf876051 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_basics.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_basics.py @@ -49,6 +49,19 @@ thread = project_client.agents.create_thread() # [END create_thread] print(f"Created thread, thread ID: {thread.id}") + + # List all threads for the agent + # [START list_threads] + threads = project_client.agents.list_threads() + # [END list_threads] + + # Print thread information + print(f"Threads for agent {agent.id}:") + for current_thread in threads.data: + print(f"Thread ID: {current_thread.id}") + print(f"Created at: {current_thread.created_at}") + print(f"Metadata: {current_thread.metadata}") + print("---") # [START create_message] message = project_client.agents.create_message(thread_id=thread.id, role="user", content="Hello, tell me a joke") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_bing_custom_search.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_bing_custom_search.py index 44825283345b..aed44c7747f0 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_bing_custom_search.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_bing_custom_search.py @@ -7,6 +7,7 @@ DESCRIPTION: This sample demonstrates how to use agent operations with the Bing Custom Search tool from the Azure Agents service using a synchronous client. 
+ For more information on the Bing Custom Search tool, see: https://aka.ms/AgentCustomSearchDoc USAGE: python sample_agents_bing_custom_search.py diff --git a/sdk/ai/azure-ai-projects/tsp-location.yaml b/sdk/ai/azure-ai-projects/tsp-location.yaml index 7d212665e707..37b86a929f37 100644 --- a/sdk/ai/azure-ai-projects/tsp-location.yaml +++ b/sdk/ai/azure-ai-projects/tsp-location.yaml @@ -1,4 +1,4 @@ directory: specification/ai/Azure.AI.Projects -commit: 7ec688b43e7357c29735039d1ebe6aeac84a9e05 +commit: 18f1b2507531ccb630ee66baf37652db84d4b520 repo: Azure/azure-rest-api-specs additionalDirectories: From 13784c551edbe01171791e5fe097a04955e19054 Mon Sep 17 00:00:00 2001 From: Glenn Harper <64209257+glharper@users.noreply.github.com> Date: Mon, 14 Apr 2025 19:07:35 -0400 Subject: [PATCH 08/13] [AI] [Projects] Fix spec description in OpenApi sample (#40502) --- .../samples/agents/sample_agents_openapi_connection_auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py index 895f445f2942..84e496023a30 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi_connection_auth.py @@ -59,7 +59,7 @@ # Initialize an Agent OpenApi tool using the read in OpenAPI spec openapi = OpenApiTool( - name="get_weather", spec=openapi_spec, description="Retrieve weather information for a location", auth=auth + name="get_location_reviews", spec=openapi_spec, description="Retrieve reviews for a given location", auth=auth ) # Create an Agent with OpenApi tool and process Agent run From 0a15609bc18aca2b548f54451716b1ef98b58712 Mon Sep 17 00:00:00 2001 From: Howie Leung Date: Tue, 15 Apr 2025 12:54:23 -0700 Subject: [PATCH 09/13] Redesign automatic toolcalls (#40342) * Redesign automatic toolcalls * fix test --- 
sdk/ai/azure-ai-projects/CHANGELOG.md | 3 + sdk/ai/azure-ai-projects/README.md | 28 ++++--- .../ai/projects/aio/operations/_patch.py | 79 +++++++++++++----- .../azure/ai/projects/models/_patch.py | 25 ++---- .../ai/projects/operations/_operations.py | 3 +- .../azure/ai/projects/operations/_patch.py | 81 ++++++++++++++----- .../sample_agents_run_with_toolset_async.py | 1 + ..._stream_eventhandler_with_toolset_async.py | 1 + .../samples/agents/multiagent/agent_team.py | 2 + .../multiagent/sample_agents_agent_team.py | 3 +- ...le_agents_agent_team_custom_team_leader.py | 2 + .../sample_agents_multi_agent_team.py | 6 ++ .../samples/agents/sample_agents_basics.py | 4 +- .../samples/agents/sample_agents_openapi.py | 11 ++- .../agents/sample_agents_run_with_toolset.py | 3 + ...agents_stream_eventhandler_with_toolset.py | 1 + ...le_agents_stream_iteration_with_toolset.py | 1 + .../tests/agents/test_agent_operations.py | 43 +++++----- .../agents/test_agent_operations_async.py | 46 +++++------ .../telemetry/test_ai_agents_instrumentor.py | 18 ++--- .../test_ai_agents_instrumentor_async.py | 16 +--- 21 files changed, 237 insertions(+), 140 deletions(-) diff --git a/sdk/ai/azure-ai-projects/CHANGELOG.md b/sdk/ai/azure-ai-projects/CHANGELOG.md index fe79fcb56fa1..c690ff55ac6e 100644 --- a/sdk/ai/azure-ai-projects/CHANGELOG.md +++ b/sdk/ai/azure-ai-projects/CHANGELOG.md @@ -8,11 +8,14 @@ * Add list_threads API to agents namespace ### Sample updates +* Added `project_client.agents.enable_auto_function_calls(toolset=toolset)` to all samples that has `toolcalls` executed by `azure-ai-project` SDK * New BingCustomSearchTool sample * Add list_threads usage to agent basics sample ### Bugs Fixed +### Breaking Changes +The toolset parameter in `create_agents` no longer executes toolcalls automatically during `create_and_process_run` or `create_stream`. To retain this behavior, call `enable_auto_function_calls` without additional changes. 
## 1.0.0b8 (2025-03-28) diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md index 0ff1065b5707..a1403cfb1eeb 100644 --- a/sdk/ai/azure-ai-projects/README.md +++ b/sdk/ai/azure-ai-projects/README.md @@ -280,6 +280,9 @@ toolset = ToolSet() toolset.add(functions) toolset.add(code_interpreter) +# To enable tool calls executed automatically +project_client.agents.enable_auto_function_calls(toolset=toolset) + agent = project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], name="my-assistant", @@ -509,12 +512,7 @@ for message in messages.data: #### Create Agent with Function Call -You can enhance your Agents by defining callback functions as function tools. These can be provided to `create_agent` via either the `toolset` parameter or the combination of `tools` and `tool_resources`. Here are the distinctions: - -- `toolset`: When using the `toolset` parameter, you provide not only the function definitions and descriptions but also their implementations. The SDK will execute these functions within `create_and_run_process` or `streaming` . These functions will be invoked based on their definitions. -- `tools` and `tool_resources`: When using the `tools` and `tool_resources` parameters, only the function definitions and descriptions are provided to `create_agent`, without the implementations. The `Run` or `event handler of stream` will raise a `requires_action` status based on the function definitions. Your code must handle this status and call the appropriate functions. - -For more details about calling functions by code, refer to [`sample_agents_stream_eventhandler_with_functions.py`](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_functions.py) and [`sample_agents_functions.py`](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/sample_agents_functions.py). 
+You can enhance your Agents by defining callback functions as function tools. These can be provided to `create_agent` via either the `toolset` parameter or the combination of `tools` and `tool_resources`. For more details about requirements and specification of functions, refer to [Function Tool Specifications](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/FunctionTool.md) @@ -525,6 +523,7 @@ Here is an example to use [user functions](https://github.com/Azure/azure-sdk-fo functions = FunctionTool(user_functions) toolset = ToolSet() toolset.add(functions) +project_client.agents.enable_auto_function_calls(toolset=toolset) agent = project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], @@ -549,6 +548,7 @@ functions = AsyncFunctionTool(user_async_functions) toolset = AsyncToolSet() toolset.add(functions) +project_client.agents.enable_auto_function_calls(toolset=toolset) agent = await project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], @@ -560,6 +560,9 @@ agent = await project_client.agents.create_agent( +Notices that if `enable_auto_function_calls` is called, the SDK will invoke the functions automatically during `create_and_process_run` or streaming. If you prefer to execute them manually, refer to [`sample_agents_stream_eventhandler_with_functions.py`](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_functions.py) or +[`sample_agents_functions.py`](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/sample_agents_functions.py) + #### Create Agent With Azure Function Call The AI agent leverages Azure Functions triggered asynchronously via Azure Storage Queues. To enable the agent to perform Azure Function calls, you must set up the corresponding `AzureFunctionTool`, specifying input and output queues as well as parameter definitions. 
@@ -789,10 +792,17 @@ auth = OpenApiAnonymousAuthDetails() # Initialize agent OpenApi tool using the read in OpenAPI spec openapi_tool = OpenApiTool( - name="get_weather", spec=openapi_weather, description="Retrieve weather information for a location", auth=auth, default_parameters=["format"] + name="get_weather", + spec=openapi_weather, + description="Retrieve weather information for a location", + auth=auth, + default_parameters=["format"], ) openapi_tool.add_definition( - name="get_countries", spec=openapi_countries, description="Retrieve a list of countries", auth=auth, + name="get_countries", + spec=openapi_countries, + description="Retrieve a list of countries", + auth=auth, ) # Create agent with OpenApi tool and process assistant run @@ -997,7 +1007,7 @@ while run.status in ["queued", "in_progress", "requires_action"]: -To have the SDK poll on your behalf and call `function tools`, use the `create_and_process_run` method. Note that `function tools` will only be invoked if they are provided as `toolset` during the `create_agent` call. +To have the SDK poll on your behalf and call `function tools`, use the `create_and_process_run` method. 
Here is an example: diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py index ca9fe7d7ad6a..95ec7a20ccc0 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py @@ -27,6 +27,8 @@ TextIO, Union, cast, + Callable, + Set, overload, ) @@ -661,7 +663,7 @@ class AgentsOperations(AgentsOperationsGenerated): def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) - self._toolset: Dict[str, _models.AsyncToolSet] = {} + self._function_tool = _models.AsyncFunctionTool(set()) # pylint: disable=arguments-differ @overload @@ -892,8 +894,6 @@ async def create_agent( **kwargs, ) - if toolset is not None: - self._toolset[new_agent.id] = toolset return new_agent # pylint: disable=arguments-differ @@ -1146,7 +1146,6 @@ async def update_agent( return await super().update_agent(body=body, **kwargs) if toolset is not None: - self._toolset[agent_id] = toolset tools = toolset.definitions tool_resources = toolset.resources @@ -1640,11 +1639,9 @@ async def create_and_process_run( # We need tool set only if we are executing local function. In case if # the tool is azure_function we just need to wait when it will be finished. if any(tool_call.type == "function" for tool_call in tool_calls): - toolset = toolset or self._toolset.get(run.agent_id) - if toolset: - tool_outputs = await toolset.execute_tool_calls(tool_calls) - else: - raise ValueError("Toolset is not available in the client.") + toolset = _models.AsyncToolSet() + toolset.add(self._function_tool) + tool_outputs = await toolset.execute_tool_calls(tool_calls) logging.info("Tool outputs: %s", tool_outputs) if tool_outputs: @@ -2333,13 +2330,13 @@ async def _handle_submit_tool_outputs( # We need tool set only if we are executing local function. 
In case if # the tool is azure_function we just need to wait when it will be finished. - if any(tool_call.type == "function" for tool_call in tool_calls): - toolset = self._toolset.get(run.agent_id) - if toolset: - tool_outputs = await toolset.execute_tool_calls(tool_calls) - else: - logger.debug("Toolset is not available in the client.") - return + if ( + any(tool_call.type == "function" for tool_call in tool_calls) + and len(self._function_tool.definitions) > 0 + ): + toolset = _models.AsyncToolSet() + toolset.add(self._function_tool) + tool_outputs = await toolset.execute_tool_calls(tool_calls) logger.info("Tool outputs: %s", tool_outputs) if tool_outputs: @@ -3124,10 +3121,56 @@ async def delete_agent(self, agent_id: str, **kwargs: Any) -> _models.AgentDelet :rtype: ~azure.ai.projects.models.AgentDeletionStatus :raises ~azure.core.exceptions.HttpResponseError: """ - if agent_id in self._toolset: - del self._toolset[agent_id] return await super().delete_agent(agent_id, **kwargs) + @overload + def enable_auto_function_calls(self, *, functions: Set[Callable[..., Any]]) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword functions: A set of callable functions to be used as tools. + :type functions: Set[Callable[..., Any]] + """ + + @overload + def enable_auto_function_calls(self, *, function_tool: _models.AsyncFunctionTool) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword function_tool: An AsyncFunctionTool object representing the tool to be used. + :type function_tool: Optional[_models.AsyncFunctionTool] + """ + + @overload + def enable_auto_function_calls(self, *, toolset: _models.AsyncToolSet) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. 
+ If this is not set, functions must be called manually. + :keyword toolset: An AsyncToolSet object representing the set of tools to be used. + :type toolset: Optional[_models.AsyncToolSet] + """ + + def enable_auto_function_calls( + self, + *, + functions: Optional[Set[Callable[..., Any]]] = None, + function_tool: Optional[_models.AsyncFunctionTool] = None, + toolset: Optional[_models.AsyncToolSet] = None, + ) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword functions: A set of callable functions to be used as tools. + :type functions: Set[Callable[..., Any]] + :keyword function_tool: An AsyncFunctionTool object representing the tool to be used. + :type function_tool: Optional[_models.AsyncFunctionTool] + :keyword toolset: An AsyncToolSet object representing the set of tools to be used. + :type toolset: Optional[_models.AsyncToolSet] + """ + if functions: + self._function_tool = _models.AsyncFunctionTool(functions) + elif function_tool: + self._function_tool = function_tool + elif toolset: + tool = toolset.get_tool(_models.AsyncFunctionTool) + self._function_tool = tool + class _SyncCredentialWrapper(TokenCredential): """ diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index 8f4ad6ab23bc..ad865cc6adbe 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -693,7 +693,6 @@ def _get_func_and_args(self, tool_call: RequiredFunctionToolCall) -> Tuple[Any, arguments = tool_call.function.arguments if function_name not in self._functions: - logging.error("Function '%s' not found.", function_name) raise ValueError(f"Function '{function_name}' not found.") function = self._functions[function_name] @@ -701,11 +700,9 @@ def _get_func_and_args(self, tool_call: 
RequiredFunctionToolCall) -> Tuple[Any, try: parsed_arguments = json.loads(arguments) except json.JSONDecodeError as e: - logging.error("Invalid JSON arguments for function '%s': %s", function_name, e) raise ValueError(f"Invalid JSON arguments: {e}") from e if not isinstance(parsed_arguments, dict): - logging.error("Arguments must be a JSON object for function '%s'.", function_name) raise TypeError("Arguments must be a JSON object.") return function, parsed_arguments @@ -734,9 +731,8 @@ def resources(self) -> ToolResources: class FunctionTool(BaseFunctionTool): def execute(self, tool_call: RequiredFunctionToolCall) -> Any: - function, parsed_arguments = self._get_func_and_args(tool_call) - try: + function, parsed_arguments = self._get_func_and_args(tool_call) return function(**parsed_arguments) if parsed_arguments else function() except TypeError as e: error_message = f"Error executing function '{tool_call.function.name}': {e}" @@ -861,11 +857,7 @@ def __init__( self._definitions: List[OpenApiToolDefinition] = [ OpenApiToolDefinition( openapi=OpenApiFunctionDefinition( - name=name, - description=description, - spec=spec, - auth=auth, - default_params=default_params + name=name, description=description, spec=spec, auth=auth, default_params=default_params ) ) ] @@ -916,11 +908,7 @@ def add_definition( new_definition = OpenApiToolDefinition( openapi=OpenApiFunctionDefinition( - name=name, - description=description, - spec=spec, - auth=auth_to_use, - default_params=default_params + name=name, description=description, spec=spec, auth=auth_to_use, default_params=default_params ) ) self._definitions.append(new_definition) @@ -1079,9 +1067,7 @@ def definitions(self) -> List[BingCustomSearchToolDefinition]: """ return [ BingCustomSearchToolDefinition( - bing_custom_search=SearchConfigurationList( - search_configurations=self.connection_ids - ) + bing_custom_search=SearchConfigurationList(search_configurations=self.connection_ids) ) ] @@ -1390,7 +1376,8 @@ def 
execute_tool_calls(self, tool_calls: List[Any]) -> Any: } tool_outputs.append(tool_output) except Exception as e: # pylint: disable=broad-exception-caught - logging.error("Failed to execute tool call %s: %s", tool_call, e) + tool_output = {"tool_call_id": tool_call.id, "output": str(e)} + tool_outputs.append(tool_output) return tool_outputs diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py index 179075fbc124..84b7bd774e79 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py @@ -33,7 +33,7 @@ from .._configuration import AIProjectClientConfiguration from .._model_base import SdkJSONEncoder, _deserialize from .._serialization import Deserializer, Serializer -from .._vendor import FileType, prepare_multipart_form_data +from .._vendor import FileType, prepare_multipart_form_data if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -4976,7 +4976,6 @@ def list_files( return deserialized # type: ignore - @overload def upload_file( self, *, file: FileType, purpose: Union[str, _models.FilePurpose], filename: Optional[str] = None, **kwargs: Any diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py index 0db1e7c1a4b8..5b18bfc78dfd 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py @@ -24,6 +24,8 @@ Sequence, TextIO, Union, + Callable, + Set, cast, overload, ) @@ -841,7 +843,7 @@ class AgentsOperations(AgentsOperationsGenerated): def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) - self._toolset: Dict[str, _models.ToolSet] = {} + self._function_tool = _models.FunctionTool(set()) # pylint: disable=arguments-differ @overload @@ -1073,8 +1075,6 
@@ def create_agent( **kwargs, ) - if toolset is not None: - self._toolset[new_agent.id] = toolset return new_agent # pylint: disable=arguments-differ @@ -1327,7 +1327,6 @@ def update_agent( return super().update_agent(body=body, **kwargs) if toolset is not None: - self._toolset[agent_id] = toolset tools = toolset.definitions tool_resources = toolset.resources @@ -1823,11 +1822,9 @@ def create_and_process_run( # We need tool set only if we are executing local function. In case if # the tool is azure_function we just need to wait when it will be finished. if any(tool_call.type == "function" for tool_call in tool_calls): - toolset = toolset or self._toolset.get(run.agent_id) - if toolset is not None: - tool_outputs = toolset.execute_tool_calls(tool_calls) - else: - raise ValueError("Toolset is not available in the client.") + toolset = _models.ToolSet() + toolset.add(self._function_tool) + tool_outputs = toolset.execute_tool_calls(tool_calls) logging.info("Tool outputs: %s", tool_outputs) if tool_outputs: @@ -2520,13 +2517,14 @@ def _handle_submit_tool_outputs(self, run: _models.ThreadRun, event_handler: _mo # We need tool set only if we are executing local function. In case if # the tool is azure_function we just need to wait when it will be finished. 
- if any(tool_call.type == "function" for tool_call in tool_calls): - toolset = self._toolset.get(run.agent_id) - if toolset: - tool_outputs = toolset.execute_tool_calls(tool_calls) - else: - logger.debug("Toolset is not available in the client.") - return + if ( + any(tool_call.type == "function" for tool_call in tool_calls) + and len(self._function_tool.definitions) > 0 + ): + + toolset = _models.ToolSet() + toolset.add(self._function_tool) + tool_outputs = toolset.execute_tool_calls(tool_calls) logger.info("Tool outputs: %s", tool_outputs) if tool_outputs: @@ -3304,10 +3302,57 @@ def delete_agent(self, agent_id: str, **kwargs: Any) -> _models.AgentDeletionSta :rtype: ~azure.ai.projects.models.AgentDeletionStatus :raises ~azure.core.exceptions.HttpResponseError: """ - if agent_id in self._toolset: - del self._toolset[agent_id] return super().delete_agent(agent_id, **kwargs) + @overload + def enable_auto_function_calls(self, *, functions: Set[Callable[..., Any]]) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword functions: A set of callable functions to be used as tools. + :type functions: Set[Callable[..., Any]] + """ + + @overload + def enable_auto_function_calls(self, *, function_tool: _models.FunctionTool) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword function_tool: A FunctionTool object representing the tool to be used. + :type function_tool: Optional[_models.FunctionTool] + """ + + @overload + def enable_auto_function_calls(self, *, toolset: _models.ToolSet) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword toolset: A ToolSet object representing the set of tools to be used. 
+ :type toolset: Optional[_models.ToolSet] + """ + + @distributed_trace + def enable_auto_function_calls( + self, + *, + functions: Optional[Set[Callable[..., Any]]] = None, + function_tool: Optional[_models.FunctionTool] = None, + toolset: Optional[_models.ToolSet] = None, + ) -> None: + """Enables tool calls to be executed automatically during create_and_process_run or streaming. + If this is not set, functions must be called manually. + :keyword functions: A set of callable functions to be used as tools. + :type functions: Set[Callable[..., Any]] + :keyword function_tool: A FunctionTool object representing the tool to be used. + :type function_tool: Optional[_models.FunctionTool] + :keyword toolset: A ToolSet object representing the set of tools to be used. + :type toolset: Optional[_models.ToolSet] + """ + if functions: + self._function_tool = _models.FunctionTool(functions) + elif function_tool: + self._function_tool = function_tool + elif toolset: + tool = toolset.get_tool(_models.FunctionTool) + self._function_tool = tool + __all__: List[str] = [ "AgentsOperations", diff --git a/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_run_with_toolset_async.py b/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_run_with_toolset_async.py index a57cb6cb93fc..2e2f33a6fad0 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_run_with_toolset_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_run_with_toolset_async.py @@ -40,6 +40,7 @@ async def main() -> None: toolset = AsyncToolSet() toolset.add(functions) + project_client.agents.enable_auto_function_calls(toolset=toolset) agent = await project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], diff --git a/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_stream_eventhandler_with_toolset_async.py 
b/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_stream_eventhandler_with_toolset_async.py index 781c9fe4b190..041bc8fcd499 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_stream_eventhandler_with_toolset_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/async_samples/sample_agents_stream_eventhandler_with_toolset_async.py @@ -70,6 +70,7 @@ async def main() -> None: toolset = AsyncToolSet() toolset.add(functions) + project_client.agents.enable_auto_function_calls(functions=user_async_functions) agent = await project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], name="my-assistant", diff --git a/sdk/ai/azure-ai-projects/samples/agents/multiagent/agent_team.py b/sdk/ai/azure-ai-projects/samples/agents/multiagent/agent_team.py index 6f1b99c28915..560117e427c7 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/multiagent/agent_team.py +++ b/sdk/ai/azure-ai-projects/samples/agents/multiagent/agent_team.py @@ -332,6 +332,8 @@ def process_request(self, request: str) -> None: ) print(f"Created and processed run for agent '{agent.name}', run ID: {run.id}") messages = self._project_client.agents.list_messages(thread_id=self._agent_thread.id) + print(run) + print(messages) text_message = messages.get_last_text_message_by_role(role=MessageRole.AGENT) if text_message and text_message.text: print(f"Agent '{agent.name}' completed task. 
" f"Outcome: {text_message.text.value}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team.py b/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team.py index 184a7ef20ca5..034a7d7acf94 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team.py +++ b/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team.py @@ -22,7 +22,7 @@ import os from azure.ai.projects import AIProjectClient from azure.identity import DefaultAzureCredential -from agent_team import AgentTeam +from agent_team import AgentTeam, _create_task from agent_trace_configurator import AgentTraceConfigurator project_client = AIProjectClient.from_connection_string( @@ -35,6 +35,7 @@ if model_deployment_name is not None: AgentTraceConfigurator(project_client=project_client).setup_tracing() with project_client: + project_client.agents.enable_auto_function_calls(functions={_create_task}) agent_team = AgentTeam("test_team", project_client=project_client) agent_team.add_agent( model=model_deployment_name, diff --git a/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team_custom_team_leader.py b/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team_custom_team_leader.py index 62a270799eee..039ca3071543 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team_custom_team_leader.py +++ b/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_agent_team_custom_team_leader.py @@ -73,6 +73,8 @@ def create_task(team_name: str, recipient: str, request: str, requestor: str) -> agent_team = AgentTeam("test_team", project_client=project_client) toolset = ToolSet() toolset.add(default_function_tool) + + project_client.agents.enable_auto_function_calls(toolset=toolset) agent_team.set_team_leader( model=model_deployment_name, name="TeamLeader", diff --git 
a/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_multi_agent_team.py b/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_multi_agent_team.py index ae9c05323d22..0f10a464e6ba 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_multi_agent_team.py +++ b/sdk/ai/azure-ai-projects/samples/agents/multiagent/sample_agents_multi_agent_team.py @@ -42,6 +42,12 @@ model_deployment_name = os.getenv("MODEL_DEPLOYMENT_NAME") +project_client.agents.enable_auto_function_calls( + function_tool=FunctionTool( + {fetch_current_datetime, fetch_weather, send_email_using_recipient_name, convert_temperature} + ) +) + if model_deployment_name is not None: AgentTraceConfigurator(project_client=project_client).setup_tracing() with project_client: diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_basics.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_basics.py index a47bcf876051..4b9305af9c44 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_basics.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_basics.py @@ -49,12 +49,12 @@ thread = project_client.agents.create_thread() # [END create_thread] print(f"Created thread, thread ID: {thread.id}") - + # List all threads for the agent # [START list_threads] threads = project_client.agents.list_threads() # [END list_threads] - + # Print thread information print(f"Threads for agent {agent.id}:") for current_thread in threads.data: diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py index 16e1259bca1e..c0cc94b770e8 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py @@ -48,10 +48,17 @@ # Initialize agent OpenApi tool using the read in OpenAPI spec openapi_tool = OpenApiTool( - name="get_weather", spec=openapi_weather, description="Retrieve weather 
information for a location", auth=auth, default_parameters=["format"] + name="get_weather", + spec=openapi_weather, + description="Retrieve weather information for a location", + auth=auth, + default_parameters=["format"], ) openapi_tool.add_definition( - name="get_countries", spec=openapi_countries, description="Retrieve a list of countries", auth=auth, + name="get_countries", + spec=openapi_countries, + description="Retrieve a list of countries", + auth=auth, ) # Create agent with OpenApi tool and process assistant run diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_run_with_toolset.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_run_with_toolset.py index 5f734cefa31b..51c3f2a90993 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_run_with_toolset.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_run_with_toolset.py @@ -44,6 +44,9 @@ toolset.add(functions) toolset.add(code_interpreter) + # To enable tool calls executed automatically + project_client.agents.enable_auto_function_calls(toolset=toolset) + agent = project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], name="my-assistant", diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_toolset.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_toolset.py index a42b1949d31e..ad709cf9c1ad 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_toolset.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_toolset.py @@ -77,6 +77,7 @@ def on_unhandled_event(self, event_type: str, event_data: Any) -> None: functions = FunctionTool(user_functions) toolset = ToolSet() toolset.add(functions) + project_client.agents.enable_auto_function_calls(toolset=toolset) agent = project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], diff --git 
a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_toolset.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_toolset.py index 52c8151a7d12..ee9010a02054 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_toolset.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_iteration_with_toolset.py @@ -44,6 +44,7 @@ toolset.add(functions) with project_client: + project_client.agents.enable_auto_function_calls(toolset=toolset) agent = project_client.agents.create_agent( model=os.environ["MODEL_DEPLOYMENT_NAME"], name="my-assistant", diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations.py b/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations.py index 4a1b8e00217a..76675ef5a884 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations.py @@ -213,12 +213,20 @@ def _assert_tool_call(self, submit_tool_mock: MagicMock, run_id: str, tool_set: else: submit_tool_mock.assert_not_called() - def _assert_toolset_dict(self, project_client: AIProjectClient, agent_id: str, toolset: Optional[ToolSet]): - """Check that the tool set dictionary state is as expected.""" - if toolset is None: - assert agent_id not in project_client.agents._toolset - else: - assert project_client.agents._toolset.get(agent_id) is not None + def _set_toolcalls( + self, project_client: AgentsOperations, toolset1: Optional[ToolSet], toolset2: Optional[ToolSet] + ) -> None: + """Set the tool calls for the agent.""" + if toolset1 and toolset2: + function_in_toolset1 = set(toolset1.get_tool(tool_type=FunctionTool)._functions.values()) + function_in_toolset2 = set(toolset2.get_tool(tool_type=FunctionTool)._functions.values()) + function_tool = FunctionTool(function_in_toolset1) + function_tool.add_functions(function_in_toolset2) + project_client.enable_auto_function_calls(function_tool=function_tool) 
+ elif toolset1: + project_client.enable_auto_function_calls(toolset=toolset1) + elif toolset2: + project_client.enable_auto_function_calls(toolset=toolset2) @patch("azure.ai.projects._patch.PipelineClient") @pytest.mark.parametrize( @@ -261,6 +269,7 @@ def test_multiple_agents_create( mock_pipeline_client_gen.return_value = mock_pipeline project_client = self.get_mock_client() with project_client: + self._set_toolcalls(project_client.agents, toolset1, toolset2) # Check that pipelines are created as expected. agent1 = project_client.agents.create_agent( model="gpt-4-1106-preview", @@ -283,16 +292,9 @@ def test_multiple_agents_create( project_client.agents.create_and_process_run(thread_id="some_thread_id", agent_id=agent2.id) self._assert_tool_call(project_client.agents.submit_tool_outputs_to_run, "run456", toolset2) - # Check the contents of a toolset - self._assert_toolset_dict(project_client, agent1.id, toolset1) - self._assert_toolset_dict(project_client, agent2.id, toolset2) # Check that we cleanup tools after deleting agent. 
project_client.agents.delete_agent(agent1.id) - self._assert_toolset_dict(project_client, agent1.id, None) - self._assert_toolset_dict(project_client, agent2.id, toolset2) project_client.agents.delete_agent(agent2.id) - self._assert_toolset_dict(project_client, agent1.id, None) - self._assert_toolset_dict(project_client, agent2.id, None) @patch("azure.ai.projects._patch.PipelineClient") @pytest.mark.parametrize( @@ -333,12 +335,11 @@ def test_update_agent_tools( instructions="You are a helpful assistant", toolset=toolset1, ) - self._assert_toolset_dict(project_client, agent1.id, toolset1) - project_client.agents.update_agent(agent1.id, toolset=toolset2) + agent1 = project_client.agents.update_agent(agent1.id, toolset=toolset2) if toolset2 is None: - self._assert_toolset_dict(project_client, agent1.id, toolset1) + assert agent1.tools == None else: - self._assert_toolset_dict(project_client, agent1.id, toolset2) + assert agent1.tools[0].function.name == function2.__name__ @patch("azure.ai.projects._patch.PipelineClient") @pytest.mark.parametrize( @@ -382,6 +383,7 @@ def test_create_run_tools_override( project_client = self.get_mock_client() with project_client: # Check that pipelines are created as expected. + self._set_toolcalls(project_client.agents, toolset1, toolset2) agent1 = project_client.agents.create_agent( model="gpt-4-1106-preview", name="first", @@ -389,7 +391,6 @@ def test_create_run_tools_override( toolset=toolset1, ) self._assert_pipeline_and_reset(mock_pipeline._pipeline.run, tool_set=toolset1) - self._assert_toolset_dict(project_client, agent1.id, toolset1) # Create run with new tool set, which also can be none. 
project_client.agents.create_and_process_run( @@ -399,7 +400,6 @@ def test_create_run_tools_override( self._assert_tool_call(project_client.agents.submit_tool_outputs_to_run, "run123", toolset2) else: self._assert_tool_call(project_client.agents.submit_tool_outputs_to_run, "run123", toolset1) - self._assert_toolset_dict(project_client, agent1.id, toolset1) @patch("azure.ai.projects._patch.PipelineClient") @pytest.mark.parametrize( @@ -437,6 +437,7 @@ def test_with_azure_function( project_client = self.get_mock_client() with project_client: # Check that pipelines are created as expected. + self._set_toolcalls(project_client.agents, toolset, None) agent1 = project_client.agents.create_agent( model="gpt-4-1106-preview", name="first", @@ -499,6 +500,7 @@ def test_handle_submit_tool_outputs( project_client = self.get_mock_client() with project_client: # Check that pipelines are created as expected. + self._set_toolcalls(project_client.agents, toolset, None) agent1 = project_client.agents.create_agent( model="gpt-4-1106-preview", name="first", @@ -545,9 +547,8 @@ async def test_create_stream_with_tool_calls(self, mock_submit_tool_outputs_to_r functions = FunctionTool(user_functions) toolset = ToolSet() toolset.add(functions) - operation = AgentsOperations() - operation._toolset = {"asst_01": toolset} + operation.enable_auto_function_calls(toolset=toolset) count = 0 with operation.create_stream(thread_id="thread_id", agent_id="asst_01") as stream: diff --git a/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations_async.py b/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations_async.py index d3ab2004c423..d45b1612d594 100644 --- a/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations_async.py +++ b/sdk/ai/azure-ai-projects/tests/agents/test_agent_operations_async.py @@ -215,12 +215,20 @@ def _assert_tool_call(self, submit_tool_mock: AsyncMock, run_id: str, tool_set: else: submit_tool_mock.assert_not_called() - def _assert_toolset_dict(self, 
project_client: AIProjectClient, agent_id: str, toolset: Optional[AsyncToolSet]): - """Check that the tool set dictionary state is as expected.""" - if toolset is None: - assert agent_id not in project_client.agents._toolset - else: - assert project_client.agents._toolset.get(agent_id) is not None + def _set_toolcalls( + self, project_client: AgentsOperations, toolset1: Optional[AsyncToolSet], toolset2: Optional[AsyncToolSet] + ) -> None: + """Set the tool calls for the agent.""" + if toolset1 and toolset2: + function_in_toolset1 = set(toolset1.get_tool(tool_type=AsyncFunctionTool)._functions.values()) + function_in_toolset2 = set(toolset2.get_tool(tool_type=AsyncFunctionTool)._functions.values()) + function_tool = AsyncFunctionTool(function_in_toolset1) + function_tool.add_functions(function_in_toolset2) + project_client.enable_auto_function_calls(function_tool=function_tool) + elif toolset1: + project_client.enable_auto_function_calls(toolset=toolset1) + elif toolset2: + project_client.enable_auto_function_calls(toolset=toolset2) @pytest.mark.asyncio @patch("azure.ai.projects.aio._patch.AsyncPipelineClient") @@ -264,6 +272,7 @@ async def test_multiple_agents_create( mock_pipeline_client_gen.return_value = mock_pipeline project_client = self.get_mock_client() async with project_client: + self._set_toolcalls(project_client.agents, toolset1, toolset2) # Check that pipelines are created as expected. agent1 = await project_client.agents.create_agent( model="gpt-4-1106-preview", @@ -286,16 +295,6 @@ async def test_multiple_agents_create( await project_client.agents.create_and_process_run(thread_id="some_thread_id", agent_id=agent2.id) self._assert_tool_call(project_client.agents.submit_tool_outputs_to_run, "run456", toolset2) - # Check the contents of a toolset - self._assert_toolset_dict(project_client, agent1.id, toolset1) - self._assert_toolset_dict(project_client, agent2.id, toolset2) - # Check that we cleanup tools after deleting agent. 
- await project_client.agents.delete_agent(agent1.id) - self._assert_toolset_dict(project_client, agent1.id, None) - self._assert_toolset_dict(project_client, agent2.id, toolset2) - await project_client.agents.delete_agent(agent2.id) - self._assert_toolset_dict(project_client, agent1.id, None) - self._assert_toolset_dict(project_client, agent2.id, None) @pytest.mark.asyncio @patch("azure.ai.projects.aio._patch.AsyncPipelineClient") @@ -337,12 +336,11 @@ async def test_update_agent_tools( instructions="You are a helpful assistant", toolset=toolset1, ) - self._assert_toolset_dict(project_client, agent1.id, toolset1) - await project_client.agents.update_agent(agent1.id, toolset=toolset2) + agent1 = await project_client.agents.update_agent(agent1.id, toolset=toolset2) if toolset2 is None: - self._assert_toolset_dict(project_client, agent1.id, toolset1) + assert agent1.tools == None else: - self._assert_toolset_dict(project_client, agent1.id, toolset2) + assert agent1.tools[0].function.name == function2.__name__ @pytest.mark.asyncio @patch("azure.ai.projects.aio._patch.AsyncPipelineClient") @@ -394,8 +392,8 @@ async def test_create_run_tools_override( toolset=toolset1, ) self._assert_pipeline_and_reset(mock_pipeline._pipeline.run, tool_set=toolset1) - self._assert_toolset_dict(project_client, agent1.id, toolset1) + self._set_toolcalls(project_client.agents, toolset1, toolset2) # Create run with new tool set, which also can be none. 
await project_client.agents.create_and_process_run( thread_id="some_thread_id", agent_id=agent1.id, toolset=toolset2 @@ -404,7 +402,6 @@ async def test_create_run_tools_override( self._assert_tool_call(project_client.agents.submit_tool_outputs_to_run, "run123", toolset2) else: self._assert_tool_call(project_client.agents.submit_tool_outputs_to_run, "run123", toolset1) - self._assert_toolset_dict(project_client, agent1.id, toolset1) @pytest.mark.asyncio @patch("azure.ai.projects.aio._patch.AsyncPipelineClient") @@ -441,8 +438,10 @@ async def test_with_azure_function( mock_pipeline._pipeline.run.return_value = mock_pipeline_response mock_pipeline_client_gen.return_value = mock_pipeline project_client = self.get_mock_client() + async with project_client: # Check that pipelines are created as expected. + self._set_toolcalls(project_client.agents, toolset, None) agent1 = await project_client.agents.create_agent( model="gpt-4-1106-preview", name="first", @@ -505,6 +504,7 @@ async def test_handle_submit_tool_outputs( mock_pipeline_client_gen.return_value = mock_pipeline project_client = self.get_mock_client() async with project_client: + self._set_toolcalls(project_client.agents, toolset, None) # Check that pipelines are created as expected. 
agent1 = await project_client.agents.create_agent( model="gpt-4-1106-preview", @@ -554,7 +554,7 @@ async def test_create_stream_with_tool_calls(self, mock_submit_tool_outputs_to_r toolset.add(functions) operation = AgentsOperations() - operation._toolset = {"asst_01": toolset} + operation.enable_auto_function_calls(toolset=toolset) count = 0 async with await operation.create_stream(thread_id="thread_id", agent_id="asst_01") as stream: diff --git a/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor.py b/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor.py index 2eca8bd339ab..db43e4536d71 100644 --- a/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor.py +++ b/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor.py @@ -54,16 +54,6 @@ _utils._span_impl_type = settings.tracing_implementation() -# TODO - remove when https://github.com/Azure/azure-sdk-for-python/issues/40086 is fixed -class FakeToolSetDict(dict): - def __init__(self, toolset=None, *args, **kwargs): - super().__init__(*args, **kwargs) - self.toolset = toolset - - def get(self, k, default=None): - return self.toolset - - class TestAiAgentsInstrumentor(AzureRecordedTestCase): """Tests for AI agents instrumentor.""" @@ -501,7 +491,7 @@ def fetch_weather(location: str) -> str: ) # workaround for https://github.com/Azure/azure-sdk-for-python/issues/40086 - client.agents._toolset = FakeToolSetDict(toolset=toolset) + client.agents.enable_auto_function_calls(toolset=toolset) thread = client.agents.create_thread() message = client.agents.create_message( @@ -724,7 +714,7 @@ def fetch_weather(location: str) -> str: ) # workaround for https://github.com/Azure/azure-sdk-for-python/issues/40086 - client.agents._toolset = FakeToolSetDict(toolset=toolset) + client.agents.enable_auto_function_calls(toolset=toolset) thread = client.agents.create_thread() message = client.agents.create_message(thread_id=thread.id, role="user", content="Времето в София?") 
@@ -826,12 +816,14 @@ def fetch_weather(location: str) -> str: toolset.add(functions) client = self.create_client(**kwargs) + client.agents.enable_auto_function_calls(toolset=toolset) + agent = client.agents.create_agent( model="gpt-4o", name="my-agent", instructions="You are helpful agent", toolset=toolset ) # workaround for https://github.com/Azure/azure-sdk-for-python/issues/40086 - client.agents._toolset = FakeToolSetDict(toolset=toolset) + client.agents.enable_auto_function_calls(toolset=toolset) thread = client.agents.create_thread() message = client.agents.create_message( thread_id=thread.id, role="user", content="What is the weather in New York?" diff --git a/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor_async.py b/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor_async.py index 804a4fc287fb..4e89c66ade13 100644 --- a/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor_async.py +++ b/sdk/ai/azure-ai-projects/tests/telemetry/test_ai_agents_instrumentor_async.py @@ -53,16 +53,6 @@ content_tracing_initial_value = os.getenv(CONTENT_TRACING_ENV_VARIABLE) -# TODO - remove when https://github.com/Azure/azure-sdk-for-python/issues/40086 is fixed -class FakeToolSetDict(dict): - def __init__(self, toolset=None, *args, **kwargs): - super().__init__(*args, **kwargs) - self.toolset = toolset - - def get(self, k, default=None): - return self.toolset - - class TestAiAgentsInstrumentor(AzureRecordedTestCase): """Tests for AI agents instrumentor.""" @@ -437,12 +427,14 @@ def fetch_weather(location: str) -> str: toolset.add(functions) client = self.create_client(**kwargs) + client.agents.enable_auto_function_calls(toolset=toolset) + agent = await client.agents.create_agent( model="gpt-4o", name="my-agent", instructions="You are helpful agent", toolset=toolset ) # workaround for https://github.com/Azure/azure-sdk-for-python/issues/40086 - client.agents._toolset = FakeToolSetDict(toolset=toolset) + 
client.agents.enable_auto_function_calls(toolset=toolset) thread = await client.agents.create_thread() message = await client.agents.create_message( @@ -662,7 +654,7 @@ def fetch_weather(location: str) -> str: ) # workaround for https://github.com/Azure/azure-sdk-for-python/issues/40086 - client.agents._toolset = FakeToolSetDict(toolset=toolset) + client.agents.enable_auto_function_calls(toolset=toolset) thread = await client.agents.create_thread() message = await client.agents.create_message( From 4d831955555c6cc2d9d86ccd4741e74094b8da8b Mon Sep 17 00:00:00 2001 From: Jarno Hakulinen Date: Tue, 15 Apr 2025 13:47:35 -0800 Subject: [PATCH 10/13] Jhakulin/image input for assistants (#40410) Image input support for assistants create message --- sdk/ai/azure-ai-projects/CHANGELOG.md | 7 +- sdk/ai/azure-ai-projects/README.md | 84 +++- .../azure-ai-projects/apiview-properties.json | 12 +- .../azure/ai/projects/_types.py | 3 +- .../ai/projects/aio/operations/_operations.py | 117 ++---- .../ai/projects/aio/operations/_patch.py | 12 +- .../azure/ai/projects/models/__init__.py | 22 +- .../azure/ai/projects/models/_enums.py | 26 ++ .../azure/ai/projects/models/_models.py | 372 +++++++++++++++--- .../ai/projects/operations/_operations.py | 119 ++---- .../azure/ai/projects/operations/_patch.py | 14 +- .../samples/agents/image_file.png | Bin 0 -> 183951 bytes .../samples/agents/sample_agents_basics.py | 9 - .../sample_agents_image_input_base64.py | 110 ++++++ .../agents/sample_agents_image_input_file.py | 91 +++++ .../agents/sample_agents_image_input_url.py | 90 +++++ .../samples/agents/sample_agents_openapi.py | 1 - sdk/ai/azure-ai-projects/tsp-location.yaml | 2 +- 18 files changed, 849 insertions(+), 242 deletions(-) create mode 100644 sdk/ai/azure-ai-projects/samples/agents/image_file.png create mode 100644 sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_base64.py create mode 100644 sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_file.py 
create mode 100644 sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_url.py diff --git a/sdk/ai/azure-ai-projects/CHANGELOG.md b/sdk/ai/azure-ai-projects/CHANGELOG.md index c690ff55ac6e..58fb350038a9 100644 --- a/sdk/ai/azure-ai-projects/CHANGELOG.md +++ b/sdk/ai/azure-ai-projects/CHANGELOG.md @@ -4,13 +4,14 @@ ### Features added * Utilities to load prompt template strings and Prompty file content -* Add BingCustomSearchTool class with sample -* Add list_threads API to agents namespace +* Added BingCustomSearchTool class with sample +* Added list_threads API to agents namespace +* Added image input support for agents create_message ### Sample updates * Added `project_client.agents.enable_auto_function_calls(toolset=toolset)` to all samples that has `toolcalls` executed by `azure-ai-project` SDK * New BingCustomSearchTool sample -* Add list_threads usage to agent basics sample +* New samples added for image input from url, file and base64 ### Bugs Fixed diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md index a1403cfb1eeb..922f50231e31 100644 --- a/sdk/ai/azure-ai-projects/README.md +++ b/sdk/ai/azure-ai-projects/README.md @@ -52,6 +52,7 @@ To report an issue with the client library, or request additional features, plea - [Create message](#create-message) with: - [File search attachment](#create-message-with-file-search-attachment) - [Code interpreter attachment](#create-message-with-code-interpreter-attachment) + - [Create Message with Image Inputs](#create-message-with-image-inputs) - [Execute Run, Run_and_Process, or Stream](#create-run-run_and_process-or-stream) - [Retrieve message](#retrieve-message) - [Retrieve file](#retrieve-file) @@ -609,7 +610,6 @@ agent = project_client.agents.create_agent( Currently, the Azure Function integration for the AI Agent has the following limitations: -- Azure Functions integration is available **only for non-streaming scenarios**. 
- Supported trigger for Azure Function is currently limited to **Queue triggers** only. HTTP or other trigger types and streaming responses are not supported at this time. @@ -985,6 +985,88 @@ message = project_client.agents.create_message( +#### Create Message with Image Inputs + +You can send messages to Azure agents with image inputs in following ways: + +- **Using an image stored as a uploaded file** +- **Using a public image accessible via URL** +- **Using a base64 encoded image string** + +The following examples demonstrate each method: + +##### Create message using uploaded image file + +```python +# Upload the local image file +image_file = project_client.agents.upload_file_and_poll(file_path="image_file.png", purpose="assistants") + +# Construct content using uploaded image +file_param = MessageImageFileParam(file_id=image_file.id, detail="high") +content_blocks = [ + MessageInputTextBlock(text="Hello, what is in the image?"), + MessageInputImageFileBlock(image_file=file_param), +] + +# Create the message +message = project_client.agents.create_message( + thread_id=thread.id, + role="user", + content=content_blocks +) +``` + +##### Create message with an image URL input + +```python +# Specify the public image URL +image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg" + +# Create content directly referencing image URL +url_param = MessageImageUrlParam(url=image_url, detail="high") +content_blocks = [ + MessageInputTextBlock(text="Hello, what is in the image?"), + MessageInputImageUrlBlock(image_url=url_param), +] + +# Create the message +message = project_client.agents.create_message( + thread_id=thread.id, + role="user", + content=content_blocks +) +``` + +##### Create message with base64-encoded image input + +```python +import base64 + +def image_file_to_base64(path: str) -> str: + with open(path, "rb") as f: + return 
base64.b64encode(f.read()).decode("utf-8") + +# Convert your image file to base64 format +image_base64 = image_file_to_base64("image_file.png") + +# Prepare the data URL +img_data_url = f"data:image/png;base64,{image_base64}" + +# Use base64 encoded string as image URL parameter +url_param = MessageImageUrlParam(url=img_data_url, detail="high") +content_blocks = [ + MessageInputTextBlock(text="Hello, what is in the image?"), + MessageInputImageUrlBlock(image_url=url_param), +] + +# Create the message +message = project_client.agents.create_message( + thread_id=thread.id, + role="user", + content=content_blocks +) +``` + #### Create Run, Run_and_Process, or Stream To process your message, you can use `create_run`, `create_and_process_run`, or `create_stream`. diff --git a/sdk/ai/azure-ai-projects/apiview-properties.json b/sdk/ai/azure-ai-projects/apiview-properties.json index 7164e456ca70..edfd93b298e0 100644 --- a/sdk/ai/azure-ai-projects/apiview-properties.json +++ b/sdk/ai/azure-ai-projects/apiview-properties.json @@ -23,6 +23,8 @@ "azure.ai.projects.models.BingGroundingToolDefinition": "Azure.AI.Projects.Agents.BingGroundingToolDefinition", "azure.ai.projects.models.CodeInterpreterToolDefinition": "Azure.AI.Projects.Agents.CodeInterpreterToolDefinition", "azure.ai.projects.models.CodeInterpreterToolResource": "Azure.AI.Projects.Agents.CodeInterpreterToolResource", + "azure.ai.projects.models.ConnectedAgentDetails": "Azure.AI.Projects.Agents.ConnectedAgentDetails", + "azure.ai.projects.models.ConnectedAgentToolDefinition": "Azure.AI.Projects.Agents.ConnectedAgentToolDefinition", "azure.ai.projects.models.Trigger": "Azure.AI.Projects.Trigger", "azure.ai.projects.models.CronTrigger": "Azure.AI.Projects.CronTrigger", "azure.ai.projects.models.Dataset": "Azure.AI.Projects.Dataset", @@ -60,7 +62,13 @@ "azure.ai.projects.models.MessageDeltaTextUrlCitationDetails": "Azure.AI.Projects.Agents.MessageDeltaTextUrlCitationDetails", 
"azure.ai.projects.models.MessageImageFileContent": "Azure.AI.Projects.Agents.MessageImageFileContent", "azure.ai.projects.models.MessageImageFileDetails": "Azure.AI.Projects.Agents.MessageImageFileDetails", + "azure.ai.projects.models.MessageImageFileParam": "Azure.AI.Projects.Agents.MessageImageFileParam", + "azure.ai.projects.models.MessageImageUrlParam": "Azure.AI.Projects.Agents.MessageImageUrlParam", "azure.ai.projects.models.MessageIncompleteDetails": "Azure.AI.Projects.Agents.MessageIncompleteDetails", + "azure.ai.projects.models.MessageInputContentBlock": "Azure.AI.Projects.Agents.MessageInputContentBlock", + "azure.ai.projects.models.MessageInputImageFileBlock": "Azure.AI.Projects.Agents.MessageInputImageFileBlock", + "azure.ai.projects.models.MessageInputImageUrlBlock": "Azure.AI.Projects.Agents.MessageInputImageUrlBlock", + "azure.ai.projects.models.MessageInputTextBlock": "Azure.AI.Projects.Agents.MessageInputTextBlock", "azure.ai.projects.models.MessageTextAnnotation": "Azure.AI.Projects.Agents.MessageTextAnnotation", "azure.ai.projects.models.MessageTextContent": "Azure.AI.Projects.Agents.MessageTextContent", "azure.ai.projects.models.MessageTextDetails": "Azure.AI.Projects.Agents.MessageTextDetails", @@ -156,7 +164,6 @@ "azure.ai.projects.models.UpdateCodeInterpreterToolResourceOptions": "Azure.AI.Projects.Agents.UpdateCodeInterpreterToolResourceOptions", "azure.ai.projects.models.UpdateFileSearchToolResourceOptions": "Azure.AI.Projects.Agents.UpdateFileSearchToolResourceOptions", "azure.ai.projects.models.UpdateToolResourcesOptions": "Azure.AI.Projects.Agents.UpdateToolResourcesOptions", - "azure.ai.projects.models.UploadFileRequest": "Azure.AI.Projects.Agents.uploadFile.Request.anonymous", "azure.ai.projects.models.VectorStore": "Azure.AI.Projects.Agents.VectorStore", "azure.ai.projects.models.VectorStoreChunkingStrategyRequest": "Azure.AI.Projects.Agents.VectorStoreChunkingStrategyRequest", 
"azure.ai.projects.models.VectorStoreAutoChunkingStrategyRequest": "Azure.AI.Projects.Agents.VectorStoreAutoChunkingStrategyRequest", @@ -182,6 +189,8 @@ "azure.ai.projects.models.ResponseFormat": "Azure.AI.Projects.Agents.ResponseFormat", "azure.ai.projects.models.ListSortOrder": "Azure.AI.Projects.Agents.ListSortOrder", "azure.ai.projects.models.MessageRole": "Azure.AI.Projects.Agents.MessageRole", + "azure.ai.projects.models.MessageBlockType": "Azure.AI.Projects.Agents.MessageBlockType", + "azure.ai.projects.models.ImageDetailLevel": "Azure.AI.Projects.Agents.ImageDetailLevel", "azure.ai.projects.models.MessageStatus": "Azure.AI.Projects.Agents.MessageStatus", "azure.ai.projects.models.MessageIncompleteDetailsReason": "Azure.AI.Projects.Agents.MessageIncompleteDetailsReason", "azure.ai.projects.models.RunStatus": "Azure.AI.Projects.Agents.RunStatus", @@ -238,7 +247,6 @@ "azure.ai.projects.AIProjectClient.agents.get_run_step": "Azure.AI.Projects.Agents.getRunStep", "azure.ai.projects.AIProjectClient.agents.list_run_steps": "Azure.AI.Projects.Agents.listRunSteps", "azure.ai.projects.AIProjectClient.agents.list_files": "Azure.AI.Projects.Agents.listFiles", - "azure.ai.projects.AIProjectClient.agents.upload_file": "Azure.AI.Projects.Agents.uploadFile", "azure.ai.projects.AIProjectClient.agents.delete_file": "Azure.AI.Projects.Agents.deleteFile", "azure.ai.projects.AIProjectClient.agents.get_file": "Azure.AI.Projects.Agents.getFile", "azure.ai.projects.AIProjectClient.agents.list_vector_stores": "Azure.AI.Projects.Agents.listVectorStores", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_types.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_types.py index 1c059e5809cc..ff7e15ec008a 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_types.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_types.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING, Union +from typing import List, TYPE_CHECKING, Union if TYPE_CHECKING: from . import models as _models @@ -17,5 +17,6 @@ "_models.AgentsApiResponseFormat", "_models.ResponseFormatJsonSchemaType", ] +MessageInputContent = Union[str, List["_models.MessageInputContentBlock"]] MessageAttachmentToolDefinition = Union["_models.CodeInterpreterToolDefinition", "_models.FileSearchToolDefinition"] AgentsApiToolChoiceOption = Union[str, str, "_models.AgentsApiToolChoiceOptionMode", "_models.AgentsNamedToolChoice"] diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py index 895d5161947f..b1f9ba8cb084 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py @@ -46,7 +46,7 @@ from ... import _model_base, models as _models from ..._model_base import SdkJSONEncoder, _deserialize from ..._serialization import Deserializer, Serializer -from ..._vendor import FileType, prepare_multipart_form_data +from ..._vendor import prepare_multipart_form_data from ...operations._operations import ( build_agents_cancel_run_request, build_agents_cancel_vector_store_file_batch_request, @@ -1443,7 +1443,7 @@ async def create_message( thread_id: str, *, role: Union[str, _models.MessageRole], - content: str, + content: "_types.MessageInputContent", content_type: str = "application/json", attachments: Optional[List[_models.MessageAttachment]] = None, metadata: Optional[Dict[str, str]] = None, @@ -1454,18 +1454,16 @@ async def create_message( :param thread_id: Identifier of the thread. Required. :type thread_id: str :keyword role: The role of the entity that is creating the message. 
Allowed values include: - - - * ``user``\\ : Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + ``user``, which indicates the message is sent by an actual user (and should be + used in most cases to represent user-generated messages), and ``assistant``, + which indicates the message is generated by the agent (use this value to insert + messages from the agent into the conversation). Known values are: "user" and "assistant". + Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole - :keyword content: The textual content of the initial message. Currently, robust input including - images and annotated text may only be provided via - a separate call to the create message API. Required. - :paramtype content: str + :keyword content: The content of the initial message. This may be a basic string (if you only + need text) or an array of typed content blocks (for example, text, image_file, + image_url, and so on). Is either a str type or a [MessageInputContentBlock] type. Required. + :paramtype content: str or list[~azure.ai.projects.models.MessageInputContentBlock] :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -1525,7 +1523,7 @@ async def create_message( body: Union[JSON, IO[bytes]] = _Unset, *, role: Union[str, _models.MessageRole] = _Unset, - content: str = _Unset, + content: "_types.MessageInputContent" = _Unset, attachments: Optional[List[_models.MessageAttachment]] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any @@ -1537,17 +1535,16 @@ async def create_message( :param body: Is either a JSON type or a IO[bytes] type. Required. 
:type body: JSON or IO[bytes] :keyword role: The role of the entity that is creating the message. Allowed values include: - - * ``user``\\ : Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + ``user``, which indicates the message is sent by an actual user (and should be + used in most cases to represent user-generated messages), and ``assistant``, + which indicates the message is generated by the agent (use this value to insert + messages from the agent into the conversation). Known values are: "user" and "assistant". + Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole - :keyword content: The textual content of the initial message. Currently, robust input including - images and annotated text may only be provided via - a separate call to the create message API. Required. - :paramtype content: str + :keyword content: The content of the initial message. This may be a basic string (if you only + need text) or an array of typed content blocks (for example, text, image_file, + image_url, and so on). Is either a str type or a [MessageInputContentBlock] type. Required. + :paramtype content: str or list[~azure.ai.projects.models.MessageInputContentBlock] :keyword attachments: A list of files attached to the message, and the tools they should be added to. Default value is None. :paramtype attachments: list[~azure.ai.projects.models.MessageAttachment] @@ -3546,59 +3543,18 @@ async def list_files( return deserialized # type: ignore @overload - async def upload_file( - self, *, file: FileType, purpose: Union[str, _models.FilePurpose], filename: Optional[str] = None, **kwargs: Any - ) -> _models.OpenAIFile: - """Uploads a file for use by other operations. 
- - :keyword file: The file data, in bytes. Required. - :paramtype file: ~azure.ai.projects._vendor.FileType - :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. - :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose - :keyword filename: The name of the file. Default value is None. - :paramtype filename: str - :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.OpenAIFile - :raises ~azure.core.exceptions.HttpResponseError: - """ - + async def _upload_file(self, body: _models._models.UploadFileRequest, **kwargs: Any) -> _models.OpenAIFile: ... @overload - async def upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: - """Uploads a file for use by other operations. - - :param body: Required. - :type body: JSON - :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.OpenAIFile - :raises ~azure.core.exceptions.HttpResponseError: - """ + async def _upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: ... @distributed_trace_async - async def upload_file( - self, - body: JSON = _Unset, - *, - file: FileType = _Unset, - purpose: Union[str, _models.FilePurpose] = _Unset, - filename: Optional[str] = None, - **kwargs: Any + async def _upload_file( + self, body: Union[_models._models.UploadFileRequest, JSON], **kwargs: Any ) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Is one of the following types: JSON Required. - :type body: JSON - :keyword file: The file data, in bytes. Required. 
- :paramtype file: ~azure.ai.projects._vendor.FileType - :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. - :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose - :keyword filename: The name of the file. Default value is None. - :paramtype filename: str + :param body: Multipart body. Is either a UploadFileRequest type or a JSON type. Required. + :type body: ~azure.ai.projects.models._models.UploadFileRequest or JSON :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile :raises ~azure.core.exceptions.HttpResponseError: @@ -3616,13 +3572,6 @@ async def upload_file( cls: ClsType[_models.OpenAIFile] = kwargs.pop("cls", None) - if body is _Unset: - if file is _Unset: - raise TypeError("missing required argument: file") - if purpose is _Unset: - raise TypeError("missing required argument: purpose") - body = {"file": file, "filename": filename, "purpose": purpose} - body = {k: v for k, v in body.items() if v is not None} _body = body.as_dict() if isinstance(body, _model_base.Model) else body _file_fields: List[str] = ["file"] _data_fields: List[str] = ["purpose", "filename"] @@ -3636,12 +3585,16 @@ async def upload_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), 
"resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py index 95ec7a20ccc0..d60548fe5141 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py @@ -2422,15 +2422,19 @@ async def upload_file( :raises IOError: If there are issues with reading the file. :raises: HttpResponseError for HTTP errors. 
""" + # If a JSON body is provided directly, pass it along if body is not None: - return await super().upload_file(body=body, **kwargs) + return await super()._upload_file(body=body, **kwargs) + # Convert FilePurpose enum to string if necessary if isinstance(purpose, FilePurpose): purpose = purpose.value + # If file content is passed in directly if file is not None and purpose is not None: - return await super().upload_file(file=file, purpose=purpose, filename=filename, **kwargs) + return await super()._upload_file(body={"file": file, "purpose": purpose, "filename": filename}, **kwargs) + # If a file path is provided if file_path is not None and purpose is not None: if not os.path.isfile(file_path): raise FileNotFoundError(f"The file path provided does not exist: {file_path}") @@ -2439,11 +2443,11 @@ async def upload_file( with open(file_path, "rb") as f: content = f.read() - # Determine filename and create correct FileType + # If no explicit filename is provided, use the base name base_filename = filename or os.path.basename(file_path) file_content: FileType = (base_filename, content) - return await super().upload_file(file=file_content, purpose=purpose, **kwargs) + return await super()._upload_file(body={"file": file_content, "purpose": purpose}, **kwargs) except IOError as e: raise IOError(f"Unable to read file: {file_path}.") from e diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py index 23a9fc738fa4..6e1bbfc0c2fc 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py @@ -33,6 +33,8 @@ BingGroundingToolDefinition, CodeInterpreterToolDefinition, CodeInterpreterToolResource, + ConnectedAgentDetails, + ConnectedAgentToolDefinition, CronTrigger, Dataset, Evaluation, @@ -70,7 +72,13 @@ MessageDeltaTextUrlCitationDetails, MessageImageFileContent, MessageImageFileDetails, + 
MessageImageFileParam, + MessageImageUrlParam, MessageIncompleteDetails, + MessageInputContentBlock, + MessageInputImageFileBlock, + MessageInputImageUrlBlock, + MessageInputTextBlock, MessageTextAnnotation, MessageTextContent, MessageTextDetails, @@ -169,7 +177,6 @@ UpdateCodeInterpreterToolResourceOptions, UpdateFileSearchToolResourceOptions, UpdateToolResourcesOptions, - UploadFileRequest, VectorStore, VectorStoreAutoChunkingStrategyRequest, VectorStoreAutoChunkingStrategyResponse, @@ -203,8 +210,10 @@ FilePurpose, FileState, Frequency, + ImageDetailLevel, IncompleteDetailsReason, ListSortOrder, + MessageBlockType, MessageIncompleteDetailsReason, MessageRole, MessageStatus, @@ -255,6 +264,8 @@ "BingGroundingToolDefinition", "CodeInterpreterToolDefinition", "CodeInterpreterToolResource", + "ConnectedAgentDetails", + "ConnectedAgentToolDefinition", "CronTrigger", "Dataset", "Evaluation", @@ -292,7 +303,13 @@ "MessageDeltaTextUrlCitationDetails", "MessageImageFileContent", "MessageImageFileDetails", + "MessageImageFileParam", + "MessageImageUrlParam", "MessageIncompleteDetails", + "MessageInputContentBlock", + "MessageInputImageFileBlock", + "MessageInputImageUrlBlock", + "MessageInputTextBlock", "MessageTextAnnotation", "MessageTextContent", "MessageTextDetails", @@ -391,7 +408,6 @@ "UpdateCodeInterpreterToolResourceOptions", "UpdateFileSearchToolResourceOptions", "UpdateToolResourcesOptions", - "UploadFileRequest", "VectorStore", "VectorStoreAutoChunkingStrategyRequest", "VectorStoreAutoChunkingStrategyResponse", @@ -422,8 +438,10 @@ "FilePurpose", "FileState", "Frequency", + "ImageDetailLevel", "IncompleteDetailsReason", "ListSortOrder", + "MessageBlockType", "MessageIncompleteDetailsReason", "MessageRole", "MessageStatus", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py index 9e780544fea8..20b2b87e2cc2 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py 
+++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_enums.py @@ -47,6 +47,8 @@ class AgentsNamedToolChoiceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Tool type ``azure_ai_search``""" BING_CUSTOM_SEARCH = "bing_custom_search" """Tool type ``bing_custom_search``""" + CONNECTED_AGENT = "connected_agent" + """Tool type ``connected_agent``""" class AgentStreamEvent(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -251,6 +253,17 @@ class Frequency(str, Enum, metaclass=CaseInsensitiveEnumMeta): MINUTE = "Minute" +class ImageDetailLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Specifies an image's detail level. Can be 'auto', 'low', 'high', or an unknown future value.""" + + AUTO = "auto" + """Automatically select an appropriate detail level.""" + LOW = "low" + """Use a lower detail level to reduce bandwidth or cost.""" + HIGH = "high" + """Use a higher detail level—potentially more resource-intensive.""" + + class IncompleteDetailsReason(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The reason why the run is incomplete. This will point to which specific token limit was reached over the course of the run. @@ -271,6 +284,19 @@ class ListSortOrder(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Specifies a descending sort order.""" +class MessageBlockType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Specifies the kind of content block within a message. Could be text, an image file, an external + image URL, or an unknown future type. 
+ """ + + TEXT = "text" + """Indicates a block containing text content.""" + IMAGE_FILE = "image_file" + """Indicates a block referencing an internally uploaded image file.""" + IMAGE_URL = "image_url" + """Indicates a block referencing an external image URL.""" + + class MessageIncompleteDetailsReason(str, Enum, metaclass=CaseInsensitiveEnumMeta): """A set of reasons describing why a message is marked as incomplete.""" diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py index be0fe642c098..718754ae47b1 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py @@ -16,6 +16,7 @@ from .._vendor import FileType from ._enums import ( AuthenticationType, + MessageBlockType, OpenApiAuthType, RunStepType, VectorStoreChunkingStrategyRequestType, @@ -224,9 +225,10 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class AgentsNamedToolChoice(_model_base.Model): """Specifies a tool the model should use. Use to force the model to call a specific tool. - :ivar type: the type of tool. If type is ``function``\\, the function name must be set. Required. + :ivar type: the type of tool. If type is ``function``, the function name must be set. Required. Known values are: "function", "code_interpreter", "file_search", "bing_grounding", - "fabric_dataagent", "sharepoint_grounding", "azure_ai_search", and "bing_custom_search". + "fabric_dataagent", "sharepoint_grounding", "azure_ai_search", "bing_custom_search", and + "connected_agent". :vartype type: str or ~azure.ai.projects.models.AgentsNamedToolChoiceType :ivar function: The name of the function to call. 
:vartype function: ~azure.ai.projects.models.FunctionName @@ -235,10 +237,10 @@ class AgentsNamedToolChoice(_model_base.Model): type: Union[str, "_models.AgentsNamedToolChoiceType"] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) - """the type of tool. If type is \"function\" , the function name must be set. Required. Known + """the type of tool. If type is ``function``, the function name must be set. Required. Known values are: \"function\", \"code_interpreter\", \"file_search\", \"bing_grounding\", - \"fabric_dataagent\", \"sharepoint_grounding\", \"azure_ai_search\", and - \"bing_custom_search\".""" + \"fabric_dataagent\", \"sharepoint_grounding\", \"azure_ai_search\", \"bing_custom_search\", + and \"connected_agent\".""" function: Optional["_models.FunctionName"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The name of the function to call.""" @@ -469,7 +471,7 @@ class AOAIModelConfig(TargetModelConfig, discriminator="AOAI"): :ivar type: Required. Default value is "AOAI". :vartype type: str - :ivar azure_endpoint: Endpoint URL for AOAI model. Required. + :ivar azure_endpoint: Endpoint targetURI for AOAI model. Required. :vartype azure_endpoint: str :ivar api_key: API Key for AOAI model. Required. :vartype api_key: str @@ -480,7 +482,7 @@ class AOAIModelConfig(TargetModelConfig, discriminator="AOAI"): type: Literal["AOAI"] = rest_discriminator(name="type", visibility=["read"]) # type: ignore """Required. Default value is \"AOAI\".""" azure_endpoint: str = rest_field(name="azureEndpoint", visibility=["read", "create", "update", "delete", "query"]) - """Endpoint URL for AOAI model. Required.""" + """Endpoint targetURI for AOAI model. Required.""" api_key: str = rest_field(name="apiKey", visibility=["read", "create", "update", "delete", "query"]) """API Key for AOAI model. 
Required.""" azure_deployment: str = rest_field( @@ -659,9 +661,9 @@ class ToolDefinition(_model_base.Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: AzureAISearchToolDefinition, AzureFunctionToolDefinition, BingCustomSearchToolDefinition, - BingGroundingToolDefinition, CodeInterpreterToolDefinition, MicrosoftFabricToolDefinition, - FileSearchToolDefinition, FunctionToolDefinition, OpenApiToolDefinition, - SharepointToolDefinition + BingGroundingToolDefinition, CodeInterpreterToolDefinition, ConnectedAgentToolDefinition, + MicrosoftFabricToolDefinition, FileSearchToolDefinition, FunctionToolDefinition, + OpenApiToolDefinition, SharepointToolDefinition :ivar type: The object type. Required. Default value is None. :vartype type: str @@ -1015,6 +1017,83 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class ConnectedAgentDetails(_model_base.Model): + """Information for connecting one agent to another as a tool. + + :ivar id: The identifier of the child agent. Required. + :vartype id: str + :ivar name: The name of the agent to be called. Required. + :vartype name: str + :ivar description: A description of what the agent does, used by the model to choose when and + how to call the agent. Required. + :vartype description: str + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The identifier of the child agent. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent to be called. Required.""" + description: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of what the agent does, used by the model to choose when and how to call the + agent. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + name: str, + description: str, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ConnectedAgentToolDefinition(ToolDefinition, discriminator="connected_agent"): + """The input definition information for a connected agent tool which defines a domain specific + sub-agent. + + :ivar type: The object type, which is always 'connected_agent'. Required. Default value is + "connected_agent". + :vartype type: str + :ivar connected_agent: The sub-agent to connect. Required. + :vartype connected_agent: ~azure.ai.projects.models.ConnectedAgentDetails + """ + + type: Literal["connected_agent"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'connected_agent'. Required. Default value is + \"connected_agent\".""" + connected_agent: "_models.ConnectedAgentDetails" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The sub-agent to connect. Required.""" + + @overload + def __init__( + self, + *, + connected_agent: "_models.ConnectedAgentDetails", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type="connected_agent", **kwargs) + + class CredentialsApiKeyAuth(_model_base.Model): """The credentials needed for API key authentication. @@ -2199,7 +2278,7 @@ class MAASModelConfig(TargetModelConfig, discriminator="MAAS"): :ivar type: Required. Default value is "MAAS". :vartype type: str - :ivar azure_endpoint: Endpoint URL for MAAS model. Required. + :ivar azure_endpoint: Endpoint targetURI for MAAS model. Required. 
:vartype azure_endpoint: str :ivar api_key: API Key for MAAS model. Required. :vartype api_key: str @@ -2208,7 +2287,7 @@ class MAASModelConfig(TargetModelConfig, discriminator="MAAS"): type: Literal["MAAS"] = rest_discriminator(name="type", visibility=["read"]) # type: ignore """Required. Default value is \"MAAS\".""" azure_endpoint: str = rest_field(name="azureEndpoint", visibility=["read", "create", "update", "delete", "query"]) - """Endpoint URL for MAAS model. Required.""" + """Endpoint targetURI for MAAS model. Required.""" api_key: str = rest_field(name="apiKey", visibility=["read", "create", "update", "delete", "query"]) """API Key for MAAS model. Required.""" @@ -2913,6 +2992,80 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class MessageImageFileParam(_model_base.Model): + """Defines how an internally uploaded image file is referenced when creating an image-file block. + + :ivar file_id: The ID of the previously uploaded image file. Required. + :vartype file_id: str + :ivar detail: Optional detail level for the image (auto, low, or high). Known values are: + "auto", "low", and "high". + :vartype detail: str or ~azure.ai.projects.models.ImageDetailLevel + """ + + file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the previously uploaded image file. Required.""" + detail: Optional[Union[str, "_models.ImageDetailLevel"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Optional detail level for the image (auto, low, or high). Known values are: \"auto\", \"low\", + and \"high\".""" + + @overload + def __init__( + self, + *, + file_id: str, + detail: Optional[Union[str, "_models.ImageDetailLevel"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MessageImageUrlParam(_model_base.Model): + """Defines how an external image URL is referenced when creating an image-URL block. + + :ivar url: The publicly accessible URL of the external image. Required. + :vartype url: str + :ivar detail: Optional detail level for the image (auto, low, or high). Defaults to 'auto' if + not specified. Known values are: "auto", "low", and "high". + :vartype detail: str or ~azure.ai.projects.models.ImageDetailLevel + """ + + url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The publicly accessible URL of the external image. Required.""" + detail: Optional[Union[str, "_models.ImageDetailLevel"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Optional detail level for the image (auto, low, or high). Defaults to 'auto' if not specified. + Known values are: \"auto\", \"low\", and \"high\".""" + + @overload + def __init__( + self, + *, + url: str, + detail: Optional[Union[str, "_models.ImageDetailLevel"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + class MessageIncompleteDetails(_model_base.Model): """Information providing additional detail about a message entering an incomplete status. @@ -2947,6 +3100,146 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class MessageInputContentBlock(_model_base.Model): + """Defines a single content block when creating a message. The 'type' field determines whether it + is text, an image file, or an external image URL, etc. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + MessageInputImageFileBlock, MessageInputImageUrlBlock, MessageInputTextBlock + + :ivar type: Specifies which kind of content block this is (text, image_file, image_url, etc.). + Required. Known values are: "text", "image_file", and "image_url". + :vartype type: str or ~azure.ai.projects.models.MessageBlockType + """ + + __mapping__: Dict[str, _model_base.Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Specifies which kind of content block this is (text, image_file, image_url, etc.). Required. + Known values are: \"text\", \"image_file\", and \"image_url\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MessageInputImageFileBlock(MessageInputContentBlock, discriminator="image_file"): + """An image-file block in a new message, referencing an internally uploaded image by file ID. + + :ivar type: Must be 'image_file' for an internally uploaded image block. Required. Indicates a + block referencing an internally uploaded image file. + :vartype type: str or ~azure.ai.projects.models.IMAGE_FILE + :ivar image_file: Information about the referenced image file, including file ID and optional + detail level. Required. + :vartype image_file: ~azure.ai.projects.models.MessageImageFileParam + """ + + type: Literal[MessageBlockType.IMAGE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Must be 'image_file' for an internally uploaded image block. Required. 
Indicates a block + referencing an internally uploaded image file.""" + image_file: "_models.MessageImageFileParam" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Information about the referenced image file, including file ID and optional detail level. + Required.""" + + @overload + def __init__( + self, + *, + image_file: "_models.MessageImageFileParam", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=MessageBlockType.IMAGE_FILE, **kwargs) + + +class MessageInputImageUrlBlock(MessageInputContentBlock, discriminator="image_url"): + """An image-URL block in a new message, referencing an external image by URL. + + :ivar type: Must be 'image_url' for an externally hosted image block. Required. Indicates a + block referencing an external image URL. + :vartype type: str or ~azure.ai.projects.models.IMAGE_URL + :ivar image_url: Information about the external image URL, including the URL and optional + detail level. Required. + :vartype image_url: ~azure.ai.projects.models.MessageImageUrlParam + """ + + type: Literal[MessageBlockType.IMAGE_URL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Must be 'image_url' for an externally hosted image block. Required. Indicates a block + referencing an external image URL.""" + image_url: "_models.MessageImageUrlParam" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Information about the external image URL, including the URL and optional detail level. + Required.""" + + @overload + def __init__( + self, + *, + image_url: "_models.MessageImageUrlParam", + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=MessageBlockType.IMAGE_URL, **kwargs) + + +class MessageInputTextBlock(MessageInputContentBlock, discriminator="text"): + """A text block in a new message, containing plain text content. + + :ivar type: Must be 'text' for a text block. Required. Indicates a block containing text + content. + :vartype type: str or ~azure.ai.projects.models.TEXT + :ivar text: The plain text content for this block. Required. + :vartype text: str + """ + + type: Literal[MessageBlockType.TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Must be 'text' for a text block. Required. Indicates a block containing text content.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The plain text content for this block. Required.""" + + @overload + def __init__( + self, + *, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=MessageBlockType.TEXT, **kwargs) + + class MessageTextAnnotation(_model_base.Model): """An abstract representation of an annotation to text thread message content. @@ -3897,8 +4190,6 @@ class OpenApiFunctionDefinition(_model_base.Model): :vartype auth: ~azure.ai.projects.models.OpenApiAuthDetails :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults. :vartype default_params: list[str] - :ivar functions: List of functions returned in response. 
- :vartype functions: list[~azure.ai.projects.models.FunctionDefinition] """ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3912,10 +4203,6 @@ class OpenApiFunctionDefinition(_model_base.Model): """Open API authentication details. Required.""" default_params: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """List of OpenAPI spec parameters that will use user-provided defaults.""" - functions: Optional[List["_models.FunctionDefinition"]] = rest_field( - visibility=["read", "create", "update", "delete", "query"] - ) - """List of functions returned in response.""" @overload def __init__( @@ -3926,7 +4213,6 @@ def __init__( auth: "_models.OpenApiAuthDetails", description: Optional[str] = None, default_params: Optional[List[str]] = None, - functions: Optional[List["_models.FunctionDefinition"]] = None, ) -> None: ... @overload @@ -6413,23 +6699,20 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class ThreadMessageOptions(_model_base.Model): - """A single message within an agent thread, as provided during that thread's creation for its - initial state. + """A single message within an agent thread, + as provided during that thread's creation for its initial state. :ivar role: The role of the entity that is creating the message. Allowed values include: - - - * ``user``: Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``: Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the conversation. - - Required. Known values are: "user" and "assistant". + ``user``, which indicates the message is sent by an actual user (and should be + used in most cases to represent user-generated messages), and ``assistant``, + which indicates the message is generated by the agent (use this value to insert + messages from the agent into the conversation). Required. 
Known values are: "user" and + "assistant". :vartype role: str or ~azure.ai.projects.models.MessageRole - :ivar content: The textual content of the initial message. Currently, robust input including - images and annotated text may only be provided via - a separate call to the create message API. Required. - :vartype content: str + :ivar content: The content of the initial message. This may be a basic string (if you only + need text) or an array of typed content blocks (for example, text, image_file, + image_url, and so on). Required. Is either a str type or a [MessageInputContentBlock] type. + :vartype content: str or list[~azure.ai.projects.models.MessageInputContentBlock] :ivar attachments: A list of files attached to the message, and the tools they should be added to. :vartype attachments: list[~azure.ai.projects.models.MessageAttachment] @@ -6441,18 +6724,15 @@ class ThreadMessageOptions(_model_base.Model): role: Union[str, "_models.MessageRole"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The role of the entity that is creating the message. Allowed values include: - - * ``user``: Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``: Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the conversation. - - Required. Known values are: \"user\" and \"assistant\". - """ - content: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The textual content of the initial message. Currently, robust input including images and - annotated text may only be provided via - a separate call to the create message API. 
Required.""" + ``user``, which indicates the message is sent by an actual user (and should be + used in most cases to represent user-generated messages), and ``assistant``, + which indicates the message is generated by the agent (use this value to insert + messages from the agent into the conversation). Required. Known values are: \"user\" and + \"assistant\".""" + content: "_types.MessageInputContent" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content of the initial message. This may be a basic string (if you only + need text) or an array of typed content blocks (for example, text, image_file, + image_url, and so on). Required. Is either a str type or a [MessageInputContentBlock] type.""" attachments: Optional[List["_models.MessageAttachment"]] = rest_field( visibility=["read", "create", "update", "delete", "query"] ) @@ -6467,7 +6747,7 @@ def __init__( self, *, role: Union[str, "_models.MessageRole"], - content: str, + content: "_types.MessageInputContent", attachments: Optional[List["_models.MessageAttachment"]] = None, metadata: Optional[Dict[str, str]] = None, ) -> None: ... 
diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py index 84b7bd774e79..480b7afc8931 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py @@ -33,7 +33,7 @@ from .._configuration import AIProjectClientConfiguration from .._model_base import SdkJSONEncoder, _deserialize from .._serialization import Deserializer, Serializer -from .._vendor import FileType, prepare_multipart_form_data +from .._vendor import prepare_multipart_form_data if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -2872,7 +2872,7 @@ def create_message( thread_id: str, *, role: Union[str, _models.MessageRole], - content: str, + content: "_types.MessageInputContent", content_type: str = "application/json", attachments: Optional[List[_models.MessageAttachment]] = None, metadata: Optional[Dict[str, str]] = None, @@ -2883,19 +2883,16 @@ def create_message( :param thread_id: Identifier of the thread. Required. :type thread_id: str :keyword role: The role of the entity that is creating the message. Allowed values include: - - - * ``user``\\ : Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. - + ``user``, which indicates the message is sent by an actual user (and should be + used in most cases to represent user-generated messages), and ``assistant``, + which indicates the message is generated by the agent (use this value to insert + messages from the agent into the conversation). Known values are: "user" and "assistant". + Required. 
:paramtype role: str or ~azure.ai.projects.models.MessageRole - :keyword content: The textual content of the initial message. Currently, robust input including - images and annotated text may only be provided via - a separate call to the create message API. Required. - :paramtype content: str + :keyword content: The content of the initial message. This may be a basic string (if you only + need text) or an array of typed content blocks (for example, text, image_file, + image_url, and so on). Is either a str type or a [MessageInputContentBlock] type. Required. + :paramtype content: str or list[~azure.ai.projects.models.MessageInputContentBlock] :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -2955,7 +2952,7 @@ def create_message( body: Union[JSON, IO[bytes]] = _Unset, *, role: Union[str, _models.MessageRole] = _Unset, - content: str = _Unset, + content: "_types.MessageInputContent" = _Unset, attachments: Optional[List[_models.MessageAttachment]] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any @@ -2967,18 +2964,16 @@ def create_message( :param body: Is either a JSON type or a IO[bytes] type. Required. :type body: JSON or IO[bytes] :keyword role: The role of the entity that is creating the message. Allowed values include: - - - * ``user``\\ : Indicates the message is sent by an actual user and should be used in most - cases to represent user-generated messages. - * ``assistant``\\ : Indicates the message is generated by the agent. Use this value to insert - messages from the agent into the - conversation. Known values are: "user" and "assistant". Required. + ``user``, which indicates the message is sent by an actual user (and should be + used in most cases to represent user-generated messages), and ``assistant``, + which indicates the message is generated by the agent (use this value to insert + messages from the agent into the conversation). 
Known values are: "user" and "assistant". + Required. :paramtype role: str or ~azure.ai.projects.models.MessageRole - :keyword content: The textual content of the initial message. Currently, robust input including - images and annotated text may only be provided via - a separate call to the create message API. Required. - :paramtype content: str + :keyword content: The content of the initial message. This may be a basic string (if you only + need text) or an array of typed content blocks (for example, text, image_file, + image_url, and so on). Is either a str type or a [MessageInputContentBlock] type. Required. + :paramtype content: str or list[~azure.ai.projects.models.MessageInputContentBlock] :keyword attachments: A list of files attached to the message, and the tools they should be added to. Default value is None. :paramtype attachments: list[~azure.ai.projects.models.MessageAttachment] @@ -4977,59 +4972,16 @@ def list_files( return deserialized # type: ignore @overload - def upload_file( - self, *, file: FileType, purpose: Union[str, _models.FilePurpose], filename: Optional[str] = None, **kwargs: Any - ) -> _models.OpenAIFile: - """Uploads a file for use by other operations. - - :keyword file: The file data, in bytes. Required. - :paramtype file: ~azure.ai.projects._vendor.FileType - :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. - :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose - :keyword filename: The name of the file. Default value is None. - :paramtype filename: str - :return: OpenAIFile. 
The OpenAIFile is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.OpenAIFile - :raises ~azure.core.exceptions.HttpResponseError: - """ - + def _upload_file(self, body: _models._models.UploadFileRequest, **kwargs: Any) -> _models.OpenAIFile: ... @overload - def upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: - """Uploads a file for use by other operations. - - :param body: Required. - :type body: JSON - :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping - :rtype: ~azure.ai.projects.models.OpenAIFile - :raises ~azure.core.exceptions.HttpResponseError: - """ + def _upload_file(self, body: JSON, **kwargs: Any) -> _models.OpenAIFile: ... @distributed_trace - def upload_file( - self, - body: JSON = _Unset, - *, - file: FileType = _Unset, - purpose: Union[str, _models.FilePurpose] = _Unset, - filename: Optional[str] = None, - **kwargs: Any - ) -> _models.OpenAIFile: + def _upload_file(self, body: Union[_models._models.UploadFileRequest, JSON], **kwargs: Any) -> _models.OpenAIFile: """Uploads a file for use by other operations. - :param body: Is one of the following types: JSON Required. - :type body: JSON - :keyword file: The file data, in bytes. Required. - :paramtype file: ~azure.ai.projects._vendor.FileType - :keyword purpose: The intended purpose of the uploaded file. Use ``assistants`` for Agents and - Message files, ``vision`` for Agents image file inputs, ``batch`` for Batch API, and - ``fine-tune`` for Fine-tuning. Known values are: "fine-tune", "fine-tune-results", - "assistants", "assistants_output", "batch", "batch_output", and "vision". Required. - :paramtype purpose: str or ~azure.ai.projects.models.FilePurpose - :keyword filename: The name of the file. Default value is None. - :paramtype filename: str + :param body: Multipart body. Is either a UploadFileRequest type or a JSON type. Required. + :type body: ~azure.ai.projects.models._models.UploadFileRequest or JSON :return: OpenAIFile. 
The OpenAIFile is compatible with MutableMapping :rtype: ~azure.ai.projects.models.OpenAIFile :raises ~azure.core.exceptions.HttpResponseError: @@ -5047,13 +4999,6 @@ def upload_file( cls: ClsType[_models.OpenAIFile] = kwargs.pop("cls", None) - if body is _Unset: - if file is _Unset: - raise TypeError("missing required argument: file") - if purpose is _Unset: - raise TypeError("missing required argument: purpose") - body = {"file": file, "filename": filename, "purpose": purpose} - body = {k: v for k, v in body.items() if v is not None} _body = body.as_dict() if isinstance(body, _model_base.Model) else body _file_fields: List[str] = ["file"] _data_fields: List[str] = ["purpose", "filename"] @@ -5067,12 +5012,16 @@ def upload_file( params=_params, ) path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), - "subscriptionId": self._serialize.url("self._config.subscription_id", self._config.subscription_id, "str"), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "subscriptionId": self._serialize.url( + "self._config.subscription_id", self._config.subscription_id, "str", skip_quote=True + ), "resourceGroupName": self._serialize.url( - "self._config.resource_group_name", self._config.resource_group_name, "str" + "self._config.resource_group_name", self._config.resource_group_name, "str", skip_quote=True + ), + "projectName": self._serialize.url( + "self._config.project_name", self._config.project_name, "str", skip_quote=True ), - "projectName": self._serialize.url("self._config.project_name", self._config.project_name, "str"), } _request.url = self._client.format_url(_request.url, **path_format_arguments) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py index 5b18bfc78dfd..9cbaa76152b7 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py 
+++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py @@ -2603,8 +2603,8 @@ def upload_file( :keyword file_path: Path to the file. Required if `body` and `purpose` are not provided. :paramtype file_path: Optional[str] :keyword purpose: Known values are: "fine-tune", "fine-tune-results", "assistants", - :paramtype purpose: Union[str, _models.FilePurpose, None] "assistants_output", "batch", "batch_output", and "vision". Required if `body` and `file` are not provided. + :paramtype purpose: Union[str, _models.FilePurpose, None] :keyword filename: The name of the file. :paramtype filename: Optional[str] :return: OpenAIFile. The OpenAIFile is compatible with MutableMapping @@ -2613,15 +2613,19 @@ def upload_file( :raises IOError: If there are issues with reading the file. :raises: HttpResponseError for HTTP errors. """ + # If a JSON body is provided directly, pass it along if body is not None: - return super().upload_file(body=body, **kwargs) + return super()._upload_file(body=body, **kwargs) + # Convert FilePurpose enum to string if necessary if isinstance(purpose, FilePurpose): purpose = purpose.value + # If file content is passed in directly if file is not None and purpose is not None: - return super().upload_file(file=file, purpose=purpose, filename=filename, **kwargs) + return super()._upload_file(body={"file": file, "purpose": purpose, "filename": filename}, **kwargs) + # If a file path is provided if file_path is not None and purpose is not None: if not os.path.isfile(file_path): raise FileNotFoundError(f"The file path provided does not exist: {file_path}") @@ -2630,11 +2634,11 @@ def upload_file( with open(file_path, "rb") as f: content = f.read() - # Determine filename and create correct FileType + # If no explicit filename is provided, use the base name base_filename = filename or os.path.basename(file_path) file_content: FileType = (base_filename, content) - return super().upload_file(file=file_content, purpose=purpose, **kwargs) + return 
super()._upload_file(body={"file": file_content, "purpose": purpose}, **kwargs) except IOError as e: raise IOError(f"Unable to read file: {file_path}") from e diff --git a/sdk/ai/azure-ai-projects/samples/agents/image_file.png b/sdk/ai/azure-ai-projects/samples/agents/image_file.png new file mode 100644 index 0000000000000000000000000000000000000000..50ae6c65367af30a10642fc910cef97bfe765796 GIT binary patch literal 183951 zcmdS>hdbB*`#z3qX-cJ1Dnv=jDmzq0JJ}>7D?(&%Dv=o#6`^c06WL@$QTCoi_TF3H z^Y;9_kKb{;{)C_7c)ngoJjVTgzg@TMI?wYuubYRo)Mct|blWH>D5$Ppk&vaJ*qlQ_ zL3w%$C4OUlV2~C6Cukw5Y$0cI&%#>MOqb%Oro}@e6APpJT8vh@X6E-zjL&lLaGX8O zsBdBM&|HX<^TGe`1so=3dYnH)C93ctTOVGzZB9W!Uqt>}XIyP5O0kZD;;O_&d7I#o z4r}?n{mXn4hU*5mux*>;@Z7YI_eowU9dF|S-e!BPt2M{E4$x?OUGn+#ZI8Y8@i{vC z@kh#EpFdj9^hEY`_BccCPv4eOj*-;U=8lz|#!jj|hKcDn{gsbJ| z|9;ZFXFrVp=X03OoV#H~^FJT-qDAxnPcJFqhT1wNCN_Z|KYnc4y7i*6@`3K|Zoii= z*B?4`NZ{PL2(53Pv1&PdBO@a>)NZns1o7NwQpvZSj`lDRlbsyvI5*N&z`$?Z{5(6pT$YG5vw*7s0_hT*cPe+uLX6$a!)Eu`uAX6jLq7LV0c6XdD-^u+nd|k zB2=@?Vr3#I+dDdLs{Il>#l*D!=+UD#%X93V1sGv$%rfhh{$h0d zyUrOor2zLUW+eRBx@&7|AI=QO92PX&xVE^5x6^tTO9y{p&`t`@F6O+?b=b5>=Yw8tF znr*AlTb}qMepuLMSAS(lam5=>^NxHm*KPVW;hVCpMm_CjhxT2d@^Jqyzb*AskeSPq zcl;(=?U|-Rc}<<2^gDO%WL5aM*P!9!an<;P%!@*IzfuT0F3G1Em$>hK%cUunU#?($ z_eJ8J4C95tnD;XroUE){il0%_Cf3EgFuHe-rSkoGU*~_Tv{?HKDk>^T<8jhq>xD%` zY7%arU07NQzY%yQD?f%y)%U#P(%ye-%bZdnLat588ipN>4GsH7+cL_^7@WQ7cn$vE zcjnZ{FS*R%7+LoPKMx;1yx_6txb)|HH|rDc&>lHw^)x;{zI1+Oq=hC~J+EA@W2PbE zGWE(rkFQEbORDbn*X(y#RQkupo^RN^^J|gV^rM(12Pro$!^YEii=48k%>?7Us!;Y; zmh)7NYRds7Tt8TA3HR$c z-u*(8q!9V1Ap)lBD_*mU%gg&qwdv{T{GJrPE%Y#3E{%97_y-1RkmBk)>!f(>~d$OlYz+zyR)AHzl;qu;jO)xk(_=cKfM5f&= z+r7%*t*&C4^{Y>xKK(t?^0_GW*|W{9Umh6NI|iv-bo|mP?7VikV0G>ghem!v({fR9 z@nqHcf4}i57qKU~O}i!1j5}Jdq#t9d?ZWku9jR$M@w-rRPW9&PmoH!X`TIY`sxb=) z9N4_`@YirjuN_W_R^y!_0TTQaB&vH#ea>Dx(~JE%F+DAwZqTqp{R>M~s7Udx^Q%uk zd|=G$q@|~SJ~8ny7>|QuV!YrVUBZbp?xeD=Tw9mp*I)l$nCubKc=fHMw=ZR5@s$FAvm2D5vUd#f^F4Ps8;ezSh_K$nHIO(Blxl@oyJuQLc+V 
z+Z1hV1om^N#y0U9gXYh`fWF@nGW*@Ge8+*NU|mY`4x&gckXB_26~TRVX%gN zDXK}Tf-E_|l&@XeHaFJcg*~f1+5HXq^-Em5j`TLy_1pKK-X*edNJ zOCi`0-<$H0?O2C?)FT%%g^X}~uTp#!a?V=o$IIg?_V<72oDmQdJj%?x;p)|^1`O-Z z+fHq)i@qryBNf7B(kUEKk9?$o&6*Lbq>ZIne5CXwHMxO9J=YyE-=9TmEG3xSYkuZl zH^l`I{LnPNKM)Poo; z@jiHV7cFg!=^@-O5@7m!SFy0K_`+x8L3ZZa>LcdWcOJcU=FFLgL-i+B3!IK@r{$2q zHEuWRTB@zTu<`qnJ=@TaF`5%nzhmAz?YSuz`LnQa-QeKhm8(}}Y}s*ZIaZ@Z-@Ir) z*-UEHe^jVooNZ0f++a61vK58O#b0j+3fNno`+L@J*uZP7jYNt|ImW`mBqE~LV^zAw z&gN~yE+G;96%asXH}ji{$;JNnUVRC_8q@B}S&X$G6crVHXldyh5U{Vdu5LFCjSDWl z2eFxHH5xP^azZNjD}FJ#JYHZfYQn@k)R}i~s3|!)Rh{DvGaK8s4C9U>WJk$lNl8h~ za{rUfnWow=g6@nqCf?byeLKzT>j7-82*z-W!CICCg_^tNRf=KKcd_=^H_yrqu@5Ac z=SKew4{yJ9>lPCJmf}Ovk&%o%JbO_!1RR%mjJxvL?J|$h(HYES)9jWhe8;WrR@tVW zYqOh{Q+@C2kdP2=rxlx!uJOUaz+pZV<7b~ge=c7tDUp!4bg8JTi@}%S{QkVTrCCFV zwZ;Bs+;o0rdR$zbn6Iv-cVOl@^S&MQ!Zwc(bqAtWjF5&Fy1h8%U*?cHgsl5}bkqmY zSX^7X=Yk^B7)p4i^*H@ldzQ;*y($8ZG%Q)&hbQq5ty1rUY(GBjIezWK^*0;?)nOYx zDkXCqbKA3L&%{W}*Aa9TXxn5@BR@RM7!Xk$pskFDFb75yoGCOJH_n3F6U z<=NQ8%e!k`YPZnQqZhS(b@lZ0@`lV7CNyjGHwLgN6akP}Iu&GRR``;Q^QXv_ZMdVO zqbA)@$zq^-9bWMyFHhuAg0kJrz;3BvfyW3%MtqV@RlHI%mq9)2X^s5D>gwu?Np~zP zc&tWSU5|>p{mnV89VloXeJ#B0$B(Uf_VbtC@jZ}^xI`s0a_rbK#$(4e&X08zBWic= z+Qs*)LhOq z>)qB@8FI?Nb#Q##7q7aD?H+7+rF-h^tpoR)BCpca2tOEV&$XrbeDBwun>TkMYBz1) zPhM4;r1coPcNaUp|4#qr66dwWnB4~Q7JvIc|4PPp#U0CjU}Q9b%3f4eb<3tX`e$h= zC6c#TqDnf~!`~MG1@_`SC!|7!Z|`bgDQ`fWN}8|Ajx7Yf;FvksH}~ruf4ELYUS3|| zvmc|a>8B7JF_!O0GA7%%$Umy_TDDE z;giFnS%iPc18jrvV{xvoo1$;Nb9=mDvlz>(I3tt$_qSSGTN9ur{%q%4AxpNgj+_%e z6W`I6=F?N}*irH`@T|g~4iWpglKMEghRW%&fdTiW+2IJM)ujU>c051@ZjQ@yTdDpb z=}e4fbZMd9={Ki@J-^J>m~A<%+Aj(WREo?zIZ(r_sivjHw4Yt&X+yly_F=%MkJCCGC{7 zv=T%bx7CPpdc>R{0QA*YEc&&PTd^(3;{3*C7Y!p#ll_%)zm7#7C85g_xQuyeW>7or zMdVZu?k9YDD+M6M@Ba7BZeCtqt@8pxLPc$DhdMhu5AYcsKuUE%CD1~}+$60HR7!F0 zw7a{f-_3V?2<+|QmL%ZuH0xQi8-R0U>^GPX)*d%%P5W`z@;K8f@BP}1hxm*xAW$TS zIaM~NN}u1doA$@5x%Mpc<`hjA{g8+VX2k^M$6IMw^lwGv1RmnX0e!f)b*$0va5js(XAzYg4DXHip`Dzlcc4S*l7E_=9J@Kdbp}F~43*L2q zSM64?`xpA&naD?*2A!Mb%&rKIi|f 
z$VF{lKzu6LInq*=Q%QKzaz0hj}lRa=oXA8j7v$fUbLz!+85|RJ0QAr6Kn@s)A+l+?F%>k#BiW(Xel2cQ=VpjaL zv17vjn0Ung&dSRA15B@U`~BzMyez&br9(|FH>W;E zq2HM+_~k*H;@`f66HH9gePd%g&!0bElkM%jBVaJxVjmI!vXy6}BeKUpZIqXO$Wa<= zvaNwbC5N$!nkiEOA?Kad(^PY9CLO3uN4mSU>Q81)nTf2Sv@x@=TuQ5~s^Yv=w)o@3 z`@4U?zX-Q9>C7zzrb)8Cz4Xw`tSIv8EBQu^e213U^N;! z#gBv)e`ep;CJkpFApSu$)Ym#fb!xIt+zS00NAol)Dyn?E8->xQ^#U`embSJ6or(8h z!9CwR_f^Q*?%%&((jc3gDZI!VtUH`Z3or~2V0Yp>Axnc$&V-Kkb}q{y`I_wddikBZ zZ^&;!MV0Jbr!GUMg;MPGdUmizyfv`^x9=#dLNJ}kH#ZH7F)u?!fZL@XxZ<4dFtci zqpzkp+>)5|_zH5dOuB)Bjh&rmBW+Y?2XJ*6Dyif!!pSY)ak($+XLA11HQl`~c;|0~ zzTKOK0%zx%nXBNpuEsN8^{PVSex{F(jy?!)-L?CL=(hO)=H0so0dp?tihgvXIyA(q z@mdJHqOjoK+Nq!oE5N-Zyl2MPL*;-upFDX&YA|8Ug#UqWgNIHjBng3lNnE+I1>p zfq@$Q*#GqHJtS_BM|NZ<8;y4rl%ZwF^+A zNEC?41vxn!^p#S;^NF6Wt}Bp;`x+lJGxM~(qM6wla_QaQwv$Uoh=97vCr$5Lf5E{) z7=Iy!FEdj>`OcjPe0;3(C#IqLxDD5@R|q$p>`$?mRgf2LL%alJ4Y6URHH+O28ZqqO zzm(B+^V+o{a$iS|_+*%LZ7UEVL>{zzy4{|cpRiIa!tQzd!eh!2DkY zE2GkqlHVY|(yV(~Wx_Y22tGpsJaCM!7{m;9h@c)UHvXLQ`Lm0c*Y@()>|P%g;yvBy zcxlHw^TaP)peQOTVti8}lWSw@e^QRp=MX=~OXjB~_}t2nb2f`p$7SW@{`=!8rJX)> zyl3=%8=tuU2>;&EaRiGVE*J}NJT2cCCjLyKqUGw?P(yqKHmc3U@5d9Zg)W-`uAT~y zUJe9}Bt#6%{g!fuK?55&J>816Y?e&0;L5KdbCG=QvYj$6n<(CjI@;y$SyEI~To{T^ z<^l-SRJ(ME(rTpnG14+iRc~J@64<^|iX{N0u9hZU`Mj-bQ(|fxc5`xaQrvbvKR@0@ z`{0~$d#143t^IuE#U^)@mH(dG501+PcDk}MT~(9~7)x{}0D*@1(*X7)UrhH`(Ke2@ zXVJK9rsi8P|Bx#?f9^qfSHy^6BOoz($X9OMkN~#$Gc@FhhaAfz@jV&QyK&Q|Kd27w zUS8q`25g59AN~#L08fF|v;PG62En!J`3@e<&B~EcQH9^WZAwZ?nwXk8(>fpUzMn-x zp6fqhXZ>?}k&kB6?a3PFUHm&TOf+h!9B|v@x3i_usQKZ&Bm2itdzEvn*moWlcoZ3V zEMWe&vhoL+h)d8{o;*AoL<=I-obPxJDbx*@tlysGusAhP7eh^IVqLq!g~kn$*jn2w zMC@k1LZR3~k?H(Tv@uDI5XX221_Z0ZyAda7X-MAZRdcMUsOg2JZ8fBjN{cr$I6Ok6 zhne=2W;M=2Ymv7Vc>Q2*q$RvZ`t@|CS?x@RLG5R~Te)_#*OWyc{-z+Y2t^CEar2!o zMuqr_hWreuID`ZPn!JkHd=ZhUoMC)qC%v#cwp)4|GMU!QKuuBboMB_a?wgyr>}K@6 zkDOB+cck6ed!MrYNDm&gq{t;Ye&f^Lm1O^}fOQc%gBVrlj31321-&GXV5~DQzIICF z-RI9|RWgiyY9cNZ=}pB~@=n6~>gM6&{gXO}&ffDL;5YnFNsM2y4z5E4r!__ywWMAe 
zT_^kYd|Phj!#_{TC!RVk&HOM=6)hk{9=ikNN2ABVWo~Y6Vsf$&LIM_mDx{)uZrOCS zm6y=}ix)2%blnLP+jJv{=jwx77hJ_2uP)DP1Jcmhk2EA}Fc9i7)R-7Arpnt(n9gyUh2!h8B}*$qxAXJ zlhRUFCypH(P%9;LDlb&`OknI~Yt$pjAI9P*#!8W*2r8BQPXlHm$mGvaG(8%Tc(*b*OkB*L?*fMec)eZ!ZW-iYQ7W>(w6O4ETJW1@ zK9*j?u3&Lf=Y>Ddh-w2uy-tT$I@7er?P5N)oZWPv&oz@dfVnWU-tt9`9PmWtY>PO3 zp-61yp^y>EN1b{0+JG!Y>FxmCsLX&0;tWG{Qxcai_uEzJ>Z53YVKYO6O#T$+e}hFF zDjb(lbBe<%OH53R_P@1N0Yvl@7ngMP_c>Wvg-uPgd3Lj6_)jAF;Ihg9){g8;4%N&L zll)~MwN!LGU;U1XS4ZDGl-S#=lR!3l%Flupy4}0~1Jt^ZWz!6sDj9V%0KcMi|Kby7 zIr@<=4{+V3K(Y(ttD&L@3=GV)9NvG)ht4(X`kTJE&E4RqHStQEBkI%0y zYydIYCvvRZ%W}A}u&RohmP57L&fKqVs<(n$znU>YqktaN;@7O>{rmSbZ6=urcL3m) z(QURf-%U&V40~F$@X02+S5y}oZ}vXfw4KXw$s!=^jk!4jVaEY33T)t-w*!=^bgVZ* zg!WDM{i<8EKXuywpXkh>=4a=BPCMv$wt$7p*%wD&m6zY^M$PaX^pX@1x>uhrDE7R@ zLWCn#W4DJ?F8h59Uut1{|Nb;PJ3`cVeez{7vGvhXA)cTFzp*-WH2fD7WhX#e4hY+D z?LV!|D7@p>cI+Y~D9!E?4?3C-q_RXpV4>Ht&4vn?GWGoojIh*ig+SdY-GGnEP?QVb&Jl38H&Vmx87jvIq z2+T0&e{0GGPI;BfB!AYc&Ar3oBLb;ZN1MXZ3j)0gVUzx=Jwx| zYlyl|D&JO6L@`%Q;hLcVfV;Lf|aw3jLZS#_VN|Q>?LraPj<5g zGG^CUUWM6AcB|NPvYns#{sSG0%F0T*JAWAfuaTJRf_9zq-a|t}q#&89JB;x$AIzv9 z8v(8taTctEPm7|Ucp+Odv9Vq2+o1?Iu8##z52;PKF?K!14JU-5;lBAN(b`4#c z50>s89))NS$?q9&85tdI2tA9RPlioN4#!0xe_EqBNv2goT0wiuAfpS-3P|TvjHlv+ z0c`@zU%9xrv_31x(_R?M8J__46>gYCMI=`w>5({M9+*x41VtBll!2(tA?5tN|F;Pa z9M}Tpm*~7aH1y`hi^q_3Gyz7l#(L1BAn>9z@`a1b231wnyAK{DYV>Y@4D1xg^E4$T zBlFRy~`N48$uY&BKY-^0VFRP+W)I?(8oXgZ3=Tep~@p;Ruiv97juVs*aD zP{evKWwxsCsrUAe66iDB!jn$-4s(h_L6^DzNU1tcJzeJcKjh1}`df?R1JFiXldQAz z>%RpawU6h2&?+C}I24W+RQO^aK5*O3p6u3i!+2Hk1ht&~>(;Hi)MX9XhNwm$OtFrg z_i(S1!Q&9ikRak6DZ_t&W^(J6@AawU)Nf;u=Bm?8%Uj*7O&BR^v^&+aM1%u^K{qEU5shc;y0eV`}cg{Xz zaQd2KJx-z1A&i|=SXOp9la&hc0y;LIpzPF$he4hCpqQvK(3WA6wA_s4=QC<%R8fkP zJx;PTve#wo2iMngsAmH9bEm%;wJ-@&+>nwg#gmu>UmfQC+k>6^@bCBC6t`(x!Ay$r zrS5nLM2SV*KwOqPD0?~vg)~gkYrlSL+DU`@*vBX&UdF7rAJgf+n}Lj!3JVB1ETjw+>JnxOcI#t#rjhGC$_33HZd80CYltrV-tece8n^kd z@8`h;mGo`g+}uUWz`35GBKCbtcUg}fUDxF>?NeUtiN~n@_0c;0_Dt@C#KbUZ)?IXT 
zo1fmqPO#=+~rFV8zgyMo+rV14m=*7o_qSPd`3{>3Tl8xHJ@uC8GZkLC7?5i z2>POq+2y~?QMb7)2Bb-k4T%745&)+KXc8&N*frH5XiQyj-L{WQt7J=nU9YzS1rTM9 zzK$EJbNmtdbSMN_^q+;mw9C+^pwQv%6eM@e+q?O$;Qj2t%J}fPmcAJ@z5>)E-9*p# zvY^>;_TJB3zfNt{SuLev5Iqc0R1CXj9YwN6K`HuApEQdpWz2xFt}Ex-aOW?Noy(hZ zT%6Kv&#|Vy&{*B<dyafMzIZ{`v$9 zOLwWyA@ZaK3a9YRq~Dq-*K}GX?MfQ8p`qc-Xd5k16%m9b>N1a=IMIWmNs23nTDE`x zrAM3okjA8G_r|^3HW3hvD|0}eidBf;fBN+44_N%ehrJ)PeerOkmb!ku7`11IC|l{% ztu#-oY#Tl*guA^#WHhxh-fWvy{(>ai4SAR1QTI2mp1ND-CD9C#iMqBAy(Asn7iu9X z?gK$*MMdd>7We&1|9JJ{w@CTF!Tmz-^6PlO;iWR4txwJO-FZYP)~G4Z-Z~g6mvW_4 z`+WoDsAS>@xb~AVe zr~Y1@!3{BfyuY`+D+`)FGVu<@{A|O8XVeS^c_y_b=%WF*?4we1!sd3EYtg%8zcVa; zWYF7;?u&x3KCF?#ot4h3T>$y%n%KuR|D392qwaX_)Izzl!`0=U#28pobNy545O40 zK`vwBYPQH`LS2!1=Yy9~QYL4cG~$K#5_iC*=Z8=JTl%?`L4}%`)%*m^aY$7lfE1b&oje zk<#J~AW{-0>HH4>;4dI(YI1GO6PIK0Wo+`Xk9m6O!wbi!PLLj&B># zInhk&Mo#m5*>N+7hx2>HJUT#WKBcP^c87QUB1YFUt62`niZgG)&+)I+Vm>E6PCrt4=t_uzayn^ri2hc zr-eV812)rxij;nRyn%GOK<6bF^432>O$i5N(C^4juUxK_lIKT5z-vFd>|%%&EDgYj zH7T0nBq)oD!-x(YDsso>JO24zvo`W7n|MTaPL4K!eXL?4-P(M?zpcaL=mh%t?Xt14 z8BlE+$r5EheR=|Fe(Vlj^l?0hv}fFr4Z%<(e`V_WpF$W$=-|olLBHa@WKr%(Wv%C~f)6O2qE3F&F;B`901Q3K zr@as)HA#o3`orT^yOx%gF!emUSfylo%AJSZ-@bk8aBEuYbPLr{HpK*Y(2_Ww)*qo| zW`O_T-r=!j%Fxw_p9r3yg9)P8Ci~V3ND1H2nWUD}m(!#@#~U0Ia{}KRe&$vR*f@+U zPtiQ@LCP&copEhKr_AGiUG)7U3K1W)WZBRXCAqvt2vTT3*eml_)Ak!vU+#hm!Pb@z z$Hjn04z_DI)}`pm2~y5b6S(2Qaq-v4wzxkt+(Zrhn4!Ol^q->jZ5s;6dT@-3KmanR z8xUT?!JiGrXSaDKHT9Iw595#A1Nw9@6UCwx8msx9Lr(GUH-xzSSWaR>g1lQM-#$w! 
ziTYoHk?-dtnLlq_=Zae`X_K|D!kQ!j71 zopSeL{&0{6*q`FT{h}z4m!BUd`R4b?h&R|)1RnOp%*@l67`^;M`w8l;FDwDHsR+xy=Xc2 z^H$nH5+g0EIJuai-2Ve~-CTVV68AXts3VQs=$RBdT$hmI>-^LMopd@8VS9 zcUYKEy-Z02ASdvJSE8ru%5YtzGa4|9LRFXLEok015d7Qw`E$iZNPMH7unvhCYE?ka zg$lKI_G#t!?_UX;H5c*94qt2ecbd1p?6#`*?jwR=*vxl4UUeE zlW3xPHj3CxT$p(zbwuv{w72Eh4=>~_FT1-6$X>V{`FHa2d=3i_cN`XwR+fTI256_= z_|Tp`OmteQ*i}#hhBC9WN$wFc(Wu74rQlA}Bjo<_*v;q%JX>1~>FV6u-ri1r9Ux&X zbtZ5J?k!#aU0NCsOj}ZhEt~eel6|JU6rU$+GQV)QKrclC#;Vh58I}5d<JhA1oxy>B!g z&W+s8i(c7~O80`bOK)YWg42MeqntD|0qH&L%RUT3OojZ2ywY#_Bc5#Fh^v(Wkw6nGG zq{1!HC~H_;rKpDPoo83w+mxbNEKva?(CHM%{x{HYMgOgshkSX^loT>R$%bs|rb*p%d_1y&e z4^Fp7aKge=yQSxyNq$ZI5Aj=9SL*hE1tvAoVvTr@98ef6oA*;WuoBrqn<~ z{2TQ8Y~Cg%11cLHSpC#Yz# z!Co*N##vJjknJQ|d`7oi|A?+wR@h7KJ=UOXG>%oET?>dCfXMW8UL?1$=>a+Tcm6XJ zq9XugqltEXrp4gP!l4L{d=!NyakqRWC8cQ5`5};!ze8H~!;OhVj}=`WIlJTFnT@wX zg?HyA+fMgEveC*PHrWC>O5gA+;EzZ^P1BB!E0G^qPh_v_DG4+yUf27AYNofR#|314 zYXzVvcKc7IK({k6i~M}Yf2+TW8-g|B;u}#FP&B(?Gpx_zu15SxI)4U0zl8E_nPf;b z*!lG!Qv0cjL+b#4cLuR6pT`oN=m*88@e4 zIwP`ETS`KwuRgsw4PiDQS|1RT7!{k^_v_to{n=x8AdeA*ur*Pjr>6&=n@7~53m0_n z^a`4xgTXv>Fv$y0UmI1wXhT%6IrR|fTsImnuY6QV|ERx(+x(#2AqwoRXmf{q)bHL+ z-x~qQr|WM}%s&XjSC0Cp>X=&$(DD`fN2x~fod$<2;ddrIJNUn3mmlWC3$U;>oNU<% z)b2R*@y(QYF$l%LEJZFJs&bZD)CAWtYzO>kH067zrMS*zA=n%Bv!F1jTy~5D>|#?&I=#e!epGC&`I+;?f&rQJE&y6lHYjZb zQX1v;1d_i1b0Lp#!r7I4#y^D*-M}(h4Tr}_%7o6=Hb*EO(uRJdL49yZcx$Y>#-Mrf zPMz6}4bQL*L|13zPzM_CPJiitW47T12zkE{)m75hC{AVgKXdAW!_U&_q!XXGld=LT zS_hr^j`4NN_+KyhNoW8+idPn= z&GW3KouZ?JJ>58jtjA6ZnY8CS9zvf{sbvg$O}LCR+SuAJ2S?3_*+xbLo;=H;!O%qL z4+G7gA_mT8Sq$!h)ShI1aOppd>C|DRV-gbndP8pNl_uNosFm00eecJO52P0$0T4U! 
z%!PBTtXCeS-Vx*;9UhKv%72uwSq44%Juf$4^InvdeJPbU85|NaOP{a$4ozxe_Jy2E z8lPrnsjZuZ1sT{#c!D6_GnGpku2ydGw>LUDFIy2U%_>jdJ$z_8oGF(82BvuDm7NZY zIw{CENMK10Sq>7Oz(p85JxOT8CmPXKq53cLC}$aAk~ z+Y^EF_aoZ3-|4DS0yRQ^Bm5HA(#*)*)YKlTlWz|J%E_dj)4N5h*7@#Lbl0^9Ga^dZ zGj!PUV#R4QGZq^+Zyug~nEIj8wp3{T?>BfQB`R#QjMt^QCH|B;z5aI7)pCz-*qR_- zTdB7vtL3;6$+JFI=H6M$XDkr=o8nFV%#y9donkhj7k@qZ=}XbFJAT|Jk0ad5;ANWT zaV$ggOM#C4K3$P;4Csi*o~hC+?Ns!_2mY;0`})mz`1+Q?Tw@l(BBp^rEq;0Y`b6Ka zLvZ?1qO?6NC@8pQVqgg^;u(Oi7WSasi9bH3BI>}?GP2cv;sttg`T1x1OdRG?ev~)S zM@P3`hyqSCt6sfeqx! zl#zjf*e3t8E&f#*L1VZ5s}_O0*?6cF&193D!SQCEXFwxyccLYBPoSz0Cx%%^V=)O= z@juJF%uGyqv4)@C`T5y5<2FS)`4A7M~tZAmoVQbPhEtR8I$A7bsEe>1&K z0Xw^Q`$Q@N z{n!U=_iblB>F{O9Nu0{jsrE1SARBdwh=|B6$K}OyHF^Suj?Ux$RsBoC5{gdA`NeNq z+uHht#y@2{0z%#P_p^m7H0g)2T=0Joc6)E=AgZe^-Fo#!6E#~YWky4b$FO>MI58tD z`4X;d-d{<^D&Urx$&NmCdRv{f$}XrPF->#jA}Z-eOiOOQqK}hf8NO(VQ~-^aW0#X)ImEDV?DhS*-bp9>3(K`r7LRGurtDJ zm7??GrPx`ejBHA#RyVFHc*LHw+zcXpx}t0}0Rq)hb3PyyRvmX65)lPdMNmmDjAmmn zAxS;Y6IJa{q=Pu7X^3a6CRO)3#qFwaLXF`OB>9fa`Vd;#`6ejHlX%^sZ7bP7aGoDi zlcBzM`Er?Xh)aO;W?tSMvh;yWoF21JJ~-UebYISd*B+j;_1LOavw0)|h8I01gH>l^ zj4Oj+CvX{K*1PrD2x>*+;_8y9pZq+%rKP)%&%TZW6kj3B%yzyJf}cX@*M_)?c8^x# z2Z75er?scz>q%`xqC8gjA{i>0-louaq+yP~cI)GmvZ-ur)ZHK1=Fh=oU^ zKor)Nw=tVNmrCQO`c-gQ!W}aw&qr37{RXrop%V0ql1aXC4Y>ESzl+tWxd|%_Y~({tw$A zyfei%YS3hOIJuw{?Zk+0G~>W>NZ-6k zN&VEJ;@0OGAGB+Oj|e=x(5uSyrh@p=vGE8v*0p;ET)Uo>Vq1d{+@zsLlgWPo|`d2B2z#qF&ra71XKPEmIF3h9>nUxcl)I@{9F`w;)mnwlCi zezCT;Y!X0UhxVeFwDc~R+~1F{_VwGf!@g+Id3GjG=!4x_L%;eF3dLk~A4HaWBOHCi zQA5N;;&cHA-`c zIFr%7tPP+UxKtX*0zpUdesBhWkXXQo;yoD3;1wbuyp%Sd&9VT{Fy?mLqob>5TT2cnxq4ht z_`@*n@9v%^Yi+uOVXFV+%kr%h52ns&1Wo;l%J!1g7L};8w8>gY3xZ&O#A$6&&*fOm z&)%>H>Mu`bOB-i}zIj_wwY)N*@>YHq7lFrIC$;SK+PW@;+Mf+yjJ_1yCNoEE2|%6U_ccAsv;!;_q} z{I}*PGnJD9iRS>o7U?8}0bQ~sGXSIbTl~70V9#&af7i0#-tkb~zP-Kng5n+V19rc( zy#>cTzArR?CZaST0u@>%S8m;+Z9GYR(_f>!7lPsr{F-+_bgIaYdaM7*`2+lENMYKj z_~Dk2&-B~Auw%&a2{dCetw9hoJe=ji*U&rJJ6rowsCV(*6x|`gf6v`nozXLvhJQQP zVNq6j{&(R9nnw2*_$ zA>AM0D_Vqn?K;&Kqb4a$_D( 
z*ly+u&t7XdQ876ZVMfo@cb1)rDUOWJe*Z~U28%#L6-W!>hH4S=tfG$P_;E>jK8pHe z4UOK5X?*B~v;Fwq&N_cVnqM_8+Y<5hG%+zzeff+l7S-dmVZ%pPSP+JV|K{XRf7)+Z z&i-<`bvpG!9}ORExwE$0lIkzb?~dB9%JteXvnP|f417W9I1%kJWg-R~18dgI*uD=d zi_oWoEH-Y@=C)dy44!xPec^fX;#7_;eG8?cc#VE9Joo;Y;cHX#gBO{!`)C{8X zS0Mu1?)miTlXAL2KsncMd5gDaSiC--l#Biiv;gPvuEtf+!v$Ea)7RMZ0L+05&)PU5 zOMgyH{jcRy@NX4|Aka@O3ADd{UHp^Qw;Qr27>8A^v>;RN zcs-&>dv<8<{uWgCZ;(fJ@7;SW;m#+mP2gmAQ{7~cdUqU!(-w2EMC>lC;Cqdf`O#rN zzMQm^_5G&Icg}_;0!mVzJl6*C(sZPmJ8az&lowOD4)W}VXeqIc4hTGCbXx4CtyxM* z;R?{nm`XPX2qvBGi2Cat)$Qd}lH0b!XHk>e+0$P{S{(Twas*x+_y8r%_Klbd_b5GB5InEnul?}rE9w*D9x%=wtUE||V5%LG;^)_(a zVhD;8(BwWkZ~|#725t}Vd|kBX72Ft!?BI>8DsMv4YWm>RK>wA?#xa$~+HDSPI?p~y zsav%?VIb5K3p+|&UB2q{LR%Y(a zA#IE<$@j109KoZCtKHBlZTkA<%kVSu4%krBJie)X*p`6)l1u>0VCdyK2^`>z}z4kgW% z@z~lJ6Kw+e1wW^yNq~sWuwlYedDP`ud*>@Tw2&3Pm|9u6y?b{kJUslVB|4r4{f4I? zpV~4lC6T!|C2DuHdm@CloYjNT2V>NG#Kgs2E#aiJ`^AivyEm~o_Dc=du6h6d$&z<4 zfTLH9v>L7?Nc_I+O|de?cP)98WEYKK@4(2nUX9B;lJwp?OnJ!IduC^5e#Jlr@fu`9 zofuwSWTkG29aELxOB#~c;<2h(sg>hbrKA)OZoAX*@HH0H*2adU)gm)^&9@p(JEK46 z3Rx@+ISkU_VF*zta;*^0pAS_ASSTrL*S4d-u@!)JuiBlYI~hCSo39ZNdu^r#x?@Ee zfKzGzuVo(wpHt8AgjrA-z5}aSz&Kmdp-;cJ;|9fnjnnQq4Km z$thz{jxwEAg-}LbC#PbDBCb|B14Z}J=nr-1YZ$tvgA7gE_@FEQ2=4jnh=%H?iDxM{{nT#5s_!vJTMMsC$smX?wj^m+&)sM&#yPyDpU3yl`&j*(7L^_PFAX|ft9e5D=6d~_!H`tYBenLY| z_rP@+#U=Vva&NdstvV%BgA-H%ze6a9yEv)$36xcZx<>d`Od)Q>P;8rzhA<0v1WVp@WUnP~rpt7<1t1bE7J zGv0Bbq>`vW6>1t@#Z#!7o=}zyfLwCTY#c(0T>u77PumTwk9AUt*7;aK1*UhQG1qbf z<&Is6_LJS4(0(>bar}EDvM}Dj5-p?(NuMCNy%eI8Sg!~ky13zX)6r-;^>Z<| zL>DEF-_HhLbeL7XiKKvGS~e9uP0hy$#@?(OYCgot7wqTv2=t|GU|E6a1PB}3wReF$ z_g@aopF49VRr=>*k}qP;>oqZy-7S3L5U>b`3fh2TX_bf#eV^Z|YHA8&M_?%WP%EC1 z;)REQTL~#Lz;Yj({4U;V;KZ-sQr?Q3yK zB{2;D`@5L^OXXTxTErNWo_x|dp9pyvwyMt?^eY;-ueI)9YWD+#i7E=C`UkTyRPu^7%iw28h?(ysnuf7FsvKJg$q-kux3Sr4BT^5P6;wW;5B2vO z;Lo&bADmi~e}B-_WFz}TAR%EaNC{a-Ca~2A{@YDU_A!_Xa=P@z9DSR?`x?9C#t*zq zPENjdW*YBYT)O!Wof%2<6B~#**;~l6R`0zdC3II!K_MYJGy~7Hu6bWn)4qAL;4%sU 
zq(lFhL05|!G(&6rZ-CETI+%LCph*l?i(L)y&Zs9>?m$Wvu$@AT%jEkqho2?~ne5w* z?s1{}Zf1#LC^`SR54e)D$tf2sH{O0QlRK+b;x0_}H~>CF^k;`G3hCkNfID{hPhzm6`?$Sj8>D;fGib!@wt)pl4%LbhOsW!sM1MTfUoTA=`h0aQt!7WFFjTuZVjEHUiv77DKoF zu;)-Zyc%uo>8}*2stn36_Egp?n-IE&2Q2lx6_>5Rp z^b$(cpecipv#VslHkMZocm5_K0Mysl3pZ(_q0biKkp^|Js`p?k0#n}33bQaWy`4(5 z7$5iw*+z1hR5Fl#3|ZGdGaDa+=&wnl=ZkaiMzzJMUKtE%I|Q_3nd{X|;el*(bUCg| z>_4D&62s&`gsDRjUgW3+GVNxGqI`BqeiH_i7G~;hNe=TFw<|L;Kisr&<6TULuu-Yc zPJ-aA0HgjlMq1mvOBZGwj3d#!2KRD@eVw^#xhWdg1gey%-vPEqwC~C zD>O9e7@ApFShUtw7R8c{dpB?JpG}zox!RL>4y^#l9Er1&f8-~W3Y{c~+2nCHn8y$d-qi+O;Px)P63^SHV!|^Suua4rQ3n5-h4hstl^US^G z`X-p1GF%tq8=DaW1`RjeO#Ye;4tgQxfuBLV*GGpzn7?8dK7&R#qyMX0l$=$Hy;q_M{Q`(Ad#bLZoy zacBT+fm<+FQ%H;kXAWoaA?uMzAmrjxD&qqK%CRpGA)wx#vra0X$Op{%2FrA$+=h<- z8)A*R7z)n}Mnkv5>n^T%J05ozcf(}@0vYZ1H0e@(#`QY9htt1&+5fuNEbH2Mbx{)H zYXM_TlEY+t8@>f?1eFlgW_mEx|AXo@iI~Fv3dH9lbi&~azEHkipO=Fp>B7qnT%G_- zZ%cEdUhrJ*M(SydU)KaM$7S3u5Kv`{fTrt`Bh_*_5^fS=owmDq3iR&@kj@{Jnuegg z^k_E^WL<{A_>wPuVy&M7O_nM^fmJLINBNIkO=2MHyL_al&>^@HEFfbj-i#sK)9Seg zAe`+EFaN>#!ss&e?43HkK%?&DC zeEVTk`-AA0t5lf4iA&}*U?bQ{&|3|1oW*GCJ{U{HNgtN~I${~VkWKU=c29xB|3Eq` z5)VYJ>K_!a1gUU^uSXNzucB;n?g*L}#G!_nM%U00)Dg5O_W+A|;}9l_+lfzTXlQ2b z-0R^S!(k*_scXA?dbrUYPJ7U{pNvNwy!R1ndT;Sz8>VqcEa3)REip)03ikk93>$b1 z>bF3!6~hw`6?ODP6m6wmz4nH~hx9A#j{Q9>biJJ14TMd?0E(b&HU5FY<$#a{YY@K) zM~M-h0-1y}BKh<25Tx_^%0a_xI8zAcj|8$7iePXUnU(-UH`M%rj4Bw*fc1Hs*Sx0m z;*E-)ZoLBMB|3i03D;4~kX1fmN`f2@k)NNB7Q{}N{*MapfCXGqU!N5Z)O2a)etWj% z_5Np4gnZ+wBt^cX>FEy6QVb6z@r1s5Me``3s||SJE=l&x;h3t3k)GXoHVs^i49H!% za)nIyxkDe)BKGO(59<;V68r)KJs`P{bmY(y_mkrq7&uN-cwJt+9Gpx$gFHZ_F>)#p zF%d%;r%1Icahz>XBJaoca>bL0z&ve{?UgWT5>a3WMUZq%aTbHH%>>0ZS21mI7j)hZ zpLaS8D1I6Ht0v#k9x`iTaq$MYG>S~_z=w=!(}$DYl16P`Zq^^Dgkp`);Ga4nn+6J_ z!oyn*!H;-<(~Mg7z*o2Lxa^~Z(v6$AM^gBRQ%47|cQ^ATA_DGe6hU_q2dV6e{w-sy zLpmcaB@(D(lhDSuZ4DnO3E|%JmH0RzIa(UuO(c^^#Ki?=rw~UUz!dNpN;$E&WBIoi z+<_8+hSy0l=0_aR{ocVWw?Y%Fo~z0k;JgK7TNkt^)crF_5{Jq45wZ3HXT!7@#MNX& zqD;iz4Xw0pzk+&=mPu$$YAv}eg3H`UWGHDK%>7}$a)I&mQ> 
zTVpCntU+IlmME-^^+>bda7MY{54+QchgTrAGhttl(JhioVKj){>MXGVRL zlB<%3dyX`#kn;bV$^Aws4(1q%Jr5m%OK`ipHevXB5~8MR=EHTUU&KsES_MAKTqWs# zSP4?SWdxxO5brb@A;ax$tJ?PJ<;#b?<@<2h#8q87@JfT&8yOcRBx%S2eUjcdV}-0K zWUBZyv3yJfaw)yT$sdQ09C_jA_aWVyxZH_s?o;eO+)d1tBwtyhzULo;Do@Jjk>c z@Zo>C@Xz=-HOM?t72X_QUx%f@SO*z-nr(d#v4?mIYI4cER58djTAmor{CvY|!&i77 za46O>I;Jx?ED1v>bkA=C$ry)g&uVb=;diwXMMg)!$U z2FID*cy6E9$0g^JM~3>pwcrRTSLS`*t@IDDg?RN-bKnaR#Gy_O*v`ZnNQ{U<#!P&C z`-y3@ujkbd9UN{afXav~iJTRVdLRmq{dICI2dUb>3>U#UR>qzGZSb4T2?`BOn#{$d zPW-${Qr1ay88H~@3!I3<7Jma;k-&tn8B(_2+qbb__yu3g9#N*;HK(`zo1jfx&t`fEnU_vT)pogUE2nTjZ$zS{>Oz zf#iwFg5BVY+B0_j^+=CKUHKh}uJ2(a%VM;Z4BcDzf`0{Te3GrlT*n5D%67MtGz(fq2R3e$ucU zr?=_cyVng8PDir^6YmR?Weor64#%?}C;Wf3eFt35Yxw@lF++rmsAvx&w2M%PjI^|* zQyHbCsX?KXNGS~|QluU2DYH^(&`@a@?M1Z5`@f#oq0DoBzt8Xge$MAu_4a+g-{-lX z`@XL0zV7{MaC>pi%a9^2XI>%@WdgFAPV0NIDIXjd(80T7nPgN|C@LzVLG@10mKYc@ z@O`?#L_7@8d*F&s8Ij?GGYvu2+a?Wk7Mj$p?_XvW{M(Vk<`A4AXuF+O07XTZ*w)_Q3gmr~#Q7v7mD4__V2YtJOZ3X%CJ4GT&mu(=L?D`M_v;q5L!_J7G#P>5b5|WJeDbb6 z`#WTrMf)p;4Z|PKhTU<6j~U`#>H7M(`1r?&mZSkzGhg_rSkcnb7_B8pnv}0f2x{~u zs&Cqarn^hvLJ#*J(c-1a9+six?s&WxfD>ZZTdFY0 zvXCGIdzzykQqOg@U0mGUhVtfUDIkf9gjMG{wP1;B-G`XbIE>B9WMmIufDfF2prdsX zP_anOKHj3)#sWkINmw?N=1j>Y=FBtoMl5?;UQR_7%JV@+#G~whJ^WxMr|lLI5kZpB zU~|kH@!})X;@rXZbM(xvXdY=EmVbyYBN9i!Ny;ix8m=Jveym!l0nL*6_D*_;4w2=Q z_ZO_@3{K&vYubKy z={jL)UU`z`!LzC!Ff?g9C#O7g6uAxE6R)zy@YpA{LXJ2YAfv>lO`A+U)Nh0(fl8^@ z9xv4C!8mAe^;Fz=<5PhXwmPxU0xM zZxF7?eT7eXdxs@x5;66J!>6$p-Xc+gJF8qw)*&KCK(oz91SA8yez{bBwKxu-!8&AX z8$)Kbh@bab^Z);q-4kI*L&(00-R=LpgPqLD;ZcmNC^ zvDL_)l#Bu%%#S-(@!_K>TV-T@|CPT z+9jo979u%pQK<9WA!4NwgIV#JWPK?dN)MTwO5MvjVP$jMMM#7AFl@ z0}+2hNcWZr2+-p;ZYHj>IK9z4v&Q(-M%NogC@_es3hkyDFc?G@2CGd6)N7>Z1VOSW zYQlD8&k*=Cd@%@mgz|@T`xR+4+?SwWRp@FAZpIQL!t}dHbaY9q2{yt_n6qe+3l3t4 zgK-}oLl%J(s<1C{Vv)5!P6py20^!6~fHUOdAafHH6(w{R>F^=Zu5|S(Uf$_A?&mSt zb8>UfcziV-O%vFM!D3g>Q{H8BTo%TuhA>WQca-Q(fLS;=IUmAkhxj@nHH=V+b|r$c ztNjFL18;W*@*hxV6LXmaDxW{!DXJ1YoD29B>zSZ^@YP_4X%WNllhbk>W`22((SQpklSegXDe%$Bu*EprC4qDWtlrk6M^}&qaQ!+x9_)Ms 
zD=br*gH%$TpWhq)&mQHXMhr_v$-AM)!8(ve<+N#<|4HG_e1XBx*h5ATB018!NHVDhRr2~lI= zO7yiko*y&s4&HQ;D9+gU_QIC=^Qj_4X+m|Eq#^h-MQq3S{qxeBL^JqSxNHg~FX@95 zHGMw~w<5uK304AH5eZfp+(Uo_;4gxaa$mnuMrJ@yzm<+SyDYAPa*#B^1tDEQbCeAB z*q%`N#UMo3ar%M#O|k70AZQNmK#8ppAVJU>`}W=4xeVX^<)xsL4O)hQfl&g!{*nvMO-6HN?uuHJX%)zmQEkaZxK%k9o`G0+5xD+qH zcZl}ZyhL7roccJ7y8DNQ-XSiWfQuC96_0>`i%79iA`u-bBAG0>Nn()#ZPCM0WjN4% zeM#lcr*nE#>b>miUwrxX#Z+g(abHFL^8c#``@J<5lehU3^J|#>5_2pVhzLPMIR2Ge z0C#3CvZ1l<6;;o2kkX7&-_W4{)_>08+3x~wIUL`$_kk!k?|}d>rPbwfbc>oeCSNr3 zoR}?kbuCGqzkB`49DZqbfQzdnG+2tNr*oUfzFlqt-u=wIZ|^>Tb>XvsXaC%}mXE!? zTlDleU^RZct$d}Ts`L1(b}5+o>s!fajW%AFix<~f2)SHz9lLV8w;bQ1wy#%CessZN zl;BG5;ZTcS)x6TOWo!7d#c0>`&nFH)5amU_@1IY+=FtC{%e^jbPb|I$E6eamX+85V zxAWW2ez=czxBvOsdWCubr*vAkdwf1&Ys(JQ5nv_~9G~#W&EMbqbC3OgWy9~90YuXA z+&TX=CYdcyzdXsWpNsLL1$mNRe{>@W2z=cnWG`HJsMTQ3V#a@20sm(M1IS`^ak=;$ zuTte@YsAMcimbk$p2ZsUBdxaJ+-8I3ZHC2*i8m^c$NbW=<=^eOwU2cQel^iRc0WXe z$OGlIX0e6*9Weemd(vf#f4v{%N1J7frJoPW^~!GujhT44MDW{!pxwC1x`E?&h?s8C zSbU{j$^Uvd!k0+|x{*+J{Fg<7V>&`cvMMc`Q3{a*7>@RD$YA_&Hn)H!y#Kk znD}x3{P^Jt@W6PEf<{M?KMThxWL1k`2kPSLN_Y+6`t@($PMke^HcIv&xW)5mBt?~X z16C@AaY1J59+(+urkTq%b+CewL6ut%@{jO)5O~6-kp9@QW27}4nZyf2s&skNU3B0O z6MkT3F7EEmkg|a%EQaCO=hm_rNO@nTYqBYt$hl0U$oQDAJHFH9A~(A9D7c2dt6b+S zX86ORkhkO~mb*Y;Zucf4LxGJrA%IYzk;a(SM_zEiOknbY1)(h*Sw}0^pq*h7a+Kg6 zWUY^31Af=G&lnCEOF()#goGHMCy3%~WScnzEpJ1g39&-MpLJ1S5Jnu#mhoNP?_=2u zMz0!mlFI;EkO9Q>roP?*C?9EZ0`3Y`?^Y;D3CxOczd|EvMMXDCSt8WYOopTDm0dkU zD539}x9rtH$~z0HwzD9jiCyU2xf3Y_A}0M^AYdNnZ68uS1=lf*qo6|-K%pSRB7_EE zqd@eaF9HYBX_Sbl?dG7Ef=PG!t{%?uE01l>e6DnFGWY_>u1Uul z6r>Hy1cis&lM}Y2-L~jlGWV(=@~u}@RX?8jKSf7p!6N4XGeh{_EO>RwomqlwI14wD zpqJT3`fC;s_I0^K5e{BxR0nO?d(izY9XEI zr(!u?hVV1)Ic}pP6?N3(?CdRuR|&jeb1J{V^mT^y+6o&|UyQvL)7e@!DIj;mQ(|9% zTPs!3cMCpOU~@^$fJFl($R4oWfbDLg(=3(CFJ9`@@VmgePe5bt)mUWD6bI~s^M;%BB**1*KjycB zmC?@Fv?w6oU`0b1&4|i^Lcx}P0y5n6q&=>C{lNQqiLt3EDF>iLAVVBr46x1zVs!SI zBJ|DWA+HMy>9H3dA$CyoI77X?YCv%_jK&)6`y7zxyK2>T%Xy3{m!2J)fYe@A164g` 
zzsKH^>_487>ICls4PBD96T1!Yfj!1}2xWRdj#{xp$Y=v8C=9_|(>9D}h# zQ;pkzZD7aT1bjw3N`U^%0QFBid;y0MoDqB_Wm%+i53xf;Lmh$5Zbt#DE{TKOeOJ%( z)Iih4d2KV5Lj^Fes02=&(bWVz)(-?P=McqVd-!lzRT`27N^YydjT$~)Zcr64>s(BM zC0B&7PFG_iSArhh^y#NH1yn0TK~$O?KR$s1EHy%E3Qi|%q)ixX!NDQZA#5$h)%S`T z>Gis5!tqOLJr;5?b4cM$(F;^h7AhV%#^AVOCTazJ5Y}KqB<0Y|B4iNfm@K^>Q&WHGTU=J9*iUwr>R+y8PlKDtK70iLrkD z1}=-^#+^Az^Ru{F6h|+~X$4#x5)6N=%cq&6$0KiP_@=RO3R?9j4x4UD1Ii@;D-Hql zJHR7(`d44Wd)$^dD9M$cV;U@WqQYMGa_~SPT}_ zNmF1e)COd#6wv@CDReFu@Xfc_ZrRjs9UbGF3a3Pm_VPquOJn_3q)TU>9bF@yYEk>v zD>q<~P4mLwM=pN|x?Jg>i24flU)*GF7f=Bbi<%MZuIHO!+!SC9LRnKDfZS0^;#CNC zJS+xJ^*gLLfg`-3wkEBDSNGq4$JtEFIyItxXRuL)OF`$d)COS%1s}7~9l^ol3h$pH z{tQLc2LT}4Zh{Pc)Q$N5Nh|c)wQC=!Xms$d!0-q#2`g4OJ3BY2heFeZ80XkPSo5P^ zW`!SJT&4xK`kP5u=wr&lxYyztUNr+()_xpDys-221s^?JhvTS2SZp3STc0NuV4=RD zVrdYR?S8&Ik|h(9yH$Pj7=;C0unbA@x307$J>uvM)wa=R9kFL3I(Sn@e?W$hQcJZ*Q2l(8ZWd~Dej9=i{~QU50=&7B7<>~QXC-q zA{4N0D)NJ;$Y&=|fR55Ee>y<35DUT?`9uX-Y^I@x76^zFIadq~3>S3yCamJSCqa%2T6gP3>%)+1F6oztF8Umb6zF1D zkmqj(&ASJk5u{SU;skO+@esPxk7$|bGY+andaO57-m(c6sVj#!NJ%k6SRd?oZw)|g zT>C6Ug)_Hr>&G-W1N>gGLjGMFa40gX2>pmriYK!MTlvAv6QMF`%0e20=GUM&Eqvg+ zXujL0)-Y-Tv*-l~Zch7YDAby^V~BNKY%fa)U}iUDf%N6hfY z?cl{5m))HsWQ7hhhJbI^cJ%9Ou`}P+B?Z_5f z+!6ygW84lx_1(a}Sz%%|^ z?06SI5KlD%YXA%3CYT2Dbloxd>&No_qSQAo;mZZ&{%_}j{sZr=EG(hHduh;K(Eyo$ zMQ)x≫gTEde-k|6EP{8jj}ceyw}sfmHAL@W_pNN~cesMiuZQKK#=$^?!zI{yYfv z5~2oqe*cp?36~R=yCXS4nB0Ow=vaP4`F_kYL@$alFj^zyyqmX8h=AeS1$;LP_+I+~ z1OBNVq+#Nz+0GAz))x>=JOaxKvlRXQ!}onxUjN5=vyo?BtKqD{wq-z#&)yL2 zM$mJA9I1>Bv3r9D?JV>eo<)uTyWkhEl)ZQ^lvn77s z#IG4A+SCvBa>jrFklql$06K;#g!l%h`T!Lk5@lHZ;S?f(7vW`bciw@RdNdRg9DLX0 z&88^L)Vc6Ae+a#U4v?_s&EL~a!J7($o^?W&6b6Qh4@kfW+X;bI1O)6495|^ z^j=6N3H`&0CcbZq^6P+IWLvVcX7~NF}66nQ~p~0@M+tTA!?}#m2X3 zsh92>zomvWpct6|W)1%vdLH$_vj*ntGQBP>m|Lrq{v1N2kAN8xW5M{7sfb7_)~Yj~ z4?Aose`-dKUQgCaNF$K?VZHnt2u?w zkh{&O#RDmZ0fo#PftmY!2-_W}&St5HpQkJW3Su`= zv;x!#&k%t}2l^2l@c98`&4|d8QIbjFka@(p`~~f>19E;-XOV)!IF|Is6O!x-0tX{# zV;DR-z_6fhLJJg;NFXfy5;qvZ86nj&+@hTJv4VVqn}f*}r`Xg_zo5 
zxDu)wh$Bw_QWRQ|96TCtecp7V*8*o z=v8qUdobIIhcS*cdJ}p2kNoP7b+^iaXPuC2!t)H3$@1OLIv+fczB!Ye6s*%NIlp9y zR#qba^DHsvsAmn1TMD+7ye4cFMQEw@?6o9s;zOkXQUAYT0xAa{`!%ToTc$j~#XS*P z&Ss|TeVOC{ivu+U{`H#Tn=40f?2>{Fa$=Ypi2R__aLIjg@kP8XoU)m;KlSxh4BG4h zzl37(2~boFY@3BV0^L(g5&q-i4SuT^deO5Tb8+ugr6w}cO(x>4MT`9Im;!No)70c! zRQ3I%yqvnWYIP&%ozJ^p+GtU+Z|Tu+dKz%dy@((9lY@_Vq|)!e%QdbOHlt?|N&&W# zxwVFi@exp-l8J7Fe}lD1V&>EHE~fLEnh%m2q6J9s-zpFJ(A>(7Di^qkQZTLrs3Vzt zHu>HTz9&Bt{(^>|Ms(%1CiDIMg{dB&8k*`~xn9${?pW;6rlOQdOR2_g>@&j0j z5^R-BF^1e7vQ?tX{ebC|K|nKYDh7bkD zDgOC5#KK=>Y%FA=+lLR*sMHBpN-KIzzmF<2A0m}Rd&x#FuL&feVnN;PO&s1MP*F_U z_JCf7pIiyV;EVW%EJ@g|&BYY3=`goG?c~(EtD68%XmNML>F!bvTNHMv5D|(=N3S5p zH4!i3ra=LXq?x4I9)2^2M{uN|2Xif+&k|J7*)WnrunJSWN*(EAg%SQGNEuzqk(?pz zop@*Wjcl|B5N~j$*g`Rn?=C;Ae(}1>S!nTM+a1feg=C%pMHKGaSpy9qM zE^tj}(6z>n!viOf;PH~&R}zLTU`;5F!gKaxTA7$X5{ zmdqYriV;6}hN&5yN^9ZV8eG5=%x5I*n$9)Y-|C?I7nl728O{f&L3P2GGv#ye%>*Mt!yua8ywM>c z;ZJ&uHBPA($pRd>Bd`bN;kc)fXy8N|PmeBy;}BZVz&%c&pr_d#8Ggh{Gqk=xTHRDz zTg$ikdQ&O(HXMm46e12sG>fP)Mwg49m>Cy^jHxGlCTFaf|Jp;BJWODaO$YdWI@oUS z4wxmoBO95%`3^*y%#fLv>{GS)F7)h(?elzPM_mMj%GGbNnJZoO;$^ zilXY@&>&qP$;cf!hc^fcDs%`#^*?WP0~lQ#&u$Igm{&@C417?e`5Rs(XE-DWLO#4+ z&t%TG1av)b0fGaO#Z-^7zcK*!GUZQ-yTn-yjLgY#y@Jkr5MdSlt7(@2nHBJ;}l=W3Y@)RMoIO8yhUpT@27|Mt)UeePJW!Dh+S2burfm(p_kF6+2&M`*znn zVX?l3j*eMyXo=Q`^T(d%0Wxd^4_LEp<9Ej(p|8H9gthdqq1sr6mNjXW*=<-H4eG{V zVw|V|`?hrmLw%rFh%Jrj1;ot;aJ50*?(g6Es7mN-YiL<8U5FIU*v@Uk z_pyPN+*9#K=|b=f5}dFph(Q)54465Zt8T@Opw-fm3L*2O@^aydlkb;$N+Ke{8HZt( zFS@!3)f?8O5L56y*V|Sa9&#_H>YS`!4jl3W9tU%3?z6^lnWJV40 zX`4)KpwWnHTOtBqp^vBpw$0JiWR?J#S`Cqi@_yT)-slhZ=mQGuRYUia_Zm~KbTgmu z-Kpq8VDPMgnW}nQgyvKu{ez0Cp#o_2BS>g;HChV4u;rk9 z-eta!>q|Gh0B$sDP|W^(LrweKs*_Cr-t#cj=oHCHMYN62eId>w+x5$efe zrte+y#8DSf)Z}`RnI~F6Q2^xny`7TMl{{g*C0vizP?oJ*r)2b~7LY8WB*g)R9M-6uU&n2)LL*MCLK^9D zKoxNEf8}vo)hu@Jbl7pG)yl*8h#O>S6iUwfeWF}h%9Yu3a`d>}w;BlA>ZI~4op&Ik zi+Arf=VHOQ7)i$TyZa{DPU;1}=*e@=k4jGjF3BM*ff4gm=TH-t+cm z#GSfGFsHn{TkH{$4gZk5YUc+aqnE_sdvqwkzTz?56=)nTx+>9XplWSjxT>V8O8w5s 
zgBuYP&vqT-;(n)7rQ^fV4V5lf?0I}M$k0-7Fk7?qKUt?7Td zT0zI__Xnn6l{=q3yLn@KZ4gifV)NII8Mx9ahUd_W{1C=t2{qVy#2JS|<29R$Z))c6 z+O-SWl8|Eq2u=bBJTk@cpJvr1uwsLCx6GaEuP|YDPGiL=Xd^OQ^8Ef-Ky#j6$bAs$ zWI+y?uKWbC(nEy62^7+Nh0NRZiB zk?(G2DPqGE522=ByhAajm!7Fge2Do-y2$fkwfG3hk<Ok7h!}c ze@}9hVQS^4TYt6MN16IWYC325$SLhZK^-2beJzY%#gw(n4bebUD z@w&V*55W$7?F>E$gS<9UP4_S!y17(WoQ-W$e-^~jd6LtStD0C^F7UyCXtR}y0=~pC z;E`#e&Wo}#p+n5((UECuzRUewE|T?LxmmeQTqY?s_v(pLh;+?~$iyE%Vryep0onjh zgP4ZNlP6E7#;V1KRvDRc`2akQRK5BOy7;?ryI2Dkj}%#uK1odwQOmT_x@N#7GlI7=&gkZ-s^$tX#WnnY7=# z>hf}x(#L_{Cws35@`Qmy50Ugk?DG)k>W74OF@JVM-A+E@BPV*X?A?%Qh!{}%N1zdL!I{o;nd%Ehhw@t!6nuH6h z*w+ZfCaQu76jFoWge^11bd1epXdA@?XDg;H99dxo{*qW1&{8sNXw`Hrcsj1e1T38a zWYml{#{4tTnugNHQ6H6{+YmS3OsHlq8y_(uNrg9VH5_FA3&KdjRNU1h?>zx~8RAQT z?oQ&Ffj^$ysIoZ(oM8waC>hOHE?$#mPeyD%MVjSg0g2saY)a)M3k!><81LxVKpp9_ zhc~_hZ5T^IpajjAqWn3CaYL;`1%Atisc5-lPq+jczz5JXT0(NH5kP>Gl(^=~xHRe6jVr2Bg- zBfsmsop@qvuF7|reyP`V!VJz6-hmhvQ4s6tV6`lbNK6E{zz*OE^A;cU#OQEp^GS$M zA0uiWfQ@3(YnYtbfBIEG;` zvKX>~IrHZ`*4H0+lZbqTOg&_3^XmYv+ut5vMB1VVp7{eeEo+$zxa%E&E|9@Ri7|Qq zq}+YfoZ3KVRfXCSR;qk1-Gogp9Mcx*ACWG(n{qcz>2_z(8M-sU<&24Zhi{gZ-|&^>v6o6*Q`a$f&cW15no;oQ_AWFW(MUoS~gIl ztD5QeW@T=G?qO_X^(Rfe$`xwT|kQorZs-8?+5kwed>NQZqoA?f+V zda;gMz~f2pmvSv@T0L3Kd3vsxymXmU^#WYj)J6tG>9|2ychDT~a+tvtZMXBQO;`mE~cn8VQGQk|8fx&&1*J)L;vD zLqh{EJ}~$76TyWNU6Pm^sR>8PBj%Yt`YorYvJ}HqG=_{Lfg-vXyn(WcN+|RIKxKEp z!TM!t({)=J``Kt*e}N(?7M;c0wCz4RB^lR501LoJmIM05gcJt81HoZoVeI}3O?KEl zW06~z&QQyDW6q<=ypI-(ef@}ZarE;%%={c^pZ?~G!_Pah#U>l7<`APnVq*tOqmJBR z{LD@UksC`;!TTC?HPSm`*|y>DN~HQ;&nvc`@b&Rimd#;%E%5D66p918Z2Xe3KhH5e zZ28yNA2;nPt_A#H{83iKEPc|0<52V(`%*+N{pMM0iC-f9>QnZe6s?K%DQNvp-a%Qq zl{_ZeXE+n}^$AW8+sZRTJw6>FYQoi=xD`eis=mx@>>??f6pQ_9l8o}>R#YZ2M`=xT z;NZQP%YI%WgP|7P`uzzcRU_klbM*g8-xID%gLL zX(oU>TCujT=&RWvng~J0;~e>ILs2M>XM$=gYZm$Fha-lFHwBojLzINOhAN<%muT7z zLrIXT#WHqR>qSjo3#HE{vn|P9Niu4KXte^dyhdR9xUY(PcfWF_T-In$Y#BO)y?uOI zGy7`I(wB~Xi9`OG-oj2@i|oaR4kOC@Z?L~#eVZ~(Q1*EAYk>(Snpp7FoRmxKzxk|A 
z^6_Z+RPjvE&7t?@pD@S3_3hjm@_i^!|M-1RbPG5K>dKfI7}ju(TP;iH&JBErdvFA8 zr*SKGFpHa;;A)q1g|FT!=OkwAJ3*9h?OH`p9ptyJzga_;nj5k+ZrzfB+t6K70?t;*=BTW!6S}=a$SB2(jMr3@yTP3) z@94Sn#$C6gN03lDn6nl6ahDD+sZJWn#{@8cxX__bbRv|t-(Jf~C-n$6DqpK`(^2yw z+942-&E*=$l#$#sN6h^8b(A^V&y=@c$C(+Dvt`Sc2&4)Ib@wx2qs1IWH}0!k@l~D3 zHSStYrj(s{=S92u(Axd!=1;~IQ0k{>uiNb+-M?eoHWRWhf43K=CC?c9ag_RPCGAt@ z^zNk-=DmHui|m5D+}u4|ww(LZhQPOsy@lga|C+HsIe2XTQ|zFBZant2(z)l&(i`jo0QrJmxtRhnOtl-{zeiNxh7-`O))FdsdX;fEP_%Y(W@RS41<1xH^Oi6>(EdrahR=6Vc;>>= zgMDNG8s=MTK>ZwNB;l7n?p+)i^6B?hh>3`(HU(N;w_AUM8c&7|l`8HA4jMgwab^Gg zA?%oDne)?vK@sG?`^W?Lb-0X~qz8EPmgdS_?L2v29M*G>ShCycA3C(-?V0&+ zetYUOL@`6hD(4Aj^f@>n#xC%Y5{?d_m0lc-jEs{FeaHUAVKrmI!M>*pwO3RpB?1#W zFz(Y+=7eG|vS6Mz{O4kjGd#b#mV00N)<5NA|PpM42#%$#y0hk!w4L|oh&E%OrDw70JO zrI7C>qL0T%U&tUPug{*{yr;89gT;L-FE6iT)##$%C3dn?47cL+*_C(@-j({IK8y6U={S8ojjH2TkjkOxevtf!#uBe zdcHN~bTIpxYSAltSp1WRU?)w}$2v%cBC^DtcdCQ5>#9oj^9My9Y3$#BokL(7L+u~F zYZZEx_N9+|WR9u7h*M)%`(x0VmtbXCaR_ic9}mwS1%=B9tHi$sJ0L}Cfy3%I7mOP% zr#rmE^83>fQIL4Bk670B)!ObRPBy4M4ejdC5)XJ_Hw2?EP_lMxa-uwP5?TM0L@IdG zrXjBp{Y^+wE525Ploo|^0PZ`tT%1vQIdW=i0?&*T5FFBvGRQC5-o$bRjlhxsJxgvir zwnFWxAtPWiJAL=tgu|6H!VS4zqCo2VxjO^JmC-2{gT10(odmJ-x0gdDIXt0XvLC<= zE93Old=b_GOQ2gwatpIYW9>)y>l4Q(i#xF6Q5%ME@CpejgPsrzHiC#E3L`$%l8lC$ z#(k+DKN4v0@OcP2tz~UtNwKeu(S~t)#kgZ~&Kb6|i6PjJa(m6qKPwn%r)o7w?t;-+ z323|oaD&)Qy9hvlnv;>0H5QuztTrJ@-z)*Dh613Qp?Juh~w5_(l&8CN-i2;aKfe$Wfe zRBcPwg44^yR8s>ioRCIMjM(kO31wplIaL{!r%kCz zuL{GBMQyJ_@^T2DVgO%m^91P^6&aJZo#cyq6&05xx&!eO5e-F;hMT`(_Ql0yN7`p1 zEp4sK$|BJn*bg|N?Vbq`BDyUS+(aF$$YRP^Vv8FBYe7ex$gAXBU0wG=B3%aQJbeAN z$V6?_$JlE^e?OUqvtcW)(nu-u+nS}!;Xyo{&9rVC8Purdx| zVO)Rw?GuMmG<2M1=M`Noes$dp3-Iba!H;nD};Gow9k3!RVU@;=O@Ct{3LkVK_FS5Ypm*cQIY}hh%?e4Cw?%#SlLj3b#|fT zCwZzV*oNX>(tF(oQK2V&rs8l^d|`yuutkMdllCaa8yhlvd3)bRPitw~&dp|+y|D}Z zQM1Ztjctt6b4w2etB}0q?hd0pWme{@ zN=lQAvWRJ)eJTj-S(mynxXhj;yKr+C%7%Y+Zq{qec9~Llrxx2Y&@_)X(Rf^jy34Hd zYJCqHw{N4Oz5@=V1z=AJu$=@nvUi~W29ZV)ux%O2Z;O@}Pu;qijDYTrHq6^Cc~7C5 
zO!LFZ`vS416pc`vTwJZltIMt)x~}#vu-YPgu$Bxk$b9$_*_ikn$9b4lEqQ)N-URuvT#MD<>n;*C>dgzCPGojs&n7gAkC zWGyJE5cdq3%@T61U>bnGUCHzJZX1-2ZMwYm-fbcKasVt!^L=^bw(}Jz=*b>&6T}m- zKP!4WsL_kU!JDN&)m!Ae3WAF#otXjiJ2}ZKZ(hA%nMA88nu9NpB!?J~)r?T_$MJQOHQ>sGDnFI3) zgUavqgjdza8ttk-!O+C7bAceuSG?z@V;^-3@4GX??lmqIO9EK3K`02X$nIgimwjooxN`_tWwdi z|L|~X+i>STr9crsf{y|!Hbh?Uc?S)>WCJy12f7 z?|^&ux(N)0ze~C%WX2CcE##}OHrf-hv*mG?!~)}W>1fPaXV-6HA0mMa6}^SC_l0P9 z5{pcv#d4{s&MeD`$8Y+U_;keIvhpRiTP@0tBThwrJXuzB>0sgGHiIZMkos4C3oMjN z`(%Q+6O1%0YF6wYsJSaJL8(d-AN!L42!*mC-&M9% z?L}~760#~go+vegcqWWC3=e15@V+h%7<>QvD-N}lBgLC%GnZNA#G@CZC40%S&sn8X zP~6}MEtcaBe4jgt6X~6{01r=?g*>R<1dMW&dx30*N$6NZ&0Ftk)))|LgE;1_&{no($rX`zmGQ90zG*(pYQT~DO;NZ!4ZK3A^v`?lGvJAv3a{Jaxb z{W{S*Iq$+P7h2+P3NZ}v1vcbdHYG_cO*^I_Zx`4=~HoU>5l z3Dw?YKCHatxrG#0Cfr&`RmO1yZKbmcQ<2hBO-X#rrTKhh^y`F?yNay}kSgxq=YNww{GB4CqsN>n>_9DF1p6t^KM4-CVX@v)& zj?~#c6Q^6^r6mZ3Y;5!+&7=xLTV<&PnVd9|pEdCOm;v)k9CQX&QE*p|tQCtqRflFw z)S=}PEGCN9UWXB7EYoK`P(WyOb#t>Izx@poxW>Q6IXo<^7-{KnS1e|0mclRE0=yAR znH~COj+Le$<&^sH;ID|VFc0(QntE)WjnTTa5MtAEd$b1wB=eFw)N|0*t&M(lUk;%o zFJw?AN5r4IcI;(#jQi1|~Mx-LEP zu8{`TOBq@~xoCMH>_VbYH#7GHAV0W75Vtmcb3S=n^KSk$AH8 z`^#JBGct0@si&ETpXy7m3x^c7$`91_5w!1*_CRD;K6L-%l4BVP+&ApvIF7*Kh6z)m zuwYe;3h;l-E#N_7C+FNb4Fgh61e#6EIeY8TQsl!a_zYnv9P|$!qX=j`~;$6+FyFIS34hM4}Z=w>bV$?z1C@ z$D-z!kVvl*hQ_0Or$~)b7m0FFxaPfBJ6;?k?^vnD1RjK}H5}M-6=>l)ieXZVA(@^HKI;-aK?sh!nVt4gzoWH*DhAic^)Ma3sBE{-Ylq|x)v2uT)qNtWG# zb?BUEcDEl6*>Z7FN!SRX%s^`t*^kDfaBS$Y=|N1aSR?|89;qK)-QACw6u#Ugv6I#A z!)erz>wyX=W6KQU_XZbtpq%87%?L)65{^(AyQ*oo9O^tZD$+xCCZco$&$$VZW-;=Z zD3ITVc$igL_E|{uLJ9eWW1<+uwGgOy^n@^%48z{H*t#k3&g>1tdyr2l8XL#p+RA|V z1cLM^tCj~?H(njY{euj4#Yr$eRTa7@3M)~(!+=@=tl*613}x7`?85W=wBd#19CDSYQw={imrS@e zZ|oyZB=HNC`czZ#vA?7k=vPWwEfr|4Nl(W827{ddv2b$!^dq~Y3#`&7IbQuJ<%N*+ z^*e$#G8ixX`Um{_;Bs)~tYz&0XnB(oa{nWc(=3rbq5i~`Jy*Epx;e7bHK*4Hw6`CX z+Y58-bSDb^@A$LhheXDH^0N1nJ{NYPp>2|BnmSk4Z`f5H$ ztLfh-jQtBmY2Jq%zn9J9KyT5NK~|Q>Fw@oJw-h(V;)8t%l$Tm1C?=k((ykIt zz1*gOyO}gY#=EF1bKwm`aONg+z5fegDg8_ 
zsBM6A6d~LN4fEiD?D6w7_KcI0hDrbK9s)q!pKKhsA|Fso;H?cKExR*p$I(|NrDrS+ zd(hIvA&0Y8*v*)-(Obzc1V%hRj-{e7jp=@5XECbcH{aR zMWinL{bgArxZMFf1|NyJKhrkJuT3v-@UpXShUM?82`0{!cXmZ&DU& zsB_45%jTqO${k)rW716KqlD$)MV(nDPT~IDEnqvFiUGlEq#6zh4Yk8R z9+>8_v(?o}HDTvODN3?lOStMc%^zbde^e<%S86FJ+$0ztlvK7#W0TFfBB%8NbK!_QPh9*@J_&os_awDb1-ehxY94_0M7 zV`%WwWVSPzkC6{eu^o%V(0O4oj>2y4E2LcL+b;6Q0y4XrGdHl3>4{ETTd(S{u1hN5fuQH zK3dMSSLag>5LLr(6Xlp99lFAn@!a^C-bJ72nIkrYB4;I}jSZrN`(c*+IKnqb`%GC@ zKtpgP&j)J8OfuPZ@L(thuas0x!*)v3KT;yi8;|D)y!=l05?u+|%ZmK&SXHegBZ+DN9#mMpV=Nu+wy)&=qo$=bF zceu=NFX`NyKV8SSf21sTKKNb2Ba?;$P&7J2E`fZ+3#2Oh(?_IzrePmuQc!B5b!?o> z;TM+$<;S;|gU92*n#A1yN=va{w?%gq2{{QTKK|YD>fUBg&<+})-U%F?iT^IXi*4(V^Y?>Rw1pM)!8Y z0~nM?RWel2r7XXHOWG4aSu=?)NS!!9hdlk`GAwD2`5U+g(}q(S;DFTH`u4}F;y=K( zF98jJU(+VoKib)}&YJl`5p|yXdNC*{{_3R2lYJ-`n+ev7)?M>zqvRIwFF?(eLJUS> z-rjR(!-yOzfwJ{Qg@r1XmPzZW!)8PZh@&VDFviO;F=&lrKzr_nj5fMAHRs*soOdOt z7J?uTYy-B2&g4ucA@2IneoraS{+xA=$p9+lG(Ffd{t5O!@2XTpLkMyaSmDkf47I~< zK<#d|y$`3dn%~0VDO`5g9Fq3czz#!8M zfcRr_*X^Z*gv#o8M#27E2@G6Wozw$Oa3m5O)IoMc zF=h2@O-T+McAd&_jS+H8*U)J&YFpc-;e@~-i89;fV^cAba`$w>nD z#?O!dHiHI+DB| z)te+inkJ?vbOCFr!H~bvEmjQyDBjaM?++PM4}lg8*uG;&TT~Ab2dJrCgO+@m0@Tk-A}#-I=f;RH^d^(lzV{;MhMO-;!Wc| z2KS^6%h$;0cKWT}2vLkD2lPD{B?gZ&!o0ej+M4KvX*ZE@>k~F;zwNs}0*CP+tLV4Z9{H;003a#LvV(}JeBe?SVolBQ9A=xwtA{w` z(0p5hA-8Rk0^cGYWm*Cb{yNBqv8!|AR-mt>C9z_^qPn{JUCS<)wMCLI>)H#SJqx!e zP}M%!pnV_tH=OBIo9xg7Mw0O!Y`blq_u)yVeys)$y@~WJk~}TN5>LS?M%lDwbf&ct zPFU^*DFY1SCHVPw!2BgF3Cxy?aC$|w#vL@fPN~zNPkFjfi<;hg+GNkblUd6Ge!KlA zM^}zu)ok;G$N`HqL7kO>#USE-NP73Bp2x~8pv92rJ`@}3X=^%Wxk^1D?4j5aGj5x* z5d>mB{Pa5krrQ$*mHUVa0Ct(B5{Gc7EdW#U@bl+qNj*1S8k_O4r)lxDsR3ud9p-Ij zerL?SQ^W>k#!IYPwHpFed+Ml8Nwd!_{M;em-I5=lFx7QI91WSuC^SatBBD)Lww>C& zCnQN~;wGqEm#$@Drv=2p{Lyso(zKD1LhJsN!P?2=fREFJ8^t5#41XwbZnG@R5E9dG zPNiSEPqzROE+U$UgdSSYW;N?y>TF+*MK+~L(yq3u5&pNI1fQpZpxa{fSIY96xgm~h zUn=G=@p)Xu{PR&bqe{16w|EN&&^pX7hf>SD^n0SKVv%CcKKSzN?;1+uN7(1Z%o7O4 zJUL=3G&+U=b`m1Prc5HKP0`}@8#9*uhWL=ujYlt%a09tS&!zB=80-3NPkcIQNs4!q 
z4vUzh$(Ap=7n76piYB|hYPA26q4(+gYK1~aGlJ%2afkug*v?ogT0bCd6=*+6wLb}~ zQP=b5!v&40qtwya{pdT{QpCDsN!u|?)xgc30HX{q8np)`mRYPwRAII{q2-nc(;osF z<6;J_w2;GTy|_8g#3;*XKFpiZ+#u)7GF7|hc(*sSPQ%=tSe@!idLb?uy~BVJa4JKx zXSc)nkj8vtsAZ|mOz`*sioJ`P6b~Le7_ED$*KaS~sD>A?08Aa8E_+>_$`H;I;|(1! zryL}^u2DSEg4s)Mtk5lArdy(-#zy0|qj@h-LBwe;mZ=9j2QhbuJSn99HDH+$=Msl$)Y>7}64 zyQ1*>y4!g_F!xp1I?w18I(pkgoQvCjmv==I*J$7T)*K}yDnL(}hGsPA(&gNkmt%eu z*nMB8y8Fs2?qADp2k7yS6+xgWTB)e!S@p~O31GN!+{R*WxwOR_P6!#UOym1NUd9&-G}!l z8YqYKdLd*F58KJT0Nx&s)8I9=23wYBzxykiif+V$2*v)@vZ}dFzSq+{e`Rtr(xb56 zk;w}+gFw-5^So}?Ek~HuS%qTYRY%^;w)fz$=%`W#RAO|tTB-F5zKCQfq-gz6RP8;< z?2J=1h|%%g{)QSxZ>#M=r{38LcPdX2y{|NSP`lw+%Y-*BL=+Dl8HQvrga3NA#|)(lcsfCe{xV%%^(_TneWK5*mxXr zND4o#Vtp1^q@1II&EhX?q~7!@B=FOR-_m2KS#Z=xm!nA1K$xvSL*iJGua#^Mt#%ZVotIg@s?X&Os|s^@3gu@u3@Bc)_p1DW6@TZ@`^B_yX{82U__xtbpVu4 zr3BdsxgIa*zIW>Dy~wc%u7nsij+**aVe4OGENL^m59gv`X_&%b7IkQY++N6=*1^bP zq*XP`ptKh}9PUIM9s`FQVz<#<-V7MOQBAjR-yZEQ*9Du_tjbJ`u$t2L;e=#a=c7)j z$fC_u3(QiO)t@~mk2LVL@m})qz)~B=tYCA@m%g8ce~+v zi}h6hMuguOt6^lnBl*Z)V^O)AHI^>Y_RWJE{oY@ZbSfaFQH2QY<2RAq##eBegB1Cs z-r&0-?djvwI8MU)bA5<azvtl!d>v>c#5`7B!XpXhXO60n2j`wWPMy^k1dP7 zop7_Ga&qU%G+F8(M2UObQov2|vb`;KH^%V?xtnaJ5X@!c%WWc14Cy9HlplKTuiO=& zoYe(-2P%U$=GiH{f0C*HbaelTwnKx_UMvw#IiN>2uvTbWDgi=?alwM^o;bW?A;JnP zKh7Osaz-HY^Iq!Ulk@Z1?5G29k{5#ShumJs6G3-{iTBxmdM+~L3+1r>=4&^a$u;bQr1vS7lOB@Q2|C^LIAdiItYux zwv%=JZX&f|M8D~o0MA&rrDEihMhYLq|6yKr1wh-a%~p=HS=VP2ijAR!52kOlJ!Hk> zCvIVom}7P)G?XxIr4aeP3hThM>V0iZBCk!(5@?Ucpy4uUMIlk4LMO#32dd{r0_HP$Az>UyG8w(bvuh1wS|?CvLg+v?8B`JF`6xNpz#{ff+pHiPerT6 zZ$SoM>@eXkqOHQNJxuB+ zkf@M2rqz~SHaVg4BqXKCzvY~nl1ZuDldgNbKjS8wxVSn}O2?SYUiIgwLLsX5K@H7; z#5D{|H`98%6oN9ye7c0%kn}B(F~Mko-kqA7$|oS80y7|UJb^ZU!xG+>LG`Jz_9D(C80{t%`HDq}- z;t4=M)V<+fKE|}2Xwt)VD*$@LtmHSAV#_wU*!dp^lu9TzqvI3`*xZ?f>L0JjNNnQ<+dYMLV9FzVlGBx9Hd^#u>qH!cb2XdyRwS-!2N##2nVd}YVy%Y+<}ZY7QL&@N!7+P>KQ20wxT%*`7BK&vHr$x z!c>frP(LF#lR;c&4L7VJieQ*)hmoNsNk)dqW7OmH&X0a@-Vg@-LL7P_3?mk?=zOgP zG!*MHdj7(N+%3g{Zy?pLx~J<*R2|6v)re#P5c0?nG!CBT4<|4Df2@58Sj}ns|B0~^ 
zgT_{3oGhV4(W;CgN0zjRv}m$bN@>%ceXTfJNGnN;%9heU2GbX#MotpuQ)oL#pFJO*i2SLOJJD$(- zANCs0u?0}wMvXMaF*no3i@=ANIdtF6 z_-veUmqS&rP^U?X9v>-l_X`*7)Xa&fMh6`oncQPb@=Bh_@n4n?V~iock(GvYxw|SA z#{?-5I7iZWq?mK0@c;u*$cMSO!NY(c@LXJ<7R)aI1|A7O{=LQd{x@?T^A;Z@405l$ zU0T$MVtKF$7z=|y;odwh&SuPk1Fvf%*W!%cW!kTDWw;)z!DC_S{a+#F|Ct#7w$n?| zPX`Wt=%Or(Jxi>Z(9CTxE0No&aQ-i*WBNQl-!9}y*8ecper1mP`O}6h7^YF|EDj>(z~ zjwv#!qk2X$Mljv!=;>_0{_5w{)2`q(WZEgH9nUcN9tp+K4oJwU3bxpv?toA*+W|Jj zu|_+5*bZx06`gpw9f(^OPu1swB8Ey$V#%m%H7ckcgF_}ju~vKs1Sl(1#K$>C;Jn4ipvfzu%ji)si>6SbNd{O(SU721tf(nf-q}}-ZGw_hP9f$QU>gP zZ_h0M;>rHb8i61|1qvObZZ03RbIRVJSK1fQ_N`*ClPY)rn#qF;NY2iR%&}ko3^o7y z2_6nAno;U`F?J*8;z~)q`su|*`I3lHTZT0r4ONkK@%kGqbr`!=ZP2>NVWe0rv!Bv6 zIMGK7NUITD42BajBO_=p_?P}ud*O8Ij&XrvivUAX2o6)Wd&!N1w9KERr@J$OSyBbS z|5-owO~HS|d2xJ=U4o?B)FGvw-}O3wyR&RoxKM<^!KGpUtUkWEP?f$#sA7loc9k2Rpwzvt%jf zT7PAX`|fwTbm| zB$F-A^(k*+z0GG)U8LjKzTFplp&|Tlzt0$5Bx|yHV}Lc+(q5p3Qb(tX%3q0mE!8@5 z=0M`(GF48~2~v6Amt5ce?iikAZa*^Piqzhn*2b@+jo{tQrTh~-1WZ=@d>Kv@1E50k z3w(}Z`1VKxg4>2B{$TW6Mo92~r7mSt_!VA4`0+y%&9@uOyj^f^Zl`7$ja87GZ$vD9xgvoM-4jAdhV3`rj+YABqBN@$Z!~EEdqxE=>eu1YF453dLAOEZ79( zWUZ!#h*abczmohci6^#COh1LZaqwyuUx(+F$QbHM65+N37-p?(cr_$M8T>g`QI``Q z_bVJ*AP6Hk^=el>^MH>EyBB{}4t-&*rN0<4|PCLP)$ZI zB3O`pe|CBW;Vk+a7Y690wrNucqS3H970R)2KaC4vg+S!sV6ihV#R2GBT8UbyWTQEHD;QV)r|Z1|45x&gke9pOuG)g(}bEeq@5(c`0FRhe(<9Mde{Q*2su`J1xA>^ zPtU^f&LQ*v;UQv${)r;9O)FuPP(st@@>1fVb@4;qg zhuHQ*2h)@xmw}(l&BMb(=(9x)Mm!ZPjv9V{xQpZKc$H6;vIOo z1Uin?8aWteh*3k3{A_^qB*`fT6RGdLX!`VJ(EE;D<$$?HS6@^n0rqTpJi~1^(Bs}$ zD7eL~YBzz2cm`E_5+%724} z)DU@>6uBdTYv9b!r5+XE2+)&=Ehnh4pR_EF3#Zr#LSVF!OaK6ON8ytlZ3G9yEx7@$ zeRyj-7+isZimZLrp;x-nfWE!J*Vfce2j)#|G}ZUnfyr&aobSAlsS>J!7feG>FED6% z!zllS+fQKSg}F#UDkF}aQrqg)c0v#c%mKy(D9C2NwD`1A+2gV%f^S42LHX&?yy6!l zMTD)Q9fw!7;@4#BP5^|;C>r1o#yrG#%7>4l#{<*ax$mZ7o-pnD*@4!69G20%79%J~ zz`u$ObV(dfr)DZ{Wfi#F4^eJYmPD?+6rUY>#ZR+t0xw$8y@wS07cL`pCd6eNXdRlR z@Jp|g_QpT8SWDWm{WXaq4?zK) zi%Xb*>GItMXZ}(;$*G<5YdrF6xK;Vzsf+AR*&q@U46tSC-TnC0w{GRiyHycJ)w>t_ 
z%oP5TC|&v9MhrZ{3wVG|s!h2GPeJcqJVZwJPO-G);4Z{N4QmhC3NnBVrA!?#Ms3&% z8|G7(bZkLH`ttPX&qiunpS-RAJOSGS6KL#P16oYk?PL9KLsBx#2Vf5UU*S9 zSCq&YarkBZ9a>+J*O}B95R;J68R7`o>rg9rqQC+rUbodF1Lho{NBbg5@C_=1FPLN_ z2PYf=be@zV=$c-$ckEoE72Br3Oh+{tIgWTGf=BPM+VlF>ljOpe)}BN{YI2{L2ZVOv z?5++A?@9wUNuXDLNZoJqHU$9Wd?CGF@}q4cfvT49Xl^u;6*R#z%zJSbB$I>_fT}RC zk8OvRcwkq z*RfQyaCh-6uW`UH1Z3C8(@F!xIH11w?!(m7nj2{%IS|lC5o3>y{luwLYWU(5RYlHL z%~(@BkJg*q0Fx3*hOJXuKa#W`Tije+jMYY|rwS}hed9mKFe3#6My_NK+ns8U7DHmG z()#sY79me`+C`2(Fj7)dI%`qs+L4fuFj*h{th=|jaMkn9*@i|hk$OEB5h85&^XKRI zXKLuL0Y2C(_ZtS+L{D9gmH;qLc~d+7c5}_SOLD>P2YO$Woiv%brz_BQ+zHte^CQ~L z=bSLQd@*);Zz9BLvu|9yl`?UU{SD-W^g8vtc546q8`E4Luh?tX9tNx^0!S2-^-*XL zf+ke;*GzMrlc5ok9TMErVk$ZF$sK>yl`Bo68`gF-%yD;v%C4hroJi_;SNU14RWDtu zh_%a4I+R9K;&FZH+xlVS*U{fz>2RHM`L1=LZi9fx%lvB|_p8qNbV;e}_+R)`q8cKo zYG!sFoQGkpC>M)ZewH~}{m&x{1PdLDk+ zq-VHqw%Wc#YE@%XQwu=i#T{br`=Be>@OC%Pi1+;fbw~|Vwx^k-BSR0u`)R44ZS|Z) z)WmFF-%S8eH@rLZmh80HOzU+^Fy8c1&$8MRb0a?_>vob)9ekyFT_?E9aNvjRW?%+2 zNT}FlngJbeH4YmuCZSjeV>0|6K?$SZUzaK*vN)n#9=#p*>)SG-@fZfW)|me?IlqFQ zNaYyBbIlj9O>M^Mf%(Y4uZ2sT?8S{uMm_opDk^MZpuo^?Xn%qkaW0eT z8ap4QFNC{Db@7_{XXc0Ou+Ec-GKx=2KmudW4r~2}Y?ECXDG0p`INz^BFFl5!A8is` zkg6)EH>vF+f*){e)p7H>GcA^|Xo|_ro$Kf8By3_gle%YO4hna}n)dc~7u(z2xFp?a z(Jp)P!aRDtcsZ1#?G$g&j1RrWuVVLV+bKw5Uu179>+L-x_1TU1Ex|=1;I5CULIQDT ztsy_iD<0v?X5V*G=+y(b%{*LN__-MO}Z1cGmzW29im%*oMZp-X@1F_O9s|5KTrjl%{}KBHX70K>O{ghW_p0GDkq`6vL<1h(Cq_qePzoVpM6oeL9o zsCk>>M@oGT_3N&|mL6eQ2qxmKu1>6}UbL~{BJ9zDqK{y%*>ma7sjcl5}@-j*&nC z=1CDw%2tE2oBU}RHYg+=sG?_&N6rZH%<`luh!JJ|hbVcxZI4XM=om~{0XmkJycFb$ zbld3uGiV*^@lKQ{XDvcD#O;1{V{6?DbqIYh_0pdzLYyNJFT@~jMroHt$`$gB5~`u^ zCKr6{Se%nAV)5HV5wo4zL1p#3NQ(WhCQFx7cVv9M3hDq~bJt0WR^kWUndl#?C8Tg}eWFfXonN2dCl(STo zb0r-2!&^-M2-JmTTkATGVZ!_Bscu(Z?IU=ZXM`ce)K| zkK(NA!Z2}p1(MjHRmG6WtJkAar`oN7zCz=mB`q2tB3@lJuha)Yc#GZ~C3QGaM zfkfYeQdwkV9u;&ny^-r|fZijiyLAg0^a~`3RL*b!21_Qz*7_oxBwt$Th1(wgnm0UlPGIHpoN);syY>#g zPnuRr3d;Mb5E;@v-?L|Dq}}MM@xVfa+DfgxeUyClv)grs=91enDwLC4Nh}N~s66o1 
z6HiMS<@_LibIIQ_GH98{P6+xLQ0j2X#vy9R=(phy`nNvDbP;qYnEvsLkw*Um?z~_E zZU{P5!fn3%Q8@QCaj2$TUt1h&MoCdP&w`*3tqD52P6gZi!Rtm0{uh)jJNx{#speT&j}fpd&-PR2eFfp3OvoZbpgc&*7Q zkNivrs1m%X!F>15cQzMFHs%Pw&CWK#o)1Il;N(Pv_EtnoB*7CNCg1gD^g4GQ20Q4d zrrqxVlqIixp^}cb7yJ=WGk_gDPY^YD8O;CM6i?M-x%S?h{NGVP4{N>NI zk%PL#-ox&Cnh6ynb}OB*tjfq-)KjE~?-Nw$Cdod({N*PYjQeVLFBFly2@N%7r2rh% zXm|-wF&tPF+6=G(`$f2~Z18~M+G0d2F{|8F*BRt~dTQoTz_kXSpLR~Z8IR`xT)^VR zw>P}QG`Ow;9%npOPyQ3HF(?5(iqt8(76}}z{h!g5-?qj)PN$@}zRfxgtpyrgW%a$8 z9GlviGl3w|xom3R0-Nt&>Ze116oI(?;CqNQ$xkjsMAzWdEuNPlHhQyZb^l?_I3$_s zU4)A(EG%r8yD9AklZv5mYvjQ8T<7{6<{*5)%N#f0x-_TD6mE8+4j$U<&h<)ISa-Cc z*PBo|hwHT=Dc8XS-u*}o4=2P08YP0le^|o(Bnk6d?vL~An`$C@0S3m7L_OP>RVzeSKw!Ij+qi^+WB zd<|B-AlQOvA;4*l|8Zq6eoHA3;W?&t>~ybIjW1CMF2ggqX45ufLLuFdY1KHI#A~%tWS@&7OP#+yHdo@7T{_;T@77hI;%W zfr~IcY*FC0`SO>bdh}#c9Ib~-LzurnWS|&A(4Ir*9EtV35)M;!NU9j&gT{zFy+XwnKAs^XOrk`)TESNhh#qqlfu{;uUi9; zPF0}_lCW0)rHQYt0iPol#fND?B}VuJg-^#sUtNOZh(qE+#g7vHrz9>#gyUi&{F*Yi z&`}``amI`pc^I67XeB};UR$H~q7z|v!A7X{rzvrnoRLQPqBe5(pKuQ~QaWPf?ITvG zFoan6Kbfao`7Mk*k4!Lo@f- za}u+WT=8?iA6i)C6>T0jbIo%yfkH(W0TUKxM`@rRCJKSj9+dW!j1Ttc7I1D-WKwn; zvkEyni3kGGGA>3x)aeC1U)NRvt|H;?3cS5iHY&78gaxhjk<99y*aW2op6{rXmrn=B zFjpul8UY5NlFjH;4VHk;zBhwd*`PdYl*<`^Y(~D-SnJc%5;*a@F$=!LxPDs=LXG1Y zRA3UnmXv(Zc-RoKMcwv`myyCy0#fe+QAm(3M8(mrcM$sFi{R3U$0&#$ryW_Pxng3! 
z^Zv!Pp1g9|T_~)T!8cIcBV4&0LFo+QUD;jVb6~dDF^bJCG3&1XDxFBFQ87VYOHB0P2I{V!hevt12srChZXcOE~IQD4_O;_EacK5I35cfryy zsd;I`x3L%_Pm_x5>7G~jID(`dJ65w1Q$+S(-X95)HiIi7RT`2ar={J%0t0s%MzN6~ zJ~8JFG15(!NtKVM9|ayT z{Dp{{CQ<^B;pPGIzxSp|ku=yK7MpBtNLLZeD+C{xi}D)}u>2FmV?*46y(o_m`!^4c zhND$hl&yd>^deHUcJ*|%la!B_UorsqkHVN?EXV`w>b5WAzY)1VitPK&xl52_5Uuu$MKQpTsxyjHvoND2+(8GP%FofCB)^9C+!A_T z3EN^y7ZXkTbTRo?-8p^wVRDdC3;<<&;}mf5HZM-FnJi;tTi@!-fK=Nz)0s5Ad zItpK7KxJj+Ru9uZ*MSDReC0|w25qlizdoPlc!12T2zl=NWCZ(~i)h7Odo4Pu5Jb2w;;F*A9Wsy}3%~^?tQ?}O zL5Nqvmw45jmQoR3))V{*dhupxanmD#ATmuqo%vLUz$=c&=DNl+G08HpIkRqUDT{jL(Ftl1P7Mgw4AVPFT5-XBdi zsvoUru`9k--lQ2$6Nz7gA`{^pXGl^9iAU7_WUjk8)Ca63Ql#z~K7*C?(SAW7mrkGl5556I(fR4(4_d z98lw9j=r3}GWL(lNTPU#uvzOuGes9Od@7R8ch!JD3`a9=0S4U=dH+2h@g=yMoM|DL z(qU4id@#Ooy|KSU>-yR4NKWle%h$HnPPC2!HMtIZW?PgKRzI9srlof$TL)@bei9v` zR&+ZIT|#F=wX2(3K0sRJq-F=|hmz^oX*pMr(t+tv*2vf_Y5j;D1Kg2gV^n?{Z~baO zm^t?k1J{RcHXBY)LwX0UOtfp1 zQeO8)@F*~%Bj&6??AatfzRLh?J4&`EAPg3a5$YF;?tT>h z?@hMME|m2>{Z_1q=HoAK2?iVOFkH0ye10^{hOh@<(+B|aX87jBBdI~;U4eSyNS4-*Yl*T(RWrk{;#6VJoIS@_f;C2QD@h$RU|?pk0JyULpmq zit>}Ly}eC)!agQiRGOSZ`U!;e>MDBcBJ`0l!ked~8Fc8PxBNl;MNhmmrzyH|B?aEk zv)eUx?a3K;OFmNlCI8v7!c1iQ#Jp~s+1)n&Y3le%lS1MDrt@w07YnE=FqgcGD@Gt> z`N*?=zV<4dqJ97Vl@)aR&G<;$* z=)RBilq3a7L=P2gkRLXBD4$N07yg>#9tYKm|GeLOHw*@OFFKp6R4c4gFIFeqYy6sG zH8H(F#|$}bn4!S7NX;~*5Y^V7IB{D!n~3zgQ;WK4{>i%5KTJYwT~|ZCW9{`+3v4~= zok^&2&}W=sl87L#^uUP8KIU&99=Gjo$roZx(6%^(;b^Z*7khlpK%%T}I+?BKxrs%U ze2ziVu(4iI?mIT5?J=qS1_Xa`d%e1#oz4{h%q!T1I`fUDl!U6z>c$Q)VYq8|#HhUfI+d}1{PP{w z4h`2$(q$qatVLkzc%VhlTs?(`6>T8xz}6SgY<7CZBetmK95`SjP)F<_aHN%32z#1+ zK2xMdzBjXxAz1CZo=utF2!hm%;nMM6f$Y?!8lnumnJ5xPwAe^}c{}(*5hj#>6vgnw ze=bw&XhG66Mj%~UF-L7-Kfz6{?G!S^V1BQ5uaq@HxzMLRX>E=CF(;~*h0B! 
zFN|xv%`vy_HDt$$O&g0Ee#Wx~pjXjs*w=q)hX#?=^qKc%aPkKDbc)g4#3-8|VuLnq zpK=ddc=ixMd+6oK_Jlk*NET4XiJa*E8VcG%X|=_84V(ou5z7EYa8KsUHRs@S`UgiFOuB7- zyf$3Ew>`6`gB5SdEN6QQAefMVZIJEqhLJ-G zJ(b`AE zz_O-x3y3Ug$cBi9o`h}W2DP#2?H24Hrzl9qng}xp;yQ{N9IsFp!)}J$v`Vn|)6tBj zox34}+lBcZn#qodPF(;m!Xc}SDwY~IZd^-8P1N5EEvdOkGx|@G_F?ZLO#RG4zNktA zw^#YlX(Ov4D14_NspqzIvZ#p23ji!y@0uPoT#ZDk>}3R;Y;luqJk}R8P0?boElqwm z)wXgv!G$hiO_-Ij(Zb>u&Yd9Wgl%N~CR@8GWg{}C1$*?|g*V;^w*4F98oju!zvaDa z06U9_uypg%C{$!#bETw7Qj6eT&JYR|^77ryXCZp0Pe-^BK{1p7ErJ0%w(HVvG~XRK zx)K{vK30AJMir6N8Tw3BNIz&U0G^C0D2U`PY7W70m4@VA9YKfYGl)kSL20ZIWFxDG zPy}&AsE(Sn@J}QJDRyvm>kOV>;i^GoG29#Imr~oJv1z5gUy6FsEw;VzG(Lu!-4g3* zxK5h%-;g%!=bXVcX=Q)us+)E=@GKoGKJKgTP6~UfXztYdYA4-S2oZ2#PYqefGx6eo zRit}j;7PzN%Bi217S_7C+=RC`;ri|Z2uf`M@{;L}-`vGi1Io#A6i^s%lx!rZSn+dC zo*x2DZMo)4brIlX5bg&c@`PlcIA@|Lua}cT1hYLa3(MPn$MZaq`e>zau|@5?ds`c40R>IhKFLFqFgp zdlJ=(U(9#%CQRjmX17jDEq>(?i`9;ClKnd=SmoS*kSs%g;V#jgPmjBmdMHFg?ze~8tVB9M7F_dN$ zoe4muL01RF7gXN5d2tRT1i_#{DGjFcHA<-?VEW$^G^XkoUsjp`E(N+Lwax_WlRf1W zQV~~&Z+gqVeeY(^M|-^bzZT;_e(lnlt2db}n0tv->TSZoK|#{E1|(l0CkBKWpOP~X z=10#n1vHO7fLgxi%eiqF7E#pmA9-x@BSisBjz_^4vSYz+&@bb5-m-01BnMCFi znx#cka-kbz2Z6JpSh9PFbC+V z@~cR3hQV^iTY%|M&xPGzn)14C8&hfo<-g!a!8hx_K&6s=lhSET?6h=j1GR@K%{m<= zz1_M%STOyQ#qQHDOm+OE2T#YeO_#YWSg!gATnm5IRsh9gN=c<(D%QCJslLO4DxuJ0fkB zM8HsrZl^5OFMp<`b7am~&Ux4b!hmHMRXz`#ylBTMIDzNV@GNk3LwC&i@&gR#F+{w< zS{V2`4uc9YMe z#r&x{#aZU*g+Dnr;^oUtKfe)J1E^*XkPmJqf?CG*h8+3uzNwvQrg!4QkE6fdSn)7Q zZ~gxiP|#T4IFxk;C>y}GqhC?>U+PfEJs#N9hd3_ixp0nhQX*kfL#^|UF^p&{hdS0r^Ha2L)2>6j@PsX*i8(9Rl?cpBqp4s4QuR~a$Gw8cUb z!4-b}twLinqOSjL5kRtFD?wSF1CInHG*)FR^$Oax2~1T#-{aI}{>Q2%!vdM78Nyq~ zjQAsH?j{D`hIzjXUD2&+^{c7I7(PC`<>Q~PIAh7T>GHt?gML+34q6^^V%plFhnNaO zgYug?XC$8&V(xc8zh-7dcbrI+(Nmj@!krcGi=W)dZ1Ry2k-UwqsPV(>kx)fCMVv70 zFU=Wwn>-}qA)yP0uyoRqK;*3sIs$XXZrhy zYaHc~REO&whm4lM;8L|RsjJSiNBKT<@^4(W_9)-!ifaQJ`8O)~J@8e3wXjd^HRzXL zD$z8Ewtbo#8T$twrRVT>m6twdeC%o4XXU}+pxt~)HWlg0LIOjbXC%0M+UZ~Es|vQB;b zqId@f2O$+-zFX#-nr8<-(-aC-qA*b 
z>p?pRt2X?N$t&OrJJA_X!sHiO19p_IB*!2kN7myHazUVCeGDixHC%GnD#A7 zzy#uop2Lybf+M!-$P|<3AK#_c6(oQ~mo|HQ=r&sUvKG{8y9#5?)F+8JPyT({%@6mz z#Y@IaP#hq+9;h&L&b)c)kIpbMmdhRGQ^s=0HQzKyLV(uR=g(}t@$$3!l`fMFKmghT z6TIcr>j>K42q^~a;a;=7XkU31vt+HR%A3YvXmT`^m6g?xj0iX~^s(&x<7W**UV(-c z`gm}>#FQ4S*}h-HmuvC)de8za-0U%5-vk~12x{+%4+17LD@Rl^lP7!0PE0-?=w%(ZB-Fg8C_GO$ycuZ$AnSW9uEehE2W5$#| z9mA8$n8TwN$;)FJhsy9@R*zP8?b5wv$Nr9*!Et=qq@u1a0u-tds++FmTwmg)y3I{H zx@{ngknKAsM_e@47>>Cl3_&CrdQ@VoH+q_|*{f42${9CVJrc%QG3tV!-l~HJD2&hm zG$#}d850wOFr5(8q!jT0&`Z=h9Y8@2sAUSBtwhxBQzUY3+H?-zEEi8KrTdf@&ngwZ zXE2O?ZrK6vx({z2uw#dh`{_zp%z@|7L60}sizrnMvgYpF7u(g<1%G}ZT;P;RjTAC> z18>UiCps0D%u!QiV}+MTz5vV70;sVPq~V-KTO4)RLYtB9MTm3`W?NpSkzeVZ-KbD# ztPV)*-)y>(C5CaArz!Oq&$A*7b-Zcm59WU=3*y{ryan^$)~`f-Z%2(N&8%r%X$=Oe z1@$RSeD^qdRPlP8r|n#pX2QPfn0XmNK$V}LNMZ|DXxoUe8zSWxI}6B9=)nS1mP2R^AK0_G zmUg2@kG2HguN`NRMLu6lgrp}Qer(%v4~;PT*l@%YLm9QYp#r5}%#t__+u!7ieGwr(tXeue3J#I zPSx2azp)kM>`Os%%N)^jk2A)D5ob~Lms65ORrbmHCTIa}^)_gaeXwA>fPnVfO##xY zn<6fWmq^n|7lws#_{fns>5+Fse{woqeN_~)VFoK3>H%T#;>8)a1;c?NP=-lerl`ee zlMAQ<3j_=5n;eqBv#Rg&hjM&_l~wDCR7NEG{gjJ1TsY+mT_cTkJcXlvxa*(asqUZ* zT(LqbvLKvsV6QngSKIri#GqTQdyt{9!#nwB*)&h`PaMUfcc0dl3CjG zNgr~Q8UU!O3JMCL`Y55HAC)90Bc3r^jQJ4VCu(4AUHN=E<%N*Dk$whh4Zec+$PDLv z__;Zbxca)g1uGWEXBO?WX)Op?Hqh>-3`P{M(a*s79`NbYrx7OlEthLH*!ZP6iTY_Pbcke35!AV8Q{gkXvazGl6OQ+$~sUo_%x}ZTR7L!am@P!8E z=BnA)+0_6cpc{aP&h%bp75<)2U_<+%qesJ0qQxOiJ@>|TCA!GPmD#-mEn2v+)-7#eczjS@M$u8VeH-o_zAc*Om|3E@8P0gZ`z92=2d5+2 zs)571iE1ylt>R!LX_H#JEYW-DJ zT|;L&uAd-1bBkP5y~JC4_04UDLZZNopkV%5ma5< z#wE{k4c(inrs5ahDVF!J;kbTSp_qqzMZM!1p*QEk^2MZ9V>#f_+_KHUBuCuC+na*i z3}Ap4aur*v%63O!$`ZuvT=dtQc*K=o>g_pafl4$<8W6HyPl;xN=cF`8ng>hiAkZkqEGSR1us>L5bt4`- z48Og#0`|;2z^O(~NEoo9@dB+CXu@U#_@~MY&;&aOQkV^BrV=YBUO@gQx`+_u!XifL z$)K3+lbS|MiKz<;UmnuB`{vx;i-b(|^k2KzZ5$k;vsKx}N ztX=uzHe>rU6Zf^)|M|v_{VTcM4oC2rwhI!SB@6eU+MAa%uNKhJE5RW{hJ>R8T~QpR z5#@);P8RReC$X{-cy@B0oemqj5RT-hJzakl42*Oy=PRuq!?^P9;*7s*bZT+F>OIaM z4Iijr26(JBwL(;?`0D0GHv7gq8lzHxnlHJs2MUqxPK972$Hqv-Z0<$?u!oU9vY*MR 
z?wBRk-y)P^Qfc^kNVMm|GuR_5eyg(41Wl$Pr0d=7u#StyWg>+1rJywg`wxvtpZ&%G z&d8R=*4CuTiFeiSvq#UK*X~UHDFwHXl9c}WL|=0{9EZSHJv#cAdX$1BF(>&Q)lY=C z;5@!AT4cN(s*PxQ-jO47)it!$cVEBVEFP25 z*FkgQdm8{P4r&@&L_VDn10^jTwlpIA@ z{xb&Nt-VY>8^^N8zoK1P51ff;R~S)6cOI=&!{_tGV5)WV5y}GMY`z%{Bqnru@`A0- zV91#)c$ASMtaiL0KU`xyr%BHw2RB&`SKNN`>{%Z98=<=^&_&x&Au_QBQ&Vf*k^yH@ zrH^uE{*7nY#V%o?yy~$BjyZhTusfxW{YA&!EFWo3n3^qqElD#Gk27)Qb>rQm8rV9} zH7A97cA$!q~3VBfID?ASig4)OUY>Ru6lpxd%TrU?W)if5( z0BXh}4}*pnYZS5K@f5P$*@{y#k7(qq-p&21Kj6L(RNL5lTQJ>oIX2MNnKAzU{)hk! z1yYJw2^CyT;7A%+D7cxI2=v1dOY%Y#DcdI=!T)o9l=c`5(DC6j@pz2x1{>E2@l%=k zpiD`L3Sv99G>OXp_43_nnX*l%2pV8zW8nwVL-5iHgWs~{dp^1?eDn!ydRkhV5)ok- zQv{Y(dH0K0&WL%mcSzfUFZ1WBtFkk?U<1%<2unY}R zeB~$j2q}&Wwv20;!*R5{0QN!{jij{J6*RvzWE}41aNx%Rz}YZiLE53?)|#AL=vXe& zLjw}E8kGX!S@oHlz%Y^a03EI{h9%P%$A`~F3@UUBTDQz}*tqiK@+1!y^d*$4Ma>vD<+&7CW#>&-vqNRZ4oc9}5pjJe*N3|W4A>oW z@59HBW@gNg+12fjHe{w9wXOHViQ;DKXqMa580Z%-`cE7%ea^Fbh+k{q&G$q(j&4-~ z8`t5ZN4@OiA$nHF0Zh*m@G*4UoAGoUzVzVmi08Fd{Of}6Wl5eN96Pv$-*u(j5Uqo@ zyo#6n=!z>;$?_*YKp*vTN)cKJOiXf64YOt^iu_zh<0c}-}O?fssQwh{M zRGD04ucGe$V)$dwZqd;t@fue*={a?Q&X&yEGErp$6xx3Zn=F`rE}m`xt5HF zp~dGVsZYtn@l2*F`e(i$7O&?YTqrVnarF#Sd+!q=Y9$43WD-=cfB$|qhX~!~-~T8u z+R8udum-p`@_{lko3L2xPCMZ7*#eMh;ab&HH$m&iOZztgMt$0wXZ0YTSF!Kkzt106 zf_u!$20hCkr`!Lg&YfoaZWdrQQz=vTy!(Aw*(&yf9Vh#jFr&~5pOTsnCa!IIU}tn) z6i!OsBJR}5?tRj>MNT6hTSxp(iLM{xzLLRh+UA1ttVwcSg7=_-aNdrWpZ zz}j{Bah66uukFu2*lC4o?HSfvf-JS)yss+Aoq~U3Wn(aL*=YwZE{^j#X?~Ne+=yAH z)+4|Gs;jWv9+X=l_}Z)4vT|~kNRHExUdedz_WOwVk6}07Jyu7L9yP)vi)#%^mxxT@ zCmRDDz{9Rteq`hZR4}+RSh@EfJP_a)bM_XIoWJ3o!EsKLjPtt&EyK^DVOnKERfq{w z0pozZ3$cIABo(Ki8k~P=;*O9elAOH-|5X_58(?&(Xh=l0=sp*$5Ibr8&RTKcfp9Q# z!m1FB15CIZW5_xn5`v<9MzlfBN@Pk5=jHXpc1kIg@PQ&4VHZkDX~(Wu<`eO?EERM{ zJ2HGiPoEyM^AMMC#&pp-1AxVR8d@~SxeA_|d@OggHBoK?GQq&ofC2_RG`l@GMsH$k zH`TW^Yr}_6xPRT82D^K!4~Y*w`e>VrTCH50qvPZ?MpGZpSnfItY44qBpioIb>W?x7_B4%Ku20e zmhszZjkGjce--cEy`wtr5-{n{t?&VF!qWeSTV>2``*2`vizi%NZzp?Vn(Ak;b=tIm zus?XVip@GWnFHo+8Z_u{5c+#q_x5M??B``%gs<8m)dtwn^Oh;>xG`YiS#6?YaM+R! 
z*m9z?>V`hZoFL$@m>En2R(^-}Y@@<^0}dea*IyTbgmd2D$Ez0>>;4O9@{gH;ck7;W z<)SfS1-RuljF@*-a9!Sgbt9pRgG=L~roLSIhxEoG{J3F)yYm8iw6$?d{sqpS#~q+- zDuWN2yAQzb@O4U@3qj}I(b0ld|KXyvmj$KGnpKpi$5;m$`b8*ee!lrm;tsTTF5tFI zFgPFN$5(7$95Q@NZ*veZ!0Xsqf4-5?uaeO5-g7K`Im1T;)CjFV#UJ)8DGT5C|87Ri zgLL|7E*wYLu<-WTNZ!ELGcwb5uDM`~JHkiI3I~)$I-3PCa^JQjoKNA0Lyta%D~C?F zc7O|D`D&$I=gvR)msPTyJKL_*N>*C{Qz*BrRtVw#RsAeP{T|L2xCbo!7Y>D&zruw> z;RPJ{R5#%BBUspX)jTzvx!(jZ$=}y{S>%J^+%Hc%$9-z?f>+7ir}qRyhKEgmrbX!2 zXTB4#Ev8T6^dYutG(BRG@k=~wM5i<+)NYC_ariieLsa>S{qsw4J0Tf`{|HcbE_ma| z2_y*nr7bBT7VriuTcsyPcwD^$N8arm{ndM&do?N`MP^d&OU^?0LW7DI64U(Q%%@VrZewyc+Btq*gshB zi{Q<_xy>$P%gf4$Hp(IjA6Kp#`dLH|tDs3DaXcpBt;Yb2NZ$Sf&Ur;v?Mg8W7|y?@ z_n;Z5;RG8w$jI=AOn<;0uTnN0NFd7%?)?|_>Z$GN8~(JnPv~fC*t5rC%8@{Tzy^1n zc?bo?uoWf?a7XS8H`$8-xndV`N%DSslx{L!?5~GI0UfPkBg#PB__a70(OcWjo;}-w z?#>997~0OflHML`6nE>;azzfG6dUsM6}gyaWXO{cFqY!+dTk(*4e*uOOw0qJcw*q> z+s_#P@y8!D_=(6MZ1Ni+O(4%5CN*U6O&_>@@4}GlT#x=|ZG!M)OAg0-4Er20;~T(z zZ%W~}!U&0L@~*C~A}=a$e{5=^X-{>TJ5jx$THJY*tF5;ZkLP$5^U>+lWn%cD^}RYf z%-?Zxa^lPc02W-D`^ov~(8p^w;&kSKC4CRUSD#?js#VuYI>v>7_Mk60okfF2ka8r% zmrIQ2DHyHvxaS<#li(a? 
z@v$oY+M%2)y)m*YfViZI2LmQ-O-1M!Uq>OlpNL%6HN9Ydfh%NTT2^gJua}>niBs*n zcdEf+6(-j8pBh)KUQLs*L}H%-$%I*tIJJjR=!P%I=LDH(oa;!#B2bRyKg3)v`yR1Z@BG38N*I@Cv{tC#I@y zYV7^%X=9_tY6ZOn6a|e>=JX^6wZGValaxuFmd*Q$@dY!5rm1b#;puhi9daU3l!c0H zF7z0(P3CG1YqX9RFb|2(Jta45?LC7>$p^7wr6=V=sdDx1zNoM%+`J%EZS62(<+%bIGX3-<|DmO@f@B20j6F-fknj(DfrAl%RyFCe> zDR6dC@~ka>8eW<@`+PRruiq{?Gc#dcUPhWot4+QAnv4#=-8MSfhBj}^a^KnsKM-r* z;XCK!O91XZEpl})&8}+Z?F}8?D|e?|44`D#%bjr?EIjm343|d#Gg= zb)Q@P_L%e0==lBor0LTn#zcm%0fJn|n(WqYEQ*-C&ZIF1K+gY+=$h~`B zAqF|N?4Sonb5KI*?(Tg4T;cFxK9if)4Rf|{=S#k@EGPJ0#rWs?pb?}f-&^tE!S64k2IvU0xJ^78T$#+p$n{AWE3?=&aN zR@e!5pR>0g-n$O~Nu*fnw##3xX;u29cfF}+RrFn-td7Vuh8N6F6ao@vIR6;^SCCj$H zK2?qFy(w+_q+kQ@NZtXFOOWJG(j7KUAf)t$2?964@RR+4UDnVLMes$s{{--p5OqrI zdaeLN)t%DY{YA8^M1Aj)RXmEFHvmd#nRdb`1lLGjPXY*&buL^nlo=~< z4VX?&y}ysDX>Y2~HSI*M0mR8`Ro-;R!O~ut`>FqOlxopg1rf(_hRG0YKv- zTPq+m_q=lDO3pj3GINTw`?Z?l#CQlj0uV7Rj4>xPnHZ&RF!DHF#jubdKvW@4f0nRM zo*eSpC#)Fhz80I|kB_##7PRoZo_BCn^)zJ%WH1vSiKD@t2{sAaC3!mX0BSTNVL*7% zB2{)-RaGdc^=eS{^V%x@T(M#fqTluO^iVQbR&uckA?>@YE}bKlEUe-k5LYM_1q03T zmNdH<2e>LQGk~j73s{JQ^^rI-PYKjcg1Eu~SDpfd*7?bXf^j9~ZlZ0A3V0jPD{rfn z_b7?8)rl9uD<&Z}Zjf{eQ<2zUmT3=7_J=7{XIyfYd9<=*r%_34}EuR+>L| zD}a7dD>7+|n>tnN-9hi8-z~xVo)oRi#CXP87+h|{uMo+KHPh!CKR#A^-8v+lQW$WBVJ$R0s5ABJgO8Jp3JiD>0 zFky({aKdkJMfnlE^0?{42h#~FAlyWO&Urn4UW;B&cv8idQjyO$iV->H*I%bUSWuh! 
z?(JL73vSCopm{{^bVbOSzr`eYQ=iWG!!xISLDNg^M-&c1Z+=ZSvYp4-d%rCrqY4ij zcz0PXb6M#Kc`Qodz5ICs^b+wNx5*OPpecbGDGP;EgjCVrqJO>D@LR40XpFsPNNJCM_SI&YCdoXD99$eBb=V6s-}eK61tu&-#1AI1n+NI#zub65MT+fdK7&HWqN z^VN@X8qx_jx86G--0p+G7U6dO049C3j-3;a1R`HVvfJD_8>ZdB)l08{Tvl(CQegFc z4Tj+nbY)WAi$*?p4hN@Cj}&9;cAq`_^FE*d3=t-RKaS{0w$LkZ8+Upgbu z2?4`~BtI8@u8pnA@He(#BN8eX>WB)OQUU?U@ANAKWH!p#HisOz z#1L_{b@g_oONOOY+6whSpyE`UKaqr`jsu&bE!9?Ps8!silXAYduTM`q<^fK0>^zHAYI=ik?#&-tcqC zkxLx54@dM|P}#XNOA);SoPQ?ioRCgPX794<5BpD&<%O~rQA$AIK!v~rp9QDk*C0hD z9Nk_9-XxP_pEJw=3Go7_bFVpuZr_UTcVGc#Oxpbo%t^ldeh-vVrsJf1u7x=N53 z`yr;P_@rw)?zx0v?rh*MyTEwBpS6we&*5t6&*R#pV$xwWeSYejpaJ52A&%6cKGVAcH)TfO{ zF>C<3zYgT=Y3~+Pu>2DMF+Jy01$#9K$~)ShpFD$|3w0^gqoAQ^4N#SlzF`0Ab`X=G z@FH-6x`$(6In7Dpn>;xTiExxnz3#-A5ZTZB!2p~=g|9ieJa~3ZEbz#og| zh#Od4(%gb6Y#_6cY78y8a^!U$9v<@8fO1C9-w-r(|5JQhKPI{DRKdKYN!$kG8VAgS z$wDfYwZ$&y2c-H(Eme9_G>OKxP_vKwl&q!S5Z|zr^0vYEJVykY&-m%eHZ*rfZ{qf( z(Y1GWzr$~VM!fn^B_ow~ddZRjb5WJvji6*Z$W!C9jP<7=Mt7;=kwKEV*d&>u_uD*4 zw?moe8G2$ouD9*lOt88iKYl#i#qHE7Xu4k~+Ql54F_8Z#WG_@SOj*49wBAI%q!@b; zUJyD^Kc4!0VOu{ z&vn<%F%)l>8$b_tIK>~8^kYZo>c^f}P+#y(-ZRA-+To**evzC^G%&so?mv?S*(Cj) z*ST|*`01Te@VH-2IUfvOnsF3BV*i&$m9H3VV2HI--ZPN$5iyBm)&MPiAWc!Im!B*c2^w8;&L07dpAXdGs8vjjZg-wy z3JDoJb)Qo{S;n$j4WrbaE;zNfB| z8Mojgfx9E7JaKd`2Dn1_sD}>@;cfP$H$1${KCgZy(L^;|0Fo}5)85@IrSUnvhg6WnB6(&NHuY%vWTeUYj1?>$3fcKJ=+2ldOguP>RxA?1d4Gh4h6jI>838Jr!BxtnI+=p=Ce*?qSXlJ6PJ~GKH!Tc>(9FJG4sMGMm8aQY-^H=F`o| zotJPfUwk}TEg%lEO89QQpsW&MLB>uhD@v`UL1&K|WpR>IelJEB8b(zBI;M-arL1I_ za#7#*U>FA-E`0#1_6@unKYq+2h#DJ(E@sZGxL(&*S8(O;y=tN%@PL$DSxoFS#R!A*xU`Yo#=e@*9J}zLEet_bKZk} z$AM;tp=3Kf9u8H5@P(Fht_4x0Lt#UJlI;e;=IQC_Cruh^YQk~nDJmum--Ykbp^_;G z6={aZ6=hZH#R8HBk}QnS1L0#MO;2ItsD@T14^2Xvy?;Iq_bRBfZj0U=8at5FB`B_q zF|SxhBkO11R_nbp>r7^^Tn;2+7z)`-*}>t=m3Mz78weda-ttaKY&Ls!Q6yS28a;z) z@M4D#9a7Ufq)<-x+-J|SwP<=3T-cw*xsc^_z&zm&CqHI-Ve6d>;R20+hLn-wfV|2N zm%gBB+Q2MHJDHCUUTdQReIRiZbShD7VD*Ci!)5k;2%z*C97QzW1VbsVCszPs01y*O 
zvO?(ZM3TGqPFOiyjhC|*Uz(W)Rs&yLVrK?F<=mp}d~}-xu9PQ@LLm%;Bmp*#sa8V3gZgW~z_q#`&<2_w4BP;{=$ zLh-wLa8n7f;t=*g!O@;}T_gmg0pCaTb|G2Um3Or1F~~tVnjZ_)nRo?GNN99LSv0bK z#`1yuM>t0yPr!~!47?1wZ!UEEwa!$OWiN1gpkT-LP*M4T!Px!~d**(qSQ*YwC&E&T zUwj#fq&NO}M>$P+Q{)Z$?KR!z{4O23NPO()oI}PDTMR7%q$P3=)G7#`QN)r~UVT>= zmWVFCNjP?4Ue}SPoB|6SBRGo?Ob#bQ*mRyI{$91Y)7?}Wf z*`u0-5ff9+E9khU3Xtl9t*A{`y>;bfl=g&_L$9FyA$}0UaB$!HwM?aE-cYh&w9118 zAc8-4g^m1Z$Djsfv9HWBk63df8An+d-F$I6JP8s?Ldk`jWm&cf=(f@0C? zGrLX)=rmYL*lO`s#`Nk;)Thi1nHz3?-ezQ|YRN_rix3MmpEZ8WJSaHW5K<~^lxS)q zd54&K;~`~H6-fj&r{P8+{|TrblLZl2i6ZL4IuBdK6IPIz$Fai58W9}r^G=JdFTCP#lyqYo(8ebu6EdiYG;CduhwnZUOePT z9bv+o1*nK#al+VBk%e#skxxB$cH8vO*@su-qiSeddR)5yi|}F2!r9;WBr`J~$5jAH ze4#+XlMu-}41*yQ6}>u&p+E;{nC zk!NMn2AYmTPxh$3NaK}V$E%)C9ms^Tq?Bvvl>s+=JSyNe>wx$Z{pIGWiTj`K-67Wt z5fj41{a2?BRr~F8MFp9uT5WV}tR%_@XJ-bABQFDaWTTL~20d!Gbs9yb!h|j@`By*} zzjmgv{_^F^DS30pz>ELK+L=K0+;?mLFY{cHOc_$7$QT(*LK!P%D)U?#6(yAWE)t?h zWC}$C;YI@@63LLEgi4W=$SgzT{p{T5ocBHNTJKtCt@mE*tmi!UL;e51-}k%swXc2c zYtQ%>W5&&s`o`+XmqBVNPS<-kY1(o4-q8bN)y9;1QQUu~-|Bj$bx^3++OgF7J?St7z^vbic zK9LP$u)m!c*jmS^=7wyrF^p|k2K^>(r|`M=X*|Dp{rYvld*89PC+Ca^E+xBhWjPDv z@ADm4U9stHasH~O+)yu1PeVPsh%0}pM5VZSu%S-36Z+pAiM1F7azV~B#l#U@)Bogf zEg!Te!(}Xg(4QRCjc>tdgT#_^Wy6Zl&aIqU+Wk(>-}_>i)Ge*I7?4ceP}{KX zzlRm$vKvR7MX$|v%F1BgADwye=KJGf1_;wCj{mGc!<8t=gd@1 zeC*6MoLt%1`YJeo?>~iTvyQ$|SNwUBL+ks7&!s}2`1r)>JH>qLy;;2{JaG3=R=D&b zIP`6j*1c+g(2Oh^z0N@-Ja&-_a+d2}>SOU>ki#{-KTsA_X+!!bY9;CcQ5EK~@kly6 zccy>!tQ*_n&plH!d_C)$u}5D2hoerb)~i;p<%GFe8;T#5jvi{D?mZJmw8ye0uMO7Fg8*`IImHsjy-TeO5^ zD{50J=N%ABG`7wS)`}SW1}KK#<6U$v)+c5SlN|~0gl-{0*JJP|0!=KYnKkD^`x<%iQw&kk5q6d$3DA$naA))VI6K zbfUPGcSn4hb5M~p8nBVFe;`f}+{}e00#t*kzoZ!ZcM*x6tyjM$Bbfu~{!axnNe1K| zb6G#eo|Qt~t?D{)m|2uTx9!lP`WQ=**VFa9SO@}NdR6{lc;zTxD;7*TaZ8KnCblhz zwd)0%qT4w^p*D3xUBi~Qou*mBnGisZlGR8l-3Ja#yfvRCoBLX~Q52bgfZX}e<{?rj zN=SY*fN4Nkt;bNrls731;fS|cES$)L`E_-5q0-2tGJpW7mz}(P@zf1<4EHV#mkAE7 zSC9gh;P`R*PJ9h<&Ozkm;DdkIqE!`VbF;zSx*OG^b)ZBox*VH*X%-6rEf~YndNnWi 
z1KjQ;6zun)qT*=?ih($QIX2K@otrH)7~;W0gm34RbnvR9kcDKX&~HzH_fe^-sf`S# z_{OgI`OQ^EFoeu|pMPfc z-%0ntX)Lf3+6_=cS)-b`n-z!Do@BqkeH$LL?Mv(l!7DbSl8pU@7-S8I46zG2#eNw< zrj#x5&6>IdfY=;RPm%JV67)_zPGLqxR(eC~j4J<*rO_90H^k_2E99nS=-k>RSEi3t zI5|U%iZuMspa&EhE@PrXfEd|1SeD<(^S!fwSRCT7I8nm1Hn3PBL&;jgILH#7Q(qbD zf79&-e|N!xNO8~kBd9ioGG)j=7uF#fG5FM_Z;YqQ(2kUxT%4{kCu0)XFn)Yi$_xZ$ zGH1xU&X_yx33a3PM(q)n6|8xjvrn<+s~5EI2%W@Hzm}X`kM8I=6|Ej z0Zu7jrZ8$EJ_RN7CMTVxJb7H5bS7uQY}^TC)?)cEq(O|vQK4DRnzdi-^Te$VUfo8e zuZ2?$&4w;@7<>-XFFUQ1vY#;m)e<%ngl*4cW>ioGXIa*M0J^yGp^FB_9%oJmStm|l zs_(j-U39xGh&ZEn6Y(ryPx+}6*1b`h$;Nr%9Z5r<@t(AihbR-vVg@3H{TQh&!E>As z7OcfufY*4!-;;?0reWko|#ld?ncfgbU7Ohr!s+(eaXj1{JyHx(|U^8 zmDMxnk1Pmi>HQnu&l!v?bBs_4eWv`9G33F^Uv3t~Jly;%S} z52j3Zipm%kb8$n4z@PBj6P*fV_GsVmH8W)C1@EV3+-m?G9?>&G??nz@^FuaZNZNS) zSG}>PXsuZl<_YOmXf%}r0%HnS*jKdRJU!^hphhiRlHdWJEcW+XzrI|yP8?5sT3&wA z*KLoX>+?~GgI2uRg~|W2OUAwaxUe*8-aPEjpaZqA^Y34p<^;sHedhU*+4r?IvY1Y( z5)z4QW9PSObS!VS{(kqn`b35|&&-A^?ZK>{EMoE+q14QUL6%pH!aoo%47bi&evL`Fn( zl6gFh!S36L%E@k6$XMIf+=Ci_QyCbeG79tlDKi%s%9cTp*VNvTQBmf)b|%qU3EGMJ z$uC0YO}*vq0LmK^_d5fh2 z#LFp!e*>2yV=BKj+VN`@Nml^8@6+`Phpp1=Y^6El%1+-_RBtbC+$H4f=zg;g6_eZ{I3l=Oe z)BQQ*v{u5Uk!e@1xaZhRRrCJMUW#m1p?DBnOY1f?+{WBU<)vD`OZ_bg@GPwzqgLR0KT9%-)K>+mjmZ-qX;XRe=HMssN@+sOhQ z=U4S{a8Olx`;u131H5XaL&@qROL>*n-<+>7c0+$i7QALdUk}6lPm=DYF7+BccFV4u zz4S!b7b9jXQ!ukU$H?1EPwQ0w$5SF-WyiGG;*>oNd1g&);q}C_)B|qvW!-iKR?lu- zmPaVIwrhSeVil)DVY|%7_Q@1|Lm|lAabl?YzAJ>AH`VG{N=mJjlL1l?L*p;~{76cg z5sB^w+Vrtc*irVwvK9<_@hDE~$H^AkUq>e_OZ@$r5%&+jm~P_G3f0DxY4^&Djs|UQ zAGD7SLeZUi{PDzQ)}Nn-CbXGalC0j&B8PJ-b3~}A=e?7~!LXB+iw0V3r!g$`qW~1y zTR;Yu4&fipSLPd~vF7<Y$#Yvnz)T^a ziF-eVZ&dbU8)FYk2pIB83=~p)v$`(wki<5KvC!Nk*W%38?;O*5Q!p6HCRpxyxGbS* z-tp~=CF=qkHfuJ8D+{0sOj-FhJu2##hP^{Plt|v7T1qyNTu$cf_C571ZYuje<)(wM z-ju2W_TstUWx||tUyd`(D7>S7P}^@E;dI3<13E4JX&#MmE=dxUelg@yld>?!bxGa+fjTf7!KWZ>uEJ3TW_|GJAbH}gC7zJ z$||kb$htF!2DvOf&$x_+i}D(7Ai9(huVu(KLP&97%l4z)jMi9s;M8UP#vpIaIT^&2 zg}F8wsutP=K8TtQRgTkq!O=zsWQz9U?`rSxg6ITN#X*@0 zrw2gA+wua$Wk}-nRSwtgI}aeKV(_JSVa 
zZ|;qjiT|*WIE+&8SBc>9Jetksw4C$rF2(`u@)n0F;#E6zhzE_sP+X@hnD~I@*gy>i zaHZ(DElo$^^iJIUhLg3q)y95{)|Ed$vd1Z;W)b!7#rrK6Ejpt0N_78Zf==L34x-h& zsXrN)r;L~Or%lT~MMF^z$UL+=y=U24_NT8^d1(kegBdTr6P`TqR*{{zD?G}4<5MbU znERhj!jpEcPYSI-j$^1+m1DqIxA#RO%>BQKP! z>6jZ<|7o;5VsII}yx52%k}4x_-bW!}4WCYbW+Hy790X`~pN%)u1b0JCZKu^nufOr3 z`nt}{F^ALJ7F=IdfuTea@*;MmiRHvAdJKtKPiOt{NjXw-GwddBjQuRKxmKu?741{8 zV}Enn=+5kz+R@!*4-}#6HSkF=xHu)%lqD4xk7*YyitZua4-05XW}ldCG-3bH=^VQp zu`UKS5l_mA&9uM?#rW9waDBswv@9Rto=s?#TF{bj#{194)LwSjp)DErqp7*$Q1s#5csaS;w>XtR5B;($-Jp? z-IrYv5PVUxzn(cV|dzV(f1xn+v$fec`=Tu5%H-P3jLLs>FAZT z?dxGA5yPu?|nI49|TJlbE9TN+VAY<_xJ<$Fby&(Jh zku~j0fiF}c=Lc)0K3k@@nGFyLSF2{v%J{!~$Yy3W#&GPjU@DXee?3NJs#gj-{3WBy z)*XhG`Go}>xf3CtwZ_oNj(EE2bx&yk%EqbBaAUf?dSz87E}@qww~tFC*{*V9UDzEl ziDrGoDz+l{Y`6Bj=mxsiAKKRVTtz~jBup+H>$C-;i}yW-Dc#KdORpVR{qctC^?1AF z`)g0UMgC)LlVO~U*xA?iymkjy0% zHx-#njqPn8@l&)tbbnrnj{y;-;sh4qQ<%rs`A|OYLH{q*^WcnUPq-qwo7AHPgUT8N zTvj%&i80M|f&u9mo^H>Dopzl=~Xbj2>>MVkXF+oo>hU&ByjmooUL8?Ugjk zIJkMU6Myw=+p=Y(v|Z&JELiZO?$;8uFap_NigPHwXFXpNvM!cq-Ctx)%zx;9JO?I| z1&(5+d({=)N21Y1KI`7LT`bzHUmEvFQFKIA?Ro@*|-zi zsD(}=5(gO&7tzYjV|FSmpwx@J&Yv?k^zimfZKzC%)0XTsro@r4?TuxAG=}~U* zA1k>dlW9J+T>9hj&T~C`5k0<+su#P5hckhNYeyN>(lW=x{qQXBinERN-+8erLfqWM zNDdzdqGA-g!^?oY@v$e@=>~7#K7aP}!Uqo~qrBD+YM@g9AQoeTHH@9J=3=|`XXks6 zIx6))x@tcs(N#plB1Q+hC@$I_Pl)wg;AAPI`1y*N0l~G1}G#O$RM!Qv?xVR z+uIC-ULhfVel&lp80LhyUv6Qa@Q*rxNPr-@(MGk4BEmi4Wk&MpBxiWCs?EmDw+0rj zDdkf?1btsKua}gMrPx{M0$Csrwp<|(Vy|shmh-Bx`UdTC3WmJ9a2R$={l+JDDNey* zfc@aXtJkc7JBax|qyRpRp@Z&Hp4+Jov1mm>QF&cl)EalGwYT)=rIFg*DfRK2$e|q7 z^VmACx>;S{*fdhRy&oua37$3K%&=5bKFeTSlv6v7;h2tVtfiMs)Sl{Zsie@q+J6E{ zF0gwjjDv)&Rm&VW;E3Nm_W$U%4n~*_UL1 zf{;1nc9uN zfl(A8AwT4lTdn+SkdMb`Etf+?#$@+!iWqz|Gt&xZ5*hRo>s2gAn0ixESl|DD_3fkg z@?GUy$uVV7Qn@r9k{Aw?YzYTpCVH;8v|0*lWe$uYb`YW-gh5D9u0!CQyIO&X>ml4Q zmypmab2)xTa%Xu@052R5nN~}op5XI)t^5?EoabI0+4GrdEE+yAzFA|1g}IC-0_DCC zE?%>pk+pweTt}JliHkos77!Ve?UrP0vk7S@&{`NIKIC=jbOd8H!rwfPSKuo#7GP?F zpc_M%ePM^MoKCTgSi5#DT0hr$=mk#71aeM3PgLBdct4bFp^x4?rA!mx_U6qS&9C8z 
zb1sQu=U1uGZ%bNTv3-{*QoR6mTIN#I$Uc!@)XwrX+_9G|7gYbbt3j zB}Jx4{`#M`JZwj*rY^DKea4Nfwf6{P7a>|e?Vff?+pZ0?kEF0(dTUo7GXA9D-xnlb z>wu6)s7W|z(G2haG6#)N3N=O6?`!{-wrzruent6B#vMnAUz_)WdKl~A+<=3Gm~4v8 zDD#*Zg@u=NP3kFj%%O*${&wNA@FQ?v&82cId7{n>bkWG$_>{Z=yBGelfqkSXK-&#; z%0>drjqF}SX4f&ye_MbVWn#WtP+-u-HBo_{f!zlumDPkK1P@t zAU7V&VDc8XRkEkaszC8UhP5jL?>;2#ye!|Q*bv7@5phY)j6KjBOsuW;w7j>b<{`WJ zT{Jb>qS{Z47fE|=qdk2`BJHs_ybLA`EIpM~j>+L&{zxMx1~rS`^*^Lwy!q53y8VJA z;WZIc&0eX)Ah)aL_iBaqLt07?$2yt2{z`aRPAMk&)^C2%z>OBU2`*M-n|FUTSocv% z4H=n1^L=h`%haVxwQCzJ%xpRAThZUL{28UR7`y89?tM4sV6A2kXy^-FMlh`@XCXO+ zH)5TeNwL_E%Fc95tK3C!Y-<9`~K%GXHp)I4H>)d`x$VRc?d}wXuV(a z9eboK2;@&Lcr8y3%cvXsgjVQhIwz0!bS`mpubrbo*TEdY!|1IYjtlFgn{uRE*RJ;e zQsm;eB;J((DVsLB7Eb6F277&s1S`$dXjIg{Mz%7H9~a(tVW5CyAufdjGRU<%%-6=S z@aXs|Tr^fJiZ=EyaJ$)4hL0-x&0ZH?22=`ZNkb15?EnM3`W+vD=0;Dp>@J-ac!_eKRX-l9;p=8YVc3)1OR6VHeo{g5~iqZyrzP5ar4p zqd5ioO%|v;I;j2h7j_%53C6w@SNKiG^jGv*KlkpN(Pt$}iA@cITG-peBu7bndiOa&ZvR@YRhi(A^-CfS?rad?x0V{l{3$|MvZR_hG{d z)@r{@ma%*KN*sf#k0qs_zJ3kUqJ3d#FfpQU^{;n6uPF<79;kFh+NGwU;g~gB;ak-j zesfK-srfnJwmcQN&ZtjLJFkQk6MhC_{2t7umbq}dvd)Ea-XT| zR?hbn*U!d}avVZSp4ZoX-b$ci+ zQeWG=GfTH?b?@EVf{JA$@=m+MEQ1%gM}xHN&X9N64Xu z_vHJ|KR>g*#dg8?kKBIAeE`7TJlhdNCphu?W9l1TgLOJ47CfkQvDN@fOxdk}my4B) z>ZX%e&#jkcux>!B=+-sLbNETr63F|;A8u#x@c&h2^Ef@k*0976PtY%9Q{J6{^V17~ zUFv7X^Ta|jJ^cF=-GK)>LZA?zPxTA8{kU$70L+r+*sWIUY){VIjCyo(0jtJJ+UEVM zo~K&6^ros->|u~}D~BL0Ev+T|p^_)fZ>uPxJ1o0B)Yfrv^4e@IVOXE8`8t#SojsRB z%$E$XoEG(Db{}L&50T@X`J=cN;0KbCj^6MZ>YjWpp}5B00ajAB)dL@Ls0m0Ek&Irb zdlmL>A|F#bXeMmE^bA|Cm(N<4=l+1fJ&E+d2FU{Fqy{@YX6qDt`|?qfA|VZz!m3FD zDuNR5z~R6{b#%L86ahiTuTg7TNcqex$ZWCf7wP*Z?>4o4s~R_N-plQSsznQ#3gJ~h z5K{+pp@5`pmI)1E0of%XX#= zy=v>sN0+RJsNCj-2NiJ&Zq()6x{bxC0iAtTr!^gDXfQgWuhhj7XxU#OUri=OmZD=$F|3)wAdR?PGerjQj*>t{OYMjj0d=NCJ!qe?`A;hoW9PPBMS85+NILzP@u=2a z!}ck;%*RaPF1!@AsB8i5Td3aDWeiC52Y^f_KtMdPmWM}Eu&<1{xxHJrZnATRzM_S( zvG1Z=yDZPvs$Ie!Kox!qv2;Qlx%vG zScc}!4r0O8!%X4ZNVIrg_`f*xV&dbEwZlve)gp_8O`rcoyD&w1Iyz;Cish})KNkyq z3VY66?nq{he)_I!*>1D`_zk 
zUKab@Ua__KQ@gIRhC4RXBE{y#(mi90QmmCtaYHV`_Gk|U6mAe4m85$7-*TpNKc_862+Z( z^63XPiN5e3MuotMwbkDj3wgV+gEt)(5w6Qyr^VTyZ62j{8(CkWzbW31Lh@qQF3%0q z3;n_RI57&urSjvBv-M^eP2Wq6H0vqsGK7fZQI6AHJNrjSctOZ#HaagMmdT6(r-T8i zGz1u^Yg)*SbEI<;5{V*A*5*nF9h8*=J?*{xYVXaYDRfX$(db9e4H>mRZ&Bb5MkOjs z_OB~{Nzj{0VFOHG^1JEe4)S?`W@5oWm&dR(!yWu1&04wiN0#bRx&xTA0cs~?;4bd~ zhf7p26juu_jafsG_)L3mN74l&MMODl%n*^H|N4$IUnI*Hm!N168`qfIZ=o5btqnwx z3`|nfCDuO;y9YxO5}>{os~}qEYkf z*gZx*YlAD8Ib_>})(v-*VEIxIV;xzCI`_>_n|}Ws6VlkmHQ^5a}JvK@vqp)@LpxV zqI$==ifMVA2#=dR6LpZd(sKNfzqVIHRzHKx7lfG@JE}6o$>i%n>|l8;nQ_s&9}3B9MBa z>Q=Xy&uvbUcBbHQ9g3(4K2FSqVlvU&oX&BwH?-HKSOpq#9a2n+9gA`PY2T(*k&7fA ze$7ExV;%Ityj(uJPxbesSl?%9=IyurMC+C0*7081`aF?j3BcIDKOW4|cMHDghqoVA z(p^Lwn5vb2fXoM`+?&}77X5zc)LGIPsVF#~3+@RzMI&z*lok(B$2Gb76BCiM#d=6q zn_^`lH4lxs6R~VFDrKZO_4ujH%EvV67*nsFl>24A(4ihHjAXV#j6dnO9f1OSMWR{m z7HOQ0cr;tQp45?p0Ew4yqDneAgdgEiAZA7+QZqOvVs~MSg%yxDFQr-eyD)0AC6I5D z#)<9L>A^~&rzssKU}O)B>+Uwfe)`EPtz%CE>dZopUQ*Y_E4_^K4B$#emc@j6=p5B{O2PC%a^DE!QHfp1A=}-fGkjG(hV4=OA zVIe}(*btdGq!kOw8MR5h*F)w3$7)b!eSO8)+%_$-0)h873%%M{p%YJ0DQiB^XGBZi z^;^dxW6q2rY05QUFJzaw1O{LJ)^Xp6$GX7-QvLTis;O83H((PT3%8_!YgB6LZYqqA_urAVQkqy&lqO^O8v z3|Y#Y)3PB{%!8Sv;)sBJ^>diPuPXl3>&TF=DVar@!>ln#s-=j2DfAmu3*nK)PC}vE zBa_ye>AqqPlyb|~t)rO-Vj?G)HcHZ65zBF%%tF8%HlmM0Rhv5My7WY(0{Qiw$n@@* za;>8vzq%6~vvp+ijBtrh4lsjd!iN%G?oUUb>MNcPD^!HlGxK@$XoZ1~Z`4lbG+i9? z*!TBuC44{=(ri;hmordZucW`!WTeDDdRHuu=+wy=t~{4|JZHPT6|FWz6}&xt z=Bsb`l^V&dZ(|vY{8j(eCH5$JA3lGKqEQ>>4K{7DUwwtnVGeV7{Q0;46khr!!>7EN zcp}?UfRJd_=^1kEh-vzAAVr7`uexK;9&t}a93$r}et1j8$ulIfS7Z$?qDcHi7AJ>k zWxnf%HG5=Q3K;PudHGR9hEW?jtTLnGl#oVWR$5(9S$F-%)UulfIMp&|3wZ3^Z`hNL z3lSp(8D+Eui$-2qjiHlg@^x8U8nUY*(Egdp?PXAIfB>IgNq^{yXF8wH(AK zt@GuBuC)}I&UkX8ynH|&*EW|#KZ`urIOx>AT}vo*0INlBlwlbAI6Ul8)gwm6r}=6H zbrYT0_$GaJfuN$EhD7Ko7!t2;n-;w-YQeL}+x#_foR=1Wi$})m;cL6CcJck2*}T+? 
zZ8rb{ldp3l%|jq#H8k2<=o4>`-}N@0a9KPQ?{haL@;ab*sF|9YbRZrf%?#$nTs)Q- zCqLe4a)BiuOiCuav7=$q1q5a7i*R4;lsk`H|0pcv{39Qp=BlD~_0PPHDa=8YORo(!_OIh4Y4#VWQT(;|T7Tw2)NLPL0Vt7oGgKwRkMK9Y_F`6(o_o%4lz zY!M4$WJ)vUxt=n8OEcXI6Y45bJ$TL$*UV^3i(W?br}T=WoFwY;NotxWxIbI@t0<|V zZtWTK{Oe%G$HS~YcptB2(~|~|h^_c?u_9QJjKRq2?wtjFoBXZ)!lFVNsKT-V&JAcS z$@X`7RoGcqO<74uWERDY(j>eaNTN>gq-!0Fj4Il_OfFI6yzEhCXne5!_38 z%snM={iv+4mGWo#4ehWp>2`kVTC&h~L{`EZBSoqKfHhi0sB0 zL5Ke+>w@XKjpYvGbU!i(i4FuAeb+NDupU*Iur7fjdjjI#18`}(&hDNqZZJ8NhKqgH zBGddgM$#wN-`phWBtzvbmzM9#+cEriN@fHr?K3Nfl>FT7TdKcq5Ag(0q`aVPA%xTM#N@ei{Y zK+}jQ@KDV5-=1Q8g}wq$_ZDC}vkt@Lc3gq3qYRxVlQd$?wv zQ9>(ic(v(=9y5(D{6Cza=$vUB0{OSr;O2^BF$9npObUr7#nE#? zqcsD3OONsw?Ktf_Lu5Ar{j=0aTg}leFT88%z>DL&HKOA|H?m`6oPKiT)shTG zHtTII8J-R6$rV+7`(cvShj7CN9+Wd}FJ4qgypCqsc$T@GM$M!Huf zzwKk>{B$Vw7|Z}kG+me%8iDm`7D(jd0<22D?!z9NO0^-=%$w(^354zm_95aW_8KS| zht*vsRHMCITRFgfQmM+`%Dlysq!{8u=uWMz!Z34z`}VcJzDz|F>VUGE$wnVG_;Au? z)4-qMrWS|m=yoH$aJpC#fUYk=gsrGVCIOW1Bd3)S5&9ESR3oflj$6jxC7Cev=_fUw*Of08-hb&K z@_Kh@`Rt03*gDc!lWvyu?D!^8g5dbmRxDak8{KTjOcZ;c5pL)h7}!WEm$o;&$ZYp~ z5YichuDm}O|F-L*A4;Ey6U4tD5KgJl(+|O_ktIgr{x6wAgd325wxUaC4FChmmy*5KKu?#1Z zuw#6FS4*E?_BuVUq}CQ86zj{J59DaHXfbkFeT6z!Q>n+%^yXK4(hX5x=6*mA!M>+aDKYBk^&+_MmOjYZH#DD0jvG##O5xt&lD#~5x(yaW7^gE0y3_qx$*u_xvME+b zN|8J?8yioB@7N`TbNOmO5aCiNxkDl;0_|jCSJc7jtR<6bOLkVo1<7zV5b8`})~?X7 zqKQ;@pJ|rY%o_t?g$5&#afK#vO+?OYr=e-lSv)*=_D8vtVg-XY%UzMZioAM&s6a@T za%-q^9`#y~Lc;!nv&wm0_lU8N?hM=S-MBGR>Ej=ER1{qkS2joys>jiAq7#y@gRBcg zStsH*c${vGN8b?zGC5AoA$-S*tnqoT1@f`d&&FVclA1_R_8GdGNGAh8SRn@I4=-;1 zE|>Fp&u-*0DGFtE7>_fT?;P9LJz^J+Po58!siD?}U1E`Th_qA4Bq>-#^aH_cZD9d9 zDitaElvp?x{h$s^JV|qv0-92$Ka(X@qq0LpK~qjN`{(z7ukYVm01Mfvw{Z5`$NeY3 z^ekmz=E93}+9`^L3R?nKP}BB_ntg1QlO9%g`qE$LSWVXMqok&$qGB22y+Zc9!5>XV zT*q+EK4lCzI>a-qrCVI1FvOJokkpRL;73nOTUcHQEvtp$i-Nm~rF-tO-;iNT0ebz< zJ!?hW)-s1t@A2{6(bC_oGzWjp{IMM6r_5ZV&?1Do&Dd-_9q^pB_KP>A(ODc%WFd;o zB1l_8bpF&@;UYDpDlpf*uw?$z3tsU}+PMUfO_vhyP#;?QU-S8NdmzcQ<)zsc$Dx7k 
zr1uS)KBpPi-CXWr#p%Qe52Og;gvbJ)DhC$jNZecMUfbBf^3vKhf;FUF%WC1+d8JeD z4685eL1p{|Qt;rtEp27#35oUr4@_i9L>(ky%;f9{TzCuf52$zX5&GloF+}C|$2%_S z65@o!PF`MW&&v=s0Q#F>Yp@pLUwf3}4v5D77)kP~|2?IJrOzxqo4!oG?PxyA%@Bk_ zHKXTxp+YMSw~>8#p{yV=oHVLN6_`!^sG4}bJT^|kn1tUjEm1jh)J}x7uC;hKW7af7 zL<$1}a@)%OoegU`Lg%}f`1#KNNgbsBB}G;ripvXXxMLi&C%@k|!CSv)V*Vi?b}h4J zd6bQRBZW?FB#LYg`BITL^^egoSbJV@$qyPBhoO6&uj-!Tf7Y@bEpHUv;Kn zZ4>|}baS&m_3aEOC230#_mH$Y+VNjCU>~fB5|X{%d^Na}k#X|(2esC%%bu6{1|p5S z$JgFuccHV|zE_Oc?6=xeAfQqUAU33zS;S^IBDAy8xEGOXj;4 zzkYHyPB>*`+T>^P;=xR)yT?*Hh4GhrESZzpS$wW_2HMU)7kOTN>dT;OLaB})KYpP} z?Y)6DlD&&-y!m%#*bs|HMI5Ip?$-QER95nDq13t06E~|y@0A9Y)w9jHB95`$m`dn^ zFp6)`)MXc)X&~t=euDnTA1p^&=yQSfiJD)po-wB%h=VT*z}vN^p6+>b_LI$TKZ&Fo z2$M(+TzoZJ@zv$6;|$;M)D>AsLPV(gz$mSE6ZB?xULB~d(6QcqBw@?H6TbW?%D!2m zrsh$U_?9s?Dk(&UuPUo-!5A~>1WMDd*I*}_W~1KL_Jmv=(vz8LMt21p!9uUo{%Sd2 zmcEE^M#Q2}T~!?|B?rnrR@70iMZPL01(=gHT%YC>q0xHu=z8kC4nE1ab&H|yg%?Bp zIDc!}&+$NA8dClBof3h_)zwu!B`G@chXpvpA6zK%8Gvs{ zyWA+pmDK}`J-%YSDW^n8N4%9-yYvkG!qJGXhigWU69+=z$r}=;2uZUzKh-GN81be6 z5GPYfaIN!xUmi$@wd0!4!Q-CS%hbiTkZZB0G0k!5*YHY3Zr zMtSA6YS)Vft=L7>N<2fp-{d5VqeU++EHiVK5ab;wuOLbD*Iv}-s58PNvO^h;p_}AD z^AEG}F>3AWS_~pT2;zVZI*j6bq73xWdL1r|`N#Kj;ydLRf-tqz_RnVPPTGqQ`ZcNC zCgd5I-joD2p5zek@>m)4j0scrA46aCc z&kS!WDG`?DAbyE$qON1(6}GV*PKUH+IBkqtPpzw>6g2ndm*qB<`|~itfykmvP5}(+ zX>=)SQd)YEx(knunrRrsl^;JZaB673o(Nw~=4Q0CEoB>o=8DTd@6hLz*h&~f?kqz* zY&y1S-N#}${|WuQ4-tRGiO1#MLes+p7x?`CC4J?YYMP3OCQ{E@ihUD|6E?~oNGcYb z`uaL2?kIe+gGkm{UeZ{zPNp-bRk}2wh6wzW#skdV>B`@H!4M-{uq;vrqY7gQ$`1Fg zb!f&ZgAsHe;$bH2+^Y!4OQ8Z`Oem(*3!eTNK36%Y6soGC^6dSZ0m$o|Q@-qEp7ibe zHMN62(xH$M4O#`eei`IB2QvdExqe zdvU_QCHmU;(x7_YaMZmiw}g*HW!Jc^hTbqVD3_|a$x>|`2Rj|3uCNE+DLO%Rx%R^$#)NB*2Ez` zc{D6;DCrUsMz%e}NkH_&a`sV2ga4cJ8rU;qezQVqrKYz{!b9DTIGk>f}J+Yof z-Dd}XzX==B(Qbci>GOMQN>xx``Kn9xE;cvo)o5N_ z+w3`)F}>57H?>aslI+&wZslGz-Q2yaX$ym{rw*RdZmVX{%GCJHyiIkj>x`&-esm0d zkS1m+?>b)WXB0djwMoCC>SHA{((=8Vc|UBsVaV4(3w!MBG2QBUC1plah?s```t{4d z;5E5v!;P6^R;(C+f~_fynf69&+xMD%(YpP1p9IFo)DTgH2bx(&GG;KJ+pla=1nv|e 
z!o}rMm+dw3>0r{pEX0pA-&wRDZWE|DU8;Y2?b@|#$I8{3Q70cwFm*o5AZmV!x4L4- zlP=rMS_IB$mZ;@(ZBUDCy5Vp>oqG3H=J%M5AMZE5>)#tUHtf-}CnjNf?iEkeKy(k? z$5WLZT8);^l@74ys#VXiaPbOi*g4|FiLErciWxXImRz|bB--WUyp`}1r%*+zCUyjg z>(9E-VDjV#L-jf++brn2y9vMVDsM|!Sy^Eo(z1oB=OzC$p_?`}rg~^xu=6=Tt}G`` z-B0=C$&;xFSntxYfXpo>t6rxb?Ch5cVG~FwbtqC(Ryz@Rq_(n3_mRU-pXF*G5U4Fe z@Q}597*q_+v0mlt&)72mK^cDYs((?F85C}L_LuwH{5I5i8pN(?&29wn&yM*E7SvH! zS5HbxLiknx{pZhqVPWe0olfSexJ8^qd5gPKb867*ehpjn=I`C9?0S6e{P}<5{P_%x zR}uvN#(KM-o?qFVqor&Ty~bPp(8mSy=KY0)RvN9n`}Da?>NC*T7(I=BRmvt2EXJza z;VX*?hwi8Qj|8nwQBGi8e0fIj06Uwj;scvLZ{X6NtI4`dlvIW96{de_Oqc=}e z+OgRxJz?C~u~+z0+xF~f+_}!1ii(?^dj|g~xa8B3NcjHC7n@}&-DZE>ZMFxJK}jK2 zYegT3GYSQqPElFeu2N|zKq%L@ySRF{t<|aAm#9Y ziKeEbw%u63I8{BWylvs(f&77}C$1&hu^H$nU7NU$vX zdwqCb7w#gEDtr!qpf`HUX=@M3ry`!1nVVmxMxE>I+~#(UfcpltO7~BO{*bo%gbcqxr-N14Z5*0Ai6g% zE6|}b8K@&UcB(bgyF^Goqw6_ zVLNLJ#DS@lh95PCum|+H>)iCWc(N%xZTfe+Kc&Zt7MA|=IA@ote>7$&=Q`-Wlb`Xt zd740EUjpfeMT(Tr1%2DlBAd%_O>0MU>w9(T)KMrlZ{B>8PZX6HefQqI%P7TNFj(f3 z=>6!dsd<~xn`UU38y4lbZ6sHXw6<=E+`NA0I!xyHQ!HJ*dbNlWI3qLjFJ+}`*RHkh zzeL%&!(@MzmM#4#-JcN|zBlSJc<|t|g7X81|9n4^Z?5CvSX@-(3;Fd7b}fK`0f!Zu z3nf0$`4JbilT4^`5a6N;1{tP;>a%NTak3kMF3lYMgtQ%;e65gBZN zL2gDWABBY%D+D`lzcGi5hD%Hu0h!|KnXh)N1%uMFigf#Nr~FT!?uU28W$(?_&!GF7 z5c+B>#_u26MlcWd`X_l5>2tFAW{1IQ-=7>gdbF4-hFpj?vt;jHGq0MXSyEOO04|%y zdkAc|7{T{tPWVZ-a;>{<_5(PQn; z4`ZJ8?s|MNpO)Ue&}7yX4%yKM%QgfA^uLY^^as!MK(&*nPIdpd8X&fsQqaZ4C6#;n z0rKxkPL3La4(pZHhpeh#9X_=jLk7D`ZMM0iQHRRPN=|=wkHUpZ$60rvpbF%~*$=K4 zoS9*CletR;YCpq4gAPiYZ{n!P!LWbEJPB7$BL`J-lvhyW&b^5fN_qPSdo+Ua=Lv8D zO1ABccK571!BlMqLz=GC%;Xqd{fYDE&Fjr9!7>JA(_RHwck9y!jn3v&IysVv%|jZ^ z96WJtkAVZ#hpc>yt7n(@Tn)F0fh&*%zA>A~;P=5->v)A8$ z{CEY0gY}Lb0RfHX&z-xDqHG>DOoIjuQepVhO$OSGB~y&%2)F#&q~tdb4tmun0;;!e zg7>dgk%Yq%wXo=`hNt%g=nn1Dr3)>tdYoD@{@NBA>d*W7NtjJq{k<=!=FaZ5)(6c) z{7AuqNCLH=$z|oy(o|}8G9h98sZ)J$BvfwKZW9Hlr?+>P9zB{UT&gM^$M5c2mx{3M zYBkeQycK0cc7V=^Y0WIBlSQDvzzc*1`|;+crs=`Xsm*adj@=urZipOOk+kFX3ys4W|Jn3!8?17y?yw*AN$Sryx+bzKz9q*;2;M9 
zoA$vtVuArdJ$^jRd2r$<&z{Y3H$7zap|WzcnOOrr<-lRDjgU&ME^X7UU0P;lBf|YP zc!nle^W54uM4e-mI1E-&QHfDBF8DkE z9J=oBziX3rcMLAPe?LU;wuS%YNy{~3t+~X1qpF#UwUePoc}wg!e^s>}?5;zfZL`Ra z3_DxikfmN7)Tfl`XHe%V6e1uz)UnlhbKB|Drp@82?=p1NM5eZF@7~R5S6U_a@N#$W zPbR5P9<-)Nf)Cd@<7r}QS|U&bJeG0i&Ye#e0K?B+Ie!NaM*bHD;=Z|j|G3aB zqz>(HdTDBstHB|GKZmvdY-$=gOYR;6i0`?7L$fX+nhd;-HUVvn$GQr-+Hnt8o0eC; zxY0tZYgbJJ1GOVZj+{Jm#*{9BfySn2W58q9>Dzwir0_E}Ex$T%po)#i$J=topn%;pM5Ea!`pI@?KZPLSaKLQSjU0eA)Tp;4`uhd@UNl`+XR;0+-G{!H>IbuVygD|!^zq$*)7VJ*k!%etk=kDlZ)PMhG%IDnm??JiImbLlDX=tqP(Z+A8 zrKP)f^{A#E-M7vgY-u^`cQYNojMF_%$6oAs@o_H|mC#<2m91E`Gz4}#5T37yB30pw z1H*i7`B=5l0cX~QhN{ucnmcEXMQr8A6J}`~dyBV<^Bt9#`-D!*v-UPk z+VrghNGO9aKGH=c#*SLxS!wA8T5rWbiu|Cco7;PE(R1o#rT0GSvdodCk3;>G1w45y(+vHFgS0C{)@0KT ze4$>hRiu-m%uv0u+!Gx!z_ISQ&8%*w!_u^mPr|lEcl!9c%^(K{ht2H=sq#di9%AZk zSBaU(Du;QUu0yeZ%vWX!(lrDwo0j!5SEcU|gbrW*+=+p~13Upu=yK0a6s;L`|4i;Bhen;1@pUaa z^^fQ2*_w`5b{Cj_oSaG(MHg-Dx*=Lzj}Z^<1q3pEu|yaY*xd%rn{U{+ zZ{K}KX1^r$jxsWR5;$tfs&*_sYybv2@VpeROL7m1c-_8z8hiKcdsbGK5o#E1aylX5 z8q)8-aMt6Fy&UwAxrZJz&$SfJfC{qlGE56SmOZ_&+u!NFJ#ltd--V5M!7Z~Gh;6_G zhPbtUy5O@tBt%(NRf&oE_JiE&DBgejFkg}qt**Nd9_W4jX=>`c{m`Pbv2NG&f{&;f zoCsY$E9Rf#Tf`#e%)zItJW#%0k&gjCG4y6@*E(I7p5|(#6&7~n_oRZNLZ2)jK6r3j z)EJlft#h26v!)lAn$GA%3Z$+1=#r1o{^4t4J!cKSFVz-Z^3-U{!@U{YYTc&IQeEBP zC&CEh{#Agfh|GO?;C{-z`}IqMLh&d4x60G+ z)JXwK898=rBR}P7NiLn~0Y+A*3egagl04(->lTfylS4Xm)7_RZ?-ZH7&i8*zs7YOS zZz@o13l6R|aNt0}u!^vDnibu)-vAEvkf+qOYgfsBybRiM3U^7!&O4&|?cth{X9*q3CqDI2%s(a9l#)VQ_0Pm&j;Iq2*GS<}WNg3(KI8%ep zCvg)Xm+KW&GF1nGsOqPT&24G`($eEGnBh;9|WF)`f%k4OGli$9egvrV-X zGP?NB+Ktw)zLZ=?##m8FBe_pNpg`Q6ax*R&B?Y8%FVZ=teuE6<4$ z!OPdL*XKrf1>G>p8)s`rsg|=OKdBGWtY>fDZ033HO!~z|ILUUm)~#Dhf{tx&78wa8 z|MGh>=*7Q--lp6tZ3-Bxv%2BUjCsfDdiKj^jJR&NbBpddNZ;p_1ca9>{FRTl+r4_9CZrD?i=nH(DMPTs5v9|cRy$C!@@@V-m$90M5^ZTct8mH7E3{u!z+y&a zPBYb}*`SkNW!YdIaYAWpX!@RCG$B-C+HT@_o6ficet)16ka?Sr{vN8Km}Xj3hclPw z4YX-RI=c!C^@nkri?q;?RvH0ZMa7pI_c?fjC#GXl6%f#XG76esty>q}KKh)PP@E3 z&^&Sq0tG{_k6Iji7rr%_Tq$EnTr424_3+h(B7%z9WlDWy+2o?KNhe)h-NIA<=tL8Q 
z5m*!oH3sDH+pwWN^UJfB?SW?*zt>O&Tr1TV1lo*d=3Gx5Boph79$mb4aXf9}U8>KeTnFrsma{730GZ1hOB|U>$cu0BZX_Z^|i)$=3 z8GjgLQ3?xgxR5$))~u1L_fg^HsGl1H zvYpw#!We67KV=tp_p9{X2u%^MrkU!>{-g9_>K0CbZu|t8e@lERX&!Xy*|RBKdPsyi z@5^VH&YsLx$MiJ8<#ha0c&!$VFlAHfgS^{pW&;*d&dRaF6qC zbv7#_BNg1and)Kr`j$(N^G3S#>(>Gsx62|bcx7m6uhOw&b7ZLBe*AdO{~B02`}f#I z4i56t$Yv}2df6p$XL)< zAUja>uid-bh=-NDcTnc{J6nQ+u7S2r;i7)Q^MjIn0PxG4Wy>~$^V<(!-I`cu)5Hy_ z^8n^#4K4io2fi*Ob}UVXZKg9Y2Pm_v-{OfIcMiD#xjg*$&*ezs#w@J*q#QGn0Is5; zal1`{b&N<$vfd0&=0(rtBD%P_{RI*^@*HVZ!@;7BcGhe_~O&_4ojnBw@f z_w~DX&Aq*=hWfuntS)3<)YDIUqKCpTL=XeXeZs*Vt+^m{ued*H_wEJzJH?&>rT1q2 z@8>se?!y|jj`U+a-!i{`GBNQw_4c;?`-7N_t%uz7GFnVJVq~6E3~C*- zO$@5)wEbZVlR5M**k&-OS;ygTKU5UVt`IFSUca>$( z^IQRN>I2oQYGRlBX8;OClwDKpBFoFlBB)lmK~pz}zq@;|Ha5RoxLOW3-1z45WpsQVonP6D8|4oJrl#FhcaW6ACg+!zHe*zR zMjb{7o5gV9RRT?*+eyKc)d(g;V>S|l@5*pmvJE#Sa?8c|np}IEj@iLpkkG*^E znUcQs2N2|3>F+>{Dk7|_>Wm&WDrRsiN^avti@MUJtc#3s?_dwztRAZ0OV1@I55l%j zN!}R~Ym+T0?b0c_`;cc?L;DQ*!&OM;{u&#fx^Z;V_iqmmXG*y_>Boc|Lw`_K`prRz z^m(OKv$vfbzn}q^Zw^~Wh!wwz`;NbU{(O(m86ZIz^9Ok3xGrA2*zTNrLs+TnG>2_8 z_L(iM;gVmzeH(<&Amlnn<~d3Rn{yRhquMQ?s(bPrhy>M*2L&E)xFrf=+?g@Vt`T-sCd>?b}=I-Me>eN?51hSn;j<@a=Y1LGISjx<*+s z|KOo|nJW@;`0(E}%ML?Gg$J2B_i5K|k80CHJGD0c9Uo)$816=F8qx>I^>58Y=&1@} z3P~27DK1PJ^qBrn9RZT<$-TzJQt6n|$eTy%C|}*ibW}}6c81P$C#pEQRAYbx8}nAu z?}x}MdwjZ^Op2`cGmXXKp$`wGe%1k_`tN<}j`$wdhrK8)!03qjFW1`ikScnfmSV&5 zP12n(2uO;8KprMwsaF@V*U451Hd_cm=0`6f-g;cd4d;s=7PYl81zb=LNEO6OjG{H# zE_20-6)r1QT;uCo93I_t!c{E@jP&5YhOZnq!?1e;c{=S3T{@8@ik?5e;<(=IFq}bl z^^k8~6Q2nE4Tr|~y5I93hxzW3v=|s<@_HuKt~?fgVS}bk{prOb)o59oxa_ei!TQeW zz!z}IV*L$^!lcOfJp(5EeIn%P3Y-2KzVuj0u1CN9ZQGU)CnXL|A)CFM&Aw1B^zo@) zEj4UvTsw~MqG=aA`w06XZ=rK%30J#@tkg%>v4w)*%KiIRX*V_oCQ^MWPB^Iw!33GM zfuy;9vP#S>8Y~QCZlOySHKmv5nlH@`1;%r?;6v+?`qrT`xJ(%gpB;dBcrV6(L3occ z_tPt|XU~BCQPEkm*6O!VJI;Uw#K(2$W&;iXr*4GnhwgPWx7?>&$Gw+X8i^eWNn1rl zbE2N)o|xSce)&$~Oi^B_{cTR1iM)TyR68B6#rj|hB01guM`w$$k5_6dU%f> zKYqsjiJpGCt598Vp%cbvS|uZK{9K+huFvaY80rlOL!ilAfD`F%3DjiLL*~mpd-rZD 
z=mnT6Wtb;p9EuAw_N|Zp2VgDg5`jvPmoC@zmXg5qUwr@mC944VTyw*i18asjxG4uY z|NP2H4_qmtSAlQR=vWtF&*r{IFcnrJFHPItU$ytK^kayM(wm_FX?3=j z5ItMECFjna!#2KGQF(dzVr!N$XydgYcy~SmInVDOwJ|yDv-YQFO)Jdj_pr}jz8HKP z8_=j*VQrEA_i&#ah01>m@T);*v;k6*GoC;5T%=*?mqXW?cM=t=LL|T+MRW7)>-gA{?tw=B0cPGX zp-q5A!c}bAq-U82y=RxQO3kFf1r%ZSW6dJlhG<`sZ%XTy_^px<=X8oh|~n zhd^aRVLSFHlZo53HddXsifYxN0~lWRP99pfg4XpnPVGTtQB(|`3^?@B4qnBBZAwNT z16=IQA>fqFYZNU3Y&wTm|bv>{z-|NJ>rC z>RgOT4LNuhfVKrci%#9T)yHe?3Ua|vr}NC3QXIhGQw7BJ(YAcND?EH2&RoPnUCSZ7 zW9r+3I2OqfK&KLM#LskL>GS7*QG|CgG;B@Wr`o^guB91@$VW*b61}WXC!EcL@uVth zRG&K0n5y6PZT=t;M}P$_9sU<%Zys0k`tJX)q9jwL%u^|}$UKuFp(0ePiG++r8KVp- zL?uI!d2FBonaWHeQO3xWp(4t#QKn4&o>%tgob!ErzrXMAxBob2@6X<9t@r!B@9Vl= z)9qyB7#1GBf)kwBM}xL))vk6~eufE~UzyI!IrRqCx?Y=WbAE$0FqjSj7!R!Yxj1>; z1$>#`C#Jk4oXse*5^Oup>J?#=NNoQ6{%Ln`bT=UZ1t&dj>*Q1q&)qs=859bxFy!#J zcaI3QxegFY?c%~@s!KjA>N`=kj$6X4Qn)B(9Ub*VxCq&nhbLy%ZtpoFY6L$>@p943 z?c25~n-N@AulK_kgawK~(6|Is!-zKo(M{mb2uj5k)W3DXJ>TcyVMB^F`ihM+b+niT zolrHb(6QNF@v5G_+34eb}&fK)Iw^^)y%0kdkNDIEiFf@x4ojTyf0;Ga&tGzS}1kUvM{;@!6C{$24gAG=b|2KFn z?4oUkuElr`POlMGs)aM_9bWCgYeT_Xn>8gov|rPaqc7&>rdU?#VKeA&)5I70&dI9N zpaGMS@JGtEC2!xxaNR{Q%ndDY9jJy{txW8jfh4F3h{@MQevnbwvku_NiyCKAUP%8u4HbKDnXsO;HH-GLC2h+#+FfbbjNa5L(9?`)*tYv zqA>#t9mwEm)7r4H40BGYf`NIt2m0mW2F29vsk@Lq7|#_L&+7(vuY zAJGS)%P6D;51Ys%l>wTeWVI}M)+XAQjl0jB>7&>~pXFR^ww763YJ?nrtzpLFIY#NxRA>d#@_QQl=b*Zv>@&B<*LHHFg`G`)`l9U$_?za zM=*jk*s02jI~DC7qXN2$X!c&*8`MUP5G&8}^>v)h;3(s-@E8qtIMX-Ef5pl~i5kem z$XOx|{Waj4At8Yr(dpd3KKs4L^y#bVqeX%q@0{H1Uv(mtylS;-mr&b~f}sBKO~2JK zF)iixm;&d4s-7~Q*^_NK7)FCpy4Us<$9Jt+Q)`8euo3mzySWtm`}p{TEwf^HJ@T80 z>f)tK%Lfn0@v7h6ZItV{v!25rzu|#G2zNvJAk$htJ{-Fyi~juT1p|*3Vob3%tXSo7 zFOg_%JKLSIMqe|I!G3w#y)Ff$0 zRIM8KK6e+j>Hht#1?xE2Q4R+HJ`X@Nme92aPFg|e5(-GLGD|*bSyTv=CfX7T0VSt) zV4nT@uQh7bB1^~BbpmM>?dIJbW8G?lX{-9PlEI1Ii!IXc-K$qC#h95lL@S6S!U2)S zQZ*ETNTQA+{H+AR_&(8dau5?*taC++$XR#s;>Dgz|MaCP+sl;2q#r}@K>62CtrFw^ zd48b|d>u8qMf>)(2mA?IOw(}Bva3;C-=%+=SZK|Adw)48R)V?^FP5=$gm0qoD5SZ+q}TjRJXYvu1VdYi>S)do7yTg=W12 
zjGwh&K_VhY-ujh4kcfa4!JVq>q7> zs6-cyt<#~mX1~2}mN9icXOLhV0#pHLb?n}K2#P>}4_z2w0FsP1O?d)W@Jhq^@4x6g zFm?wkYdkFvI#koo+lqXp!x9#d(MA+M;4e@t?bsS79;mFofA{+ZTovb40hCtBWrLRH zEF`T}$6`Qa?Be=cN+(XX8+Cz~)Qff|1G&maNC}Huy?7W>ZSW$c-?&i=7_?313d^`I zTqiHDNtBR)unf{bGFFoj`4zl)QH6DfH=*u;Kk5rWWN3b>a%g^jpSSrzzG?!(*aG0B zi?m3*8TEEtHFY;4M0Jz-l)0CWGje1dfy@{t)_>?bapFXF@Lq5a(H#_&l!UUvjKKe$ z54l~WSCxlW>#Vv_5u92+)1|{N4E}Z5XSFaeW>8#ec=wocy}|dNKcPEnFm4q7+;Myp zvMdPeaC#rviFAR^r@bdlGG?%Eu_6#{Mk65YHES!)0WtAKM~$tOcl)!)n)R>v)rF32 zC6CDTOT#~OViRyux^-Wx3}+A%NV@p`K>>$Fi^2pst#>`Ugh`Vpi#AnGXXvyLjT-et z#_E6Q7|J>ssc}RYwP~Y;!YxB;Qo&2h$uUVut3_R#nwkpnTSpQKTrvXVkAD30$*ynv z1GmSnr)cEM*>||Pk$2tZ&DY@uP*Z?;ih}ePv$miKtWxJmdHt6Bka1gaakT4ekj)`*7&Ci@S2NkHclXy3 zw*k75jLxBSWLn5K5Ee&7MG=c5|D7DU9MjtZ8d?T-=9fHiVaCPQprx&yPBj&`jX>%& zL(3_QD>vvFd=SMqjeEq79W`5g$XAUWJC^RM1~M!m;u)K}8f=KaheUq0`F~sF}NgE zQ&SV&O$J+f7JISVK4ax}ikb&}m0#)0>X@=Rahr!6fB@R7){gj7ewZncdBNrQG~H%> z{S<~(iLSRRyr5xOBucQHg@esLT`TGz8lqgq7$@u7uDBUrgOsBBXz$P5?oQ~rR7G$C zrb4F!E^0%E4kZ=uM3W75O+U~~e*E@r6`?>Lhcy+{@O(C6&xN1ch``T=daV2uF^1SX>sAsfY5+CD69_Rq(`aE znP|h<|MM%zzpnFP!-HRWo)|8GfB`=@pIm~rd=^4IOKvL^{Z@mxVG zq>Tcnpn7kgI%go)DIt$szh#S7zGJ}|nB6OMe#wBjQZ29;=Y@U16=f(ew8Tt4+8b`SjZt}Ka zHqUNmW!Xd>2~m;op+{L}6RUP#d-dv-V$bKJw=K8jTv#e%)#l%c<7cGeEpf8V=D%9} z`h0*ng4$(Za?zCiqJ~IOD{F90XX?Xp@}VL!i^6&*?*~lTAc|@ zR2=E|W-W}aXt^Sdv#S#qKge~NosOO1-)~zxMAho-u01ElGS%zUt-G8;xiRrKfFsNU z$i2XhzV2uJ)bevv{5_eS)D?-U30s4YKWEmuO&cZ*3m_c7dC1->UtU&8VHb*uX3hv3 z)f@|D$zccj4Eq}F`;;61u%j)sK;#qRmPQ}tA|^8)&>fx{4c@8C@AnVix{RSgt|UMa zpV$aEdmVryB)kmOLY4IGQ1Nq=$m!YUv^=>gOCv=Z%dj?5S&*KW{_cO=z3KKvgd>YS zJ>6*BqH^DnV6ulHMdn-^ZfTj-XYu!rLeUCWD1_6oV-BaifkHtct17;upzAd0s*Z1l zv9fOYx7+-b2`B>n*V2U05yxCz#LY$RLHt(GJYzF+X%-QxGC~hVe+MQx)WgGw_E_j$ zcBBZ_X|PlW+yf6`cX;b zp%#~vc=qh2t7|$D!!APeKzuZ!bl}|z0+Err8fqHpA&OsIRj$RY4o-pl6=4k}z{F=e z@I4eg-5L3=-C|qXHwPS}9M|Uv$epKe6g6H1OKbk6muUR{)#9ut|4}u@u&VT$JJ!cf zqcLeW{O*{fzo@`v7!jM?Kb-uEw1`ZVCqEu7ig1cNjPLkYXLDmG13}h)N~4VD;(sSN 
zNQZyS09Mr>`~eJ>!Yqn3wjMl01De3DJGSAcJF@VL*Y$1PYH$m#fLwgp+Eb^#R^NPl z4TupNg>>ma!0ws|=FqFOtGI1xavNrgZnuDw{^Yz8J;`yJmj|KK5`X6Grz2FZy?XYP z!V57>tPilQ_T|afabhe2U=1J@QU+a?hKbLuqMe~3U1wqk6uch=mxYA|zJ=&{dF)+L zJ)-~S)~Sminz>sx#{a_Fl7>Wzk81a>_0_la61C7`FenMJB~6$A+H&lrLSaCEMe67e%CdQ+$V3;t;$(PaEYaXGdw-d}%Zhzna;MUeiT* zy=-hico1iy%#YhNm*L?(36e7zvMpejkvzF@>35FzQj#sfpe(e4XdmJ`7~A!du@&xv z6Kem%V?V!4Zz46C{3VA^AsfXEBuWF$a%l!w&4QrgyKMXHR_U^9bYZfJr9y%0#N(1~ z6;n6iI%{B7Ne*U!Zizk(!&OiW&yTEkFg zrIRBs_)00{;la!_Q*9}jpT zgx_S_>gP?pmVEc;=aS_jsu@+y(Tk4HKus{Sz1KFMGqA{~Wx9-ZG`a-qr^*JlKb_&l zpoccp!yP1BGNz!H`p=l+;USC=7>3;Y)M?^N^ut#q3l_K{cE`(?lVsO}5aP)=TDbDj z+i|x@7O6we!-|Lj%GnlX6FWy_860iZkg(uKx0rs9b<>o-=W}BZ6QjHex9qADHGV~wX zbL-;+u+~NwD*p80`(&H6jMyhe#=qY`W!fFFlQIRE$)!4|CoEM%2(YLkalS4`Vz}=w zdEAu%x&ub$3I3MG$`p<1VmWyk-~9w#U}OyYGWdBCt|H} zyG&;LcUg1%A~bp@q$ey-azfBh?z_#&JXhFdz{noA=G@|3g#V`&dF+bYwR6Xg!~EPi z{!JOvcAz$*q}A%l-ltBLHto@|;{cwBFxz|ica5y9RDWFF?F$QBBz`^A_cavL@=^~@ zS+1iSVI$OE@sm?sBv6Zj|EjTh#XgXXiI`P{5AJ1RtUM=@M{q<%X}y!RYSz5X$Cmd+ z(*vx27w&crLtQn>-1Y4}>hK+x;dsWr7s_^H16yzpOW`Mq^6I`1GBaC`-1`&qJOj%g zlTbCq01ynYSarN6_!ubpf*3+DIXth?!YEp=gipr#9nQ{>IdWa|{#Td?DaAE0+Y)M` z2Nbw%EykOfm6fY2yY&f z#-j+dLdM0Vx>wdjyzQa$lgvsA_Ntq_quppBb(0s5zV^L%{`s+6Kz`FR9` zg~4bcMcC9&PR1#Tm1eV~VQN{K?j zjEK*;&F8aJr>YtnrkeTOW!Q@c?#prFSo4+Nl2<+HR}fs(uH4}K+lIiqnVbdUoRoZ2 z=5PMemgL5pwGE9D2Z);6uqqM;)k}ZcAutBDk6vl+cEffd2906XlRG?@hFO;Iq~e>ir;;1awQ*lklnt;d{K9Ru9bw0)TGxPf~WxF z2|ut+b2*@uoA@4zHuIB0QH5r_;4|z2=^ni6IlT+NBA;ns@)|2udw5;plv^s5iMQv< zM*t$Y2&*U}drX9OYY|;^=+&zsbEo*2P_l=CT@7lr;Uc^WowX?L?!bMX!$uh1XP;c@3eK9dr6qJv4rDrjMs`+(G8nZRXw)8_n=dB0AtB2leOEHIMm+n8% zc~MUN)~JVpM|#Ot0wPiVX|C(J9mrM25h#`e_8+{_*`H+y6Dkf7+8O-$s?L&|QqluO zr`d141U>m0rXtFrYJ4Y9MyB!^Pfi}Od>^b65+1H{=xt=Qa3@Y;7>25bRVjU+Bz7io zH!r3T5JIe4Y6>(?E0!-`u8cuFeP@hB4WrX*^Yx4ev2KO4p=iV2JNKLDHg4R78ET>T z-pQW>kuh8mXl%`j9Du9u4x639wmyn_t6{?i&6v@SUTPNlMoJ)PD|l`?>MAk|?tIQ} zkwp(w<CP=XBOdC)R;-<$d-c04kE>VXRlkvQOyCW(uLt<}v}gJj zISzS6roa;%9#t{@GxDLp9y6nl;flfvR&T;BF*<^wxxvMPBCgoGcf$F(5~#rcmT`;p 
zI`<>LTS;gEGkW6b!nw)`_wL*o$iOmT$`m=bL?F&-OhYh;Frvb~KrIOF`!4*f)o1>@ zHebHE8u{9NSu(rc?F6IBL-9DGR79uX%gZ!;M9OJwTos0x5nSjs-qo0Wau4}RKnQV2 z@GTj7UZ1}j`$ZJG{H9Cvv;YWkZ;A$++RBV_MRY{GQETxwuSz>*0$SLa9$YXT|o*!tG)tNt;FjVugdVrsYm8SOOONo zj6Qrrk;qaqCDlJD_$?n_*maVV(_~Rq4&iuDMTyps>nO(fU#J;vk?`VA&kv^@2B(Qj zZ5qKxJvi%GfGF2l2+J`h?LpEKKRQLpx!bL2MErFyBsB%FNd{-fxWca_Uk%0$sYuJs zb&PuyVv|LF`6pnXYs!+j{NJ!8Rmo$#LX1WVoDAi!B{8=N+ch=J_Fs+oB8EY5EM`7U z#V^QXuoq1d2MjX5y?eBr-(lKsbG2HHRQc~dJYZ6*Lu0RFvC;o_^N8MrSE(r==0CwM zZHhmLpj7;ck&(-o>XTZ6X%o4sx#;UVJGXH}Db}I?vVa2!n=y6Lq!kD|_E6tN7$cVv zBc0PG-vQ)bl=mQYPW?J8i`QZXxF^z`2{PIpm!QjL#1}NeJ@H_CBcm@a=NI>?B#i76 z!k&snnm$sa(uFshZdXIWPOAbS6?YSl)m zhagXrv(>rX$oA0BO`ERa;~65pO-piZ&3+!-zNroR^~zG#r_Uu8n|PkU9O0_8;A=q< z3Ya3p)T~7Z!(EDD@8@!Mgk@%3S8tuOkujDrM3j1IX?rZar4``<2{HRX$vQe%L`Od2 z4y3|Q1XE)2{-O5`uSyV4zW{(QgBeZj?6#0gyum7T3ARI9Ld z!*;tiG4!6|5VSMl3E@{xE1|9S-`znO`aP-+_V0GO0%-+*5Y z056^FMpMB|o7z@uVWg~qfL}4n|IUi|&Jnk|7aTniTMJHzNAc*OP3a$Y_@Nx}Ab~70 zU^c5@TIZX6Ka$BiXRPcXI5qMX@wKBriaSjOxKlCJ$EP;PKHId-q5VY<9-wxCGZjrJ zqH5O}4LHyLYVexIX%uqmXS!`E1g;H&ft!MafyBcx%R5Rp$VXj(ZKn% z0FH^xX)onwV}-l-d`JFTQt_(u%ig^!PUPR`M3{hyI@vpB$eP<22}P!gTBF^3hhPD+1?!i> zMQP;)b3g2I#@Uy>pjtB&_mz%`uqh+!&JGQOgb3!S-*kEj0G#8Nl~(6CAgdsoLc>-^ zyaRH*kIUHPHhe_rY{HKg2D!WsDg@})cXL#Iqt5c&l1E&j~}Wh=V`M>=v4 zac0_mHb2k#m$h10F29ChMppeeE#4Z*=GBclXuud2%uc z+)bn>qOYI3n;AlaPgu}TPp4?x>m{rLfRc(#)6@6X7#Ei|&H7_XyX|pe@fdB#)9<4VWiniT=+FJzcx^v`&{=xF;_}>fP^y;@i8m`v7BElNmxA-<0X>aiFCafrx^bwBh2o zLp%*Qw^l-^DhBk2H&u_lL5&b+OJRa#5H?P=haK6r zu)1Z2+!Euqy@F{e%;LmRVx5D44y2%?(1bEz`b@=tM7wJmTzNKw+x61hqBJl6`Rj}M z*ihwB1R7p?crxjdUWuRfeFw8yit&7(0ivKKL z35xE2$lgv9R~9}5{9Pxg7^H$=7?J_;E(T#h1Fll_AKXfLBrRU7{nxy zOHXJKzLpr2M4Bq??_X)4Bw#Rd8zIjTY2crk26d=-GoGF{U{2h&*wr$FH++c?^NGtO zpep;^IzrQth7O72kYh;|^!^MR92UGTL~xPr=6Dq<82ase5i$0OYHH`P&i<|Z1DL)QVvR#N}25!#^s@wX=e;xOse6wiQwP}f*{4(14S)o zIwm)DHBK#oH5B^8He9fFFVOR z6YMnv9%s(HO_oK?roHt6z#0uFsZN9Z-Rf)=f&%Od-#gPRvb5DI6~#&C*YkZpJ0X4| zghI#DZ0qoq>@jq`rO#SAx`?DSj`Y(&(YQ39K5W=9P-X`#RW=8}Tf`>t&SAK!VZ%m^ 
zgsX?P4SP+Vm%;7=@LAZ@Jm2w*9oMM5IY##4QuKrpfL%MWJ7X_shZpc?WE6^%nrhmc8$4W7i?&J z=;=VqC@W&)fFxHU$ZvsA)M%Y zz<`D%MtV4y8~O=YR7?8(@slRSmiSqkl)ih^W!{HpO$GJkaF;MP4t<5<$&)Af1qD3J z=x7A{)Xq$VPjIQ^2a97=NV!529o=)?BzhvRTJ`@KHer+#4l3Mk^l>dd;b7@@mu>#K zdFfYH_IR$j;j*^oCzmwwG_Tb1fz78gL(L*oLoxWn#g-Mz-%~d^kB`o>8vOyOo=9{5 zd3J6!(4+@-fY!>=a`HxcJ%nl=6as+^5OtL96x|4)T>^##d5%)LZCS0J(+}mIsM)xZCB5oXfX_ZuoZMx&6Bxf}gPWk1}iDJ1% z%tOY4Avn*iPKH&5um=G;nr}Or1+nNmIZt#QtC_U&*$5W@!3Av!;I9Ph5}Z_#RV(Kh zVht2u5=hv-VQ3a{!TzKY%{}iLRQhevgj;7$pDrY#Sh<_!KeLy!^Mk(q&zw0^f8l|4 z?jLB$v!2uXNeG@{Rb%UAp!@<9PzLvcRfhe1jUo!(WFvc>eyRdxDj~eI1Rgk zN(56fkOYP+Tqn2FuM;6EC;Z`~N9}iP#%D*jF7*{_d~BiS7$$BJkK1OnR&M%rWdP4Z zt{L6(`vD*KxX;r@GEyv@7pI)*qC*qKJ-g#^Lihn4?J_!naDd({=HxXoZvg(N2NpKU z1iA$65dlY6gWd6#Jnl8HXhy)C1D>gW+kV1GG$OQ9mohTq+`8yRIZ`0Sj1H8_lr7C0 z&7w_;!GCnutfQI7poRr3x6pz{PebFy_c5E3qz%9X%hb;0u!4F+3ygda{=mCjqqS14 zvL=a6j#pC+uVt-TLfF=9((~}c`FJp7h!+v9VO8Pgc(74}Jfb7pPD}1u-g&o4M$ro- zA+7l^OMZO5NQM) zN06HcK_dEhh}4W0Abb@;91iKvo;BkKt^#1t+@Kfrghl+{;RoYxmHpVkL41!S#9G|3 zg(Z-sLsZGO9B<&%nWFvVd&&o)QZqBgf3AGx-gs{1w~tT&F9E_HgM9zEH)hfSR0+VYcJzg z_UC}p(1}T|4Gpqr{k{GTJqZ1JZgJh@TVTjmPx%_F7-wTRnpm9e?Z#L~O0X?T>=?Qg z6qOsd+3Q8w8dQ${s~K4p1Ca38sERcMfxyO70sOvy+#8KltSbWwAc&EX0g9sd!cY9< zC?$7SP~VUEJnfy4a+A)+_1!-0Y(5dF?Y6Z8RdPw{sG6rW0|A$)ak^AQcyo^4 zV{Lff=1e@yLjVsh(WxS3WvS`btGAN>II(pLy0yvDwq^=;`EUHdMHVjwQ0RLF;_X-g zOGkcXsF5<}@BgzSkge72Mo;K=t0+TYmZ#$okv=6;wI; zO=BO+ZS@*8(pIpah46GO-gm-+!&}Z?@IQ9prs9Q@81B7>ql+6%%ApWh==(3wW7J7a zJ$!i71f@}Kr9HAMofRNiAx{OD@yw}A&qR~(S2>MU<2udzsr1hcvk7oGh3;N*q?|C0 zlStSau?4j-;`MLX{r`g?I7?L8a74%qC4RE$DyxH3QL$4C$qJ`%8Hit=s^|Lu|02hz zU;>QTWgscwr^D`bH!@PEeIs5A{d#5JZ28%Sj--Wkq{oo)*4L89*c^M%$j*PmwMjy& zit^$BLo|OxL|(?$VM|4MhI*J*@(!X9k-5l4bMxj!dt95^eYUT9;yc0gnRO&m63WQG z-=$%t>o^wnvrH6$5Pyr2ciD`axj!yWvfr>~Ybqo{&#a}L>+C6l_L39_|q zfjLB5_i#Q)f>^*f+jZ1Mt-9(s&-`=u#{C2OA{mB}93+NhbO<&lzLb>-Vo?n~Wib_* zC_mMFKw1d_B`9a?jEF%2Zjr{K3!ylY>HIQlSZuDiD?sutbN8GC9nhwZr*ZZFI_)4% zM8*v@`a?qe^0E+4UZg%A;qLCf=#hCi5?C5Qv?SkS(9qTui(T-hA%m7K;V&`|T$YCH 
zbx+V|%Pif$CiG1^cdns0icA$nKDZj-jO1jZ9ozKb7u-5p^E7Oc5X)5*qJ{&^EvVlu zu_Mog`c7XZ_Tmh!MUzQkOYGGY1IT2~E9BCv=#dCv52dN7m-mXXLA*{Mfs{md!y#n4 zD%LWtrL`Uho`{g8Tw45psFXPtOmB(>9&)bhZ~GAKB2+MVi9|q1@5CUIC;8@$zTlX4?AMwG?hYZr%a))s3F)EiN_=_qfbVMb z`S<57jyB?c+WK_ytUHDy7w!VHKQ@5hCn`OJITl)K3l=O0Z`3VO^o?$?T8N!yku3Ia z1)-^nokKr&g;8kl{{50oh`?GU)NFiBAv=i(7=VFcS6z#jeLj4N+)INXQPZ5)n}-dF z&Z#_ff@Ri~taFN>!7YPtnOs`)*aa2Dw{x-D1*i?gTEJ1baa)Gj?uK7p&22Y-Ln>D? zD;*ZG36P2i7$7JY1CZlJ^nys=Hou-)-r`(gn?=n&zB$&fbLXR~@(|U8zim)EYU1g6 zo+O%34J0@T4nY`ER$gK!Q@ck3P4iJQIrk-=iq2$i`I1DH>>*xLY1x*Lcs+kasIYKB z4B5op#@sGL#oQ75AnR4~ZHu<7kDn)X#HJ-}4$bOiPSn*qzHm>6t?XJ^EStB-XitPX zLrSOk_{O|16DCZZ?U9%JlakspwUDf`26|SMMp|k#p~JUztqyFW2;_-OFP*GokVSu6 z;<2kU`4Ou@Nkx%K-sYh!qJ>sRKYOjQ#H_7PzWczQheNv(?=oL>!AMxvp{piW3nc;5 z%?fbI7F?cEi$MOHY)A#6nEx-y1irOSGz8A#UMR_tq0hYpM0ZG(Cb1U6L)NZ6^5kXl)6n#2^c#0usgbq*`S|Wc&I9EDB^Gxs z0(Vb;_B!!0M9OGBam3cG)e-OcsE-8~z1JteUS;%UyX**(CYJ)b5^1 zjKC7UD*Tk(L?U0*(lR(Px&+O@8_^`zOG(K2SXSoH*B#0oi)L8gahS>giUv}2#L>)a z7iJgUG}&hAO%U?44T!{91DhpLKzPs)kO6iLqD}lakB9TQfZr+ZY4EED4>H#q4noE& zNb0`>Pg+t#7VH-TBtV@|-`JkFG(zx9C~3rFnq)%|=Oc;3t*_M8QxC0%$U=FegJ?Z&atrPc_}pEFWYNNbHH#=kQhQ{Gv{bd= z?39oh4zU(1t~wV7ENNv07!=fn!`e&c;CyO0j#d(OjyrK^iR=^_SMr7_ZbKt}4*C}% zpd15hHG$u+MDsAjz~%{LA^I28cr9@Wv$&;^#zEnFM{|6AZ0Z&+qohGvAqnzd@R@9GP%f^=(iN=Bj-#Vw%w@ABwy(W_YIhm%?Ofaz<#I>}ST(I~Y zvH>&Ei%Svw+l?Bb_ee;sj0_oi1HDTod>qf1Wp%Y|F@%`GI&ObeO*=B?S$h&LE4{jq z`U;sH;=+uQ0m_<*%u3wCPtQ2pz0(XN$VS#MIlm0JANgpQ34% zY6ux78NeLZ69^A4YQ8O`*z6wh+Hp1R14&ZSd*WOV4Zvv3ABYPDJyIU#PMo;@Z==NR zE`N?D3Jxzyj@%mZZS7qy1entsYiu3vRG1oRobb6$zlg*~4SM`sZ8!aeKVbm{ z4`4E+T-QQYu(-TvGZ#`c#5yjnHvDdFel+YlZ9Rdwc1KXb>A(@a?H&j1(@1%-$1!I}B_bKHWbkr{#5>y7 zg#0znL+6m^d2B_ThKX|>+l*$UiU>udS*7Ku|F{(&F;byq7y#0$QUXcxV}|nn(L#i< z#!S~fJvFx)>(+uqn7!{!tS$Ka{0|>?&wSM*B2_G8z&SvY!do)>n?4om&K1!Y8CGSM zuU4ndy1tpKFwXFTBq0SvMLA-`#?5GS3XkZhgKlY%#);2SITJT*4>@1S10YS#zR7MF zNOD@ZU?E%COy_e7^IqTW{L!kqq z)S42$R84NiEBp>F9Zpdc?>*%0)PDo!5^r-D{7v1{Bml-X)6jl9D;9JSF^U*H1n`26 
zXvwnXW5$zJA!!oSzd^|4n%o7G5}gX}K+y_fH(*ZaB&arRd<->}uCqOMH_1$c8*n={ zM?)Mr0M#C@uTlR?uA~1CbH9&R7&^l#(jImF{HEC3{@y!p#~V~Bqc3f}{Aaf}hZ?XX zSo`-+@M<>O1W!*w=kT!#ZXj1h_3wY7Z^l2=uZuTt))q#Dsl&19_IS%m#O6kNx;h5j z9Y<_tnnMhTkdVafd-L?8?en-Ki3505rJqE!K9o|^+*D?Uhuqe3{`Do)T!Ve^Tk!$1 z<}6syA-7ZX30qiIMl2baxEm-4R|}vW`-|P2J;cN&ST@htJ!_t2BFa1l zeVIr8D@hLk7oMQzvuDp-PR%h9RtPDfe%=)vj$H9x9S**R+r|7ZCO1R7mM}Mq!%eOs znKET1+arb@_P23RdGPEX9`~#(>yD-~)K_b%B<;jy@j^v*WcZs88h_vbCD&9;Kt}*n*K-f?D_`1gb29q?q1Y z&+ZgX@~}rimB8U3ux0*H&Me$jTV4-2tYh^27L(!1RjaCzThL86%F0{9);zG^H1rMP30caxIBJy13(Ls2q?7~EP=8n&B*`*<+_9Vizpha+&iHguT-X0 zmArW)spskZv?JUK!_4l%Aq5neGq0;r6u{6$MniF|$hO3h0w(3u($OmR2lb-U7%~mk$w72(-w3}O4<)Jy6v~!P2WbQvX0CW*PkiFq841b^dri{D_SZXriKIXJ z8)x*U`p8)0o0Snx%(7`o{nG%<*?8P+(T^>>Li$9g`%fkZdZKZ8gS*}XZ>7r-4-FAA z;soAPygj4z+iCuDLQ^E2yB_tmt6Y&o$lLDdEn_bPED;)2pk7Kh#@}#oYd=3fPBy)? zq+1_>4Px*VH5xHeCFXd!EXSat8Zd)IBVyRB{@?x+`V*X?;WXiG5BZ{MBsPuEvr6~L zSw5tQYwSLs)%H;+ZoIU%9H_Fh1B`S$yE{r$GTmCWR6gU_mshT3sKb=S_tRha&)Z|C zedo>Nu-1gu=y@#(N)RLzA(IU6TjT4{)?fT=gTOZHD&q=7eYjyQ7S z8~@!WcBp~hKmUzO;8itkrNo6Y1+jz}#d{)x-nD2|u>bF7FXdw(9(tAcb`jwQq9;^9 zLG8J^wb1g-l}f)}+jxab&g{NKwbxC^M)ydzYXkPGa@Pjq>LLL^fhiY<37AC}3$_fQ zrXkL`+qZA`UEUpPLSoCQcQHtVxA~b`PB!|hT17JZ()IL(o`l)hO;kn~!pAB6Qf^%4 zXxDBxuS)WeyUEB`6D6I@ zij0w)VZm~Gk(&MI+l2}u1AkW zvIJxIjkM%5_+y^3utfjX`w^EKA83-RKenQ0P@jLYN5^mwFs-ch@3)!^8 z?tPUUZH~_M*iiYUWutc=cj7w>k|0>^F0Q8ipTN1dYWCHsUSmd&uIImNamVHgj=4QJ zCvboexFKKxL5!2@VE$2df-I735LB~*#|qXBI80^{!G39oFo2jXX?{M@ybh8?tBBBz z=}9xuV^%&7q^^a0C*B+u1j)hdJug1zJ#_sV7L^BdCrG=zJg9&*%mEVg;5B@NYCf5V zlBq{_iee8z13I2H%57g@Q#SPt0CkY(Ft|^2SUYSSm3c1*Z#&k~7RNXkp+wL%IcYii zJjC(_FfXK4mJbKvCr=io+b$k zn`&hjya6+1gX6I^QKZRd9&!Hi!LwKGBzCu&qV}H_2j`QV2iEQ=X9A#&S5IBL5=33X zH}fM0@2cOZQKBD3V|v`r*9s@1QyLL-ppfUu!Z(|LSkG?oZ_EN&)~_*zQ+ z=7j!SaR>J=sXcPZ2X~V#qckM7F^ll}O{%Kj*LTEdp~=mVz)1NMeol~po=#FKpxMFC zu(q?9)D(avLZ8`Fe&DNEDDCDs1UtjLid2xg8N90ueFyfiF`a&n+4>=>bl?FLEQ!^T zq{q#9!A0ESQ-SES88-B;S|ytayw#@JNMbxqmWVt$aQVXEoL?e 
zoGjvR9^UE_@wjqPU|ilG7;s7>;31Q^-8if0&PTb*gpG2tekVt50`w$~Nim+bj3APM zd_w(9d>i1LZ~7iPy}IrA{jst6oV0v<8hNj5hLDUThyAr#cPm++(}XXR>J~MG?T--y z6~>sCyEcnVlMESJ3Jp$_Im4|~$B8abDE`I2qCR`#1EtCtaZ~vnF2~KfaYIA#tY(bO zWT1Bh56_!RtR>b(lE|Z6A*G~}^!59w;9(7BofH%ns$(MnTbKZj zmB#cc)IVXi<#cE897TjiCA13!CrdP=|uO(Vx(CL-u;G>Oou4(|>}u?NpFAc6?CY}MJH**N$?p@L^^P9Kl4%jQuC`%S zreukMYreFW%GH*e!cvp@=B7@25~OT#1Nf%voXb0ioX8i^v)p(k6W`DkOw zTR)|ucJ$H51gOY(km0+)PM9m479RNJ8MEU3unuPI;$jb1W-miRl1KsipCGk!LB3Km z)>c4J`q~0oWs&hq9YT*?ldDnD25LN;>VKEzA?6u%zZAmDCetx1pVh%?cB_Us&^{c%%V~S$^3XYFnryxC; zlQ;55mAVGv^|a7RF*t*xk(;SpFIXmmqjZ<7Lq@h`98|a($=L!hu!=MmixT7SzquX& zMF?U!x_!aO_EM`PfNXPd>w`uYx;qc?Jh6p7s9S>-RD<~A&(7{E@U7uE#O(?<>>aO! zqVSRls~@LLYX9V&OzO+gtjSdg2xY{3XLX50Yb^=71~wD~jfv8rODx1z*QRJ3Y-q#z z1ACY#*u*Mj#tmIvi#9$D=Ju=3dEwdCKVtg>GgZS%4c%i7UvM7KDQ@k&q?h7-FJ4z5 zdjLf0?l5yTERLqqb(bCEPB!l5!^Ytj9ndpJ5Iohz$B(xq)uJ!53(lP0Xkp{Z(c&?s zL2*{;ws{EdafvIJo96`oKjIanq-eTLUZR_@dhv=++gq+F%H+p{WZebi=-QNtG9>=G zpY5QTRRra==+d>T%nsP^6CP*hHKZ5l%$Nc)!L4$RcRPh-?K~TQqp&;Q>F-~rVkAtw zjZ-?9rC`5i6b*ajW!HOXUA?S^s^+vSy_2Wx-Fq~*^APOMy5ycc$yKPlXOACZm%)wJ zAVRdsmqgqE08EdUkBQFd^m~XL-TL*baU%xExHxlMk9Y+Cm3kW_v75Y}e;X+`0$HW} zsSP5-Uy0vWTsED=<{ZmF50?l%0mQ>40cyNIml;Qe4)lSTka{B^ErC3eJZ**}}t zZqiecpKE)B%9fsyp}{YcAew){KOiYdfP_V(-2_%cUSkG%Zh`6Ye8%~#P;^QnfFq&}Gva(ruLs9IDMP8@!ENB%^{oUn}F zQe8;YBDtpW#YF4%0!mc?6h>PH`zQn}tL%yBX&n6#CmBUA`$gL;H@ypeCtVT26A3?y zlPg;0I8_F(TN&^qD=)GW8z7%U7b1uS+&AW~Uj-?85f5Ny3pRCg|Au{I9A(c&Lq{&)!cSw+{c*;7`Y3L00Je zZJ$tU^EV_U9*vGSTM-C{1f-3vKWegM^D$&hGt5#6!IM-WMt-UcP-cf zn1DKpG;ljL1x*4DF|i2X0Y4J^lVOxXqy|zKDc&f!d?!qVfrcW z9IZFsDx5P~o_RsfnwWE+Pq zsu?mOL?h!8VGVpgLfe3f-wRHjIkS1LS9GNGv;3v3UmB5aVm?IWins?M5Xh|D$!p?s zVOD_!KL*7hr7+RZFk)d}BIlWLo!&g5VD4d&dyRncraEwanmQiHVY%+l4$%lJhYyaG zn{StM_Dig2;#XR1(Sl(SIPiEppVp}K;4?8tkr^15x28cSbs84YMj(T#UaeX+k|pfN zb_&f}!8uj=5rLQ_0k2|JQiJu~xw5kV@6UB%qtq0XVhgkrr8~cY=}lxFu=(| zv3e_RFmbbS|1AG0Y+~%ZJO(nZtZ!(cHe?OBO2q*iE3+!=86r31orZtj*!uX8W&iLOoFcE-KQB#2CE<6$Ugl 
zssr&ydo8_ryCYF-!-#(_-dj4uGLd+vE^;+FGq}x(hz?@|$oWwOf+wHmdZGwD@t7(A zrLLEq1^edS@NZXwb!6s{5lLO03smF7D^I}pau=7C)d1E%U7$mBDtR@Mjp^pLqi|RQ z&bNrv$Pe!mkzp4YTPAZ;Ol^f+X<;&Kll~%Z6tnx>zPS0v#Qbx7os?Itlu3dqP3eE? z%$XO+lx0t&GeqSC-#WYiY&zV|6X83WmgG94$L2JuW8I|NO&X@ZckD zGnTbcdJK$RU}?EcXJPBE5s^ApEmo*<<_0YNnC)g%-Sqk-z0!b#Q7Jcf9Q$^&%k&95 zdhI^;?dxnGodG!n_4$&o)o|iH^p&|t+MItQQ{M1?Br}nTHKfr!x~3)#o1Zjfntw2; zj=6F6(T&@-B~IPtKLq9S0m*vhARO)un3Mt?W&ZQsF%(Q2@$XCuzXNRN`}W|8-X%>H zy%upj0v&4y*G+L?vTq}XHaqoCqF=?1J3H-)nFQw!985~#UW;Ju$U_I#FHA`~;XuPT zAoVv7%FLR>Y2JX|Y5g7^Ydj#Lp1!{E#Bju4+ltN`J$Uiz)jmK4Wyg-A=mKinXeq8H zlWOG!{;jR8ox{;(P``d_GPKHn{7B?@pw{HSY8V!bVj9rIX6c{u{o$SbpY^CaYXvLr z;o9c@FLlp^moviVO}UB`)QPs}SoX=lmS;a*Y215Oejq5Enf2(SlR`NyjW`}14~}+( z3FhLK2Y_?l8tLH%HJLdt?%Leub9kp8XxQa2hJEs64q8b5#uvh(-R4%9n3>taxN?QV z7iVi{?WZ__M$w?{&~2)i6$KKj?Ai0-*N^j7k_7#5EpzVyP^NKrztI}srw&aHw~Moh zYSPr;YLBp|im>D1MGmO@N^|?asGfTEU4IS5tOKm_Vy;#n#Rf_B?(Xv>Ug%V0Ckr9O zbD5jehikn&fK};{`}Y1z1NK=Id_h9q1{RjF@o2kYR~q!d;=#1uQshTp2skHd0a6NNYGVb1WpoHAz@y7#|PsfU6V`FprRhMu)B;0_!hcV4wVHxqF zq$EMysl0_;v;xNwvjB^a5T*a7;D7jkBk1Sm&GAwUSP3mu%p-NC89;#*>Dl%+r@a$y zsR-jGEUIn&mYn9ajzLr0)O_K+RP$k>dM2MQ*B#ljWxw@pE1v>%p(7X5+}z!Nck&-T zY-DWb$ypD()XN{m=#tAwn1abvF8`N(qeI%oH@>XlUJ(3J-m8jc@yWkvuDFxrp)7Hf3tP!u838uR+$&7^)O})Myif@3_i1VQMTC$t4O~4 zqzALrhQ4`n>KJ3o0rbj5UzG5X*>#)wOX+kV9wS8mLk}V_mYcUXIrTo~= z?C<$obKec*iJbeKGpzU0KWF7GTl}W=U7T7ZOAZ(zB#)em1M-8abw$|3=bn<+n(sdI z+?P#6k9NpDDn1KxqCT9?9$H0XGCy^z`0LX?UY_dz>}WgxMT@2`YT8DL zuWn;xWHiF{CVA|L&JQ%w!cG2^ow3zbq|FALNamz;uhZ}Q9?ZUzT{p3Q;~TkMYGy^%#ZXp&#$j%{myy zfw0%UR_$RnS^fr%8Y#1nqBt_Q-Pzlb48QEz&+jvmKU~{m#>4#LEd$PbqyLY5`Qk;) z1NTK{BbQ{ZX*@M=t!2tLd~_K&Wy2@21G~01WQS zeFJ2eIUaH=-}V(aY+J9n``}9sv40v)oX3^EuNS%wPHV{A9EZG}H3gSDmhN9<~P{IEWKbjNGt5i^4S zdE&(*lp}*~ypK_{@%4!T3=0Xl`fd36@Bse)Yq*Bu>J{%3PsI$-wpFVkz)Sb|$N85g z{`2t2Ab^I>R~}C8yQc;IbQ|QFsYqmYdT*y2c_Y`mQ8&pJ?D^F9!B~wgI#=DVT(UX! zRQcAcVVzz@`#UHUf#yfTdj%TZ;;<6(o?tBFvE|V$T|6!coC4F)L`u~|x;5cUtFf*N zr_-vX#?1^$S59SaVvso&PcQGvwI5b4F^4Lz0k? 
z`hsAw%k3uykjvt_wvN8j%=6{@cKH=n-`Q`;E?zNAz8O<$; zNRnfye{Vsq9tD&nqJ>A)mcSah#)T|_8`S+bA89w2MOK#Ef1-6U%ks=*^>^>-iXX6W z3;>nb5s8E}$PJSjW!v>_dGlo6mF5TA;Z#9SkkgPYrqQxQ< z+>sY9T;Ku4u*s4Cxh^c;yQZcl}9tHWIW-&*wIu?kw0D<2qX}WT`1J>!RMB3QzLykoc8+)OKWF|qJq$(NTBY-$9p1aiDPmoZ zfbdOfm(5YO;U3fgt#FEBLls3};VYi=dSeFXBL8Irm?dQ4$L-tqhMl$l;Tnt8ldr2i z*+|1W51jSwk<4ay=iV~kpV@5DaQ=h-9BI}KySOozm*!S5h%pfaD)gO@j{K0jmB3rQ zA08eEw{!>4>rmcQmj0W(6UiVG`x$NDa)?E7-LsmBX6b7VjniZo5S_Zu&CGv$P@IKb z19P%`m;%-t%LiK1NJEx$IF^F@N%_M zxq+4E6=@91GY-7}y>E@DIZ?L?WnpO6Y|z459wwKQ6@EORQ0(!8-`;JUz*yYGJT8ZN4_$fWuQ*O9@^ob;wLj}74An8rt=ja29$;#?{r8qUuXYP2L{42;@ zTq9*n`%N3w+wbEZV2oIzdEoV(O3TVN&^$r!>w}-lp9Np1hlh`T!#$7~e73##&=C3} z=DGIzgBv&Mn}51oH*C$CDC4KIGn;Zr$aLuL8WRI`j<@6g=I6~mABqupS_wbAzUn* zI<n8FY1;QPBw_i+0k8nH61q(jU?RT@Vr?d9K>$Nd9w!G3n-YmiN z5yp%=H}3DFH&1VTW^{BgK0{n=(e~B)fI&?K?p;qRhrPLBu~ecB@|!8ka|# zd`Iw!;4o8Yt=s+lJvVL{$zvt_Ylp2U=8U?}8r1pErUk{f2l8zF-@Gj^N;!B}9G_O}nrHc|C+^vvy{4tPayGUs z$#Tzh?jJtSoay5}I;32dS;mzs(v;f`A0FOvqPoA+zTpjjGsa2J;&bkcQ(pN2X2~)) zK=f|@rITb)>YuioG-+{%gXp)YIWrtCKcA~V1=N~K6xFDQ&wsRLmu=^xzj!yij_X#jPIwI>U4Y-l3 zQ|4@BABQ-9vl_&ZGyG&%gxVQXyXX~?U~T^S_8_)yE;S;B%R!!UB5&QKGa3s%l06x7 zezQJUG#lsrKYYCjT#kAB{(sp=wi&Vw*`|oHRH$TW%vf$~wvv=3*;<5TlE}=Mg*3S( zJ0X!MqFallnIREHNm40_wAsrN`oGUR&-eFy|F8f5cfVfGGiKC%U-xx=KIi8=kK;Ix z)1`kgq3WLf%MG*VHJz;61jr-UWHfy+LDA96qH5yf;-rSYMGRjBwMFv&S9)K=VA*<{ zVoRv&becE617jrn4gmkk)vKwgspH8jwsZUdA4bB-9)s{o3v5wr&$IWULU6<&YR)IL z`})1pYZlgTw1kXX%7XxtZ-@Tmx&K4UZwHP=12mO8PeWj(m7t6V>NEfli*of4lwX^##z0as*hvBS!k5qM=!3< zTmHMG^Qe(e&bhRIzPVyCg#<^eVmJQw+ftH2pIPsh!z2yCbu+-k?dZmlTR0;W!ydTn zo+Oz(2Ciw}Fa2YBHlRT^P1RXgdn`)EF>yU7v)XH-iIuaxW$E?7x;fVjH_g6qyot>{ z`lj!%^WQTA(WJf#Fm+AQ7V}qkRuk4W!}`CTW!+F`s*Oz$JLCS@h;hLwxodmVUo(hq z$q<*`hK4EM6M*8ElO^W{V#(y{`;}EM5XEn;yxl^Il#MMU0Bs6lVaUywrRB8ly-U^8 z+u{g24XZ=E55HT*@Zs9`>FcGiXYEWTi(BozKuN0Iak}eEVx8H(^m~aj)MRv9-D?9} zmf@yU>`xL!abm+;OvRLAdq~KW3Jd=qhH`2lgx;Y~Yu=djp|1lfmw9>9Tu?CrC6=jq(Ldo}8bX8G`zvq*Zd%5A@F54n* 
zj_K+bMpi_IN;dR!K#O7(xwwF2x#Joq-7J%IjmFg~FdS|%Dp~HLIHF&V@k_S-K|PQ< zO>R%4wrx{(aC7?f=rIyqc)DFJ@2PrJ+eESXioWbQpoF)kO&)fHnh7M8FTb$Q(m8#L zR*r|WsUp=oj)byN$Mz{^b>D9~0z(Y$_4_hD2E9s-2LBoGxC;Z3tfB`ythA~BzRlNP zha!+L0XKCbe&xyG*IE~zFUsXH* z&cu#sTvaR<|q#AAz|l`Y9i-=tvc&o&i}6~+3X z3BxHbRNr?UH+F2w%ioschS<({W+;K<>B(~s<`<0DU4x+Hbl+Cz5Jw0cqj9YTbQJTI za#AG$Z#nkP=!t-?QE-QpMBiueV`yp0swRy;adev|A0x47 zIiabNvYD@0Mrx3W-OOcLqHy=kWjR;`xyO#dt zMl+5FOO1wQi)+XmV+YL%F`g7~A9N-mJlbS5EbuMg=NIg3mTy=qT~il7lo@T@xUm$q zPNu`M3y&+t`v1$4XG9cp3A1vhD2b({;uqgNYNpEI(7uBfKi@mo-Lg9OwDa!|*44P@ zx^~HE;Q!Cj6_2lfi-bP%w};{1pYEjikMuyV@4c%Yc^N5?Uh8_*t1%_L0O+n6DdWH1 zkaU)jCI$n>4yqKo0c=sSM@cd%K{)EOz-d^DXZ$l8#52n%)h+rxAQ#iL1i_?3L$)Ue z`Y}+Fgx6_S(K%N;83rIUed@%HY$hb0VrokJ}772j~2nhiD(ad+avhMoVeLq{^VO&%VBcH_QIj$jq^PMO7C_)xEsr%cc5l5DqJ zQs~03lII`q;NK&upkPFHs>Py@_Nn82odf7CezmWa;RGoYID%v;OV%wp9o6T7LpnOK zLch@a$g;~I``KIaxz(^YL6XuC2Sixfa_m=maRKROklm+;71qmh* zQo})1|NXG|eL)R$2QKt(V7MacZ9t$&vTbqN%`-!*?=|UY=ms1r^qNcuMPYddM(r^p z6>Jg5E zO!kMau5K83=qp8cQ)kYeP4_uWmmlbKMe^rS|6mmbgkz`{`UKc;3GYRQ1mLebi_|)s z(k~VvBM3|JlAKS}&YXRtk~`%4l<1170_@j+JMEirBZ~S|dgR#lk7t0_zaNw9K3Mp4sgcy=aDRbe~ zb))!aH6WHoPy?Wl5+w9?wvICq0EDk9_z^j@sgVx#a6o#xAg>?_6{ZV85of@n04^;C zz4h+cMp1N^(N(zGyJ1p<1SZdIsWX$GqT8J`ss4g!QuhzV+a|xcGi~w`_K=*3sD7Am zy3hRp2K5&(xB7rh4}g1LK*jRfwr${DQ;AiGBb|9ih%FI|h{iJ6zojP9`ntNY$jdP8LS8;ZQH#3DuCXa{&Kg z8SCFyb~W?G`|xs z1o1=%Bl$bg48SXaG_f50j82OdS)i9k_(Bg}?8Jl34&J5?F4CJ?KTy1=wS?YbSlGp^ zNo@*HBln6fu{-FAj&sln8$uC!7nPi&bws_;pAxqIX&Qg!uBYLRFBy>LmgAuxn?l}2 zl4>l?i8&&$c&=s2HyAiT~RA4 zqr$o7L#8w~EUQ}pOi4)2`Gz~a!i6xT4x?~Uu+qbcEu?|F8Oj7wDOzkm1cDM{*CyXi^Kply0P2MF6w8ebeCcKZ_IgiIix z^0XPrxCle9!xo3{wcxWArQ#7K2$2Es=zZDq%F4bfLh!jo#@*&nZ&neR%MgxCU$Z7* zzt5A7ei#P7UlUX$ps8TzXu~=>OOlj(X?5rTS!PF21Rw9wqaDC==a$x$Noamr8$fn=mQrS|Rm+8yN=!J%*S9SG@^=A8P94bqg> z0w=ZTY;h}RiodZoudv>5J?`b6Y}cflJ)nx<%%kL7M9#Bz3&qvO8Q@59v1_}I}!-k0>1zsRBiVUfdyq?i-NUGF7+Fq=rN^X8uOU!{Tw zQ&BZDHuL;WGG;oRWh|Oh0U%Z3?VWyln22{b&cdA!{YWa{v2zqDOetD|7I`w?w}dy70m*LqgBW#q+g4%B`e 
zGkI85Flg#QU^qm{?nJ#AP(3FdffR~aUu0-ffk~x@QsD9BV(SO6ACsxs|B$MXz2$y? z;LQUg=15Bu$Yt^K=l#EEU63tGfhi0M$yIKb`aZ>5$4rMASahkaV%=pgED8TTn>^Ib z=-z_2o?UHbcGz5MrYIKsE?rt5(x`>6X)?^i<#|O7l&JI=VRZRgINBi;bno!X2>4teXyOSPX9D{IQC zHtE@|KTrvuB*p|3(z-uRSCU%b@tg%7JBY%RAUi%I*62CXfO6Cq-lK&ed%(}SN zGWFr1TjM8B&K4@Q=KQ!_I|^${PJV@~V;I_49@K;lPZ}!4lYk!ufMJc6!(48{{=W`h zDh7~52WZ%>o0K%ps~RYVm+?F}PV4a!awZ`U!dyq}F{k8dHS~o)PhIHkMVA}C`gOj! z|H!{OMpF$h#Kb5R|2kxj6TtRQ?Msii*Vi$}{3IC-darB&F!2x5o$gln8;vfbh0+vG z6^LN=$_pHYWe+YS?qB;#1#tLpQ(@=)_c#z18BBbF3n}G_)lq#t7Y*Hsuwx~NeLXU+~fZl?{xNzn>McG>{-K02_FI~Dcs^-+?b~=#(!)|f9 znC^S~=FM5XF?7s>QrTBW11N(7hO-ZHmv^t%L>O?6rILycTtU)ASF91_UWYjZHh5^G?N8n4Z&t;Da z(6Gex^~Q+{bf)?8&;i7&?Ou-ye4&tD<&7ZQf>%@j^ixwsXAiX*@|g&)(u`eQ?&4j= zh`UVEv!QZ2s-qmk&!crOLklMBdHABD@Vd&%8iWzq)FB*LIyz5Bc-V^|&)!qZU zWfOO`(Vnwj9rbB`W8GCCMm88*qJ9f1UCSDDzlvdmEhu4B>7M=jJ#roTM!KYJ_~#Ij z$uxPfOi>sxZ{+@(^+Y3}u_3Mc*Yn>-m0BkG3it9XL%Jyd7tV6WfM+5nw+&FZj0(sn znK?c^LfxiK1M8R7q*5szwv{@Kqr^e7+Eu7oHplAuLy}HwbGNS;5J<^ov;I@n4#s+& zpj0}}B^*ak7VZrLz2@~zbFo=>4e=SAjE&AZ$*kE;4*sPZ&W~rGvS_1wy9M`|G0x?B zFk{L0Y*W{(&ZJ!vj^2(lH`VJRc?6Y_2rtETRhH9KkbTqG_r#vIoja4=L!jFPo!(IY z>c5ke!l5FWfC6A@(?7;+%gv=T{CMNlXmQTx8tsAY<$Dd~y;*GOAC z_#majD2+RSS?5;&QEnv+E-J5a3Ud zZ&E^VLe=j6r>V`(wR~~SNGB12LfX2f1a&{oz+4bb?@V$~fuqR@GSTMDI6S(AN)6ii zjPY~uo@1dw?!ZJGM&fgf14x1yr=o=STno~xOJ^RCHO2iYoWJOP5Lmkxn5!j0#S|9_e4FeJxL^^xA7>x()us~4Ifzza#?Og4hDigO;b?AU zAEL$(`gKUrNyYj!oX!Ygf^h3mM}2J3s(JJ2VkKRw{j>~pJ7?ia z2GUI?FJ2IJ@dupJ=3t?@y-q&mejX9Rs~4>m-5|;F;BAqt?m`b6Em=>`Wc{8!ml8c^ zC3r==5ie)U#osUJAL$Yo<@C9uXNV}`xpRCQ za=Z^99%P+Hxm6O5BaZIR0Gt_!7X(;mBm_H4V^sLC+rnB4hYv8Yru4{;H*k^T)g014 zl2_X%|1yrE-Frprq|>a%{ch310ny!ii{k;!QBQ=EkbTsX$G8;|0qXT^q%#S~mnBA< zkZB#1m=!&zAm_9adhZ?dQWfi$-vQ@b!sI}9o?TRpIVZLxe3gl zRuwI1zwE6Vtf(>|52DaRO-$|}L4%6u+I95pZEnfeh#9G`QloSM`(? 
zN0Bko)E*pQBFGoxRd+r={UmK)B(naL4u?UwT+tGEPtZ|P{z5OHW$4-IY)JPKv*Bkt zIS23GPo2QRilDk0V$>(7zwwP$p<^8`u8rqmQw2VYE7FQYBsrbIU31tYL0hmw*XRr3 zoRgx)6s@XkKEd-jCD6YZE*N@t3jC^ZvwZa(oOuDEld2OSxwG{$VJ_I|Y8QLC@cfwi z)hLOq0FI!OEUa{tZaYkMazk8l)%X}5YozET_tD^m0jZS&+nWi-Uu>6-b6b?``kq@0!E(WQ`zNR1d*O| zacDvA#`=x6kQ(A2e0+v_!Jbc34*i&xXoima#$eZK7V4U^h@-(fcRqc3%sLR2L0wH7 zl^ey0pmAQmzP9t#n>$wl(64l^SFedc(vKfKI)wESqVs~B{<`uA;8o^e$_-roEN{sUfM4?{kWB&cq0o-Vqhs=c=tje8 zPo9#-BSBiEYlc+nAjRhQlwyo5Dg2=A4ANi*2^mTLGrR34Q(T}H%NDvH8!5Rm2bCOy zEhp#{SCIIrqi!T$+)oyk8#rv}&=sA{4^L$DNsF)sozcoJ)E+L(r$mG?4w!2?*bTt1 z;;Dy8ar~3Lmo8mmRZ}=|niu{$UGYDUHLt(EKSjej7ePZ55pe;sGP? zrbug)5VFWImU5JA+O#Pq_D||3ZulUSy2Rr@o9wh%ECfNAUgO-MY=-zjk z062tvb0$i**e$IT!}H|2QKa*(=xA`y-ohV`g1+}QFt}2kGkoaKylcHTRA|3}OGT$g zj~{O@{g`FG?(m^QB$lP>ifSqV%gU@YZXKqslCM<(R7=MOI}_>O z)r-j;8Z~bGVDa>B1w@cTt$8bocDeR1=@ z^6`tbQ)r6VaW1}R`Z~8N3h4MqQLb`uUR!mr|KWWu_Sj~mW$l06G4eZQTGB?pDut5>)Xdw?G# zp-Ky^w?KOz`X&wwr93mJqYKMPOYdCzT-ATZF`Mqw_6YdHOasnSf@7nOo#LX9p30uf zaVnrOr1IUG%9y3CEHWv~_UYC0jAO|@E}ylI#D6J(ghX!rCFBl#vSJ;@v1{eEDhwd= z_bmXT-DC+}UWG&wGE4MJUJP!b^86uV`{S7Zd#>tU({x-0@{{K$&MC8HcqeM0f>pT- z@&Zx}A^bM5d#V4-_2hPUG#gFUm&HlNM|J0RyUA=Fi518^U>c|JaM;Bwv)&%?eb9hH zP6=uYVzmi(iG+v+*VN-ws@o*BZ*G{S--{Bijs)l=9WlD?+JzI4tx`=u9*)a!gy{>q zjWNy!jfe!kTFkqXnso6z3xQH9+92R}w~}{jwIAWxQ2P-CCs(jJS`chFY|I=Z=`!NJ z9Y#kvl7!N}dKvl;8QzP|y6um%3ln1QJ9JU}^O&!5k_ODZdJSxVxPEJ?cO0%~YGGsl z9t-H&l0BQByqVCGJCm=I?QDF}uYTM$$Y43rL{bl+NxT;c8xuKJJ^b#IJ`wi(;xAPoWjPQeyCx3UIXiSkHB_L4NL5sO39tuR0{rTYk(eP&JWNc z4#ZeddSb|grKIBQ$s!jYh9)fg#It&eXCPUdpmUUvL>Fn6+^5%+sEg9HrQQKlgNo-s z!3z~b8%Y@5=RD-jUQbVdo>0kz=&;_?!c$^SOc?)Y;SKATZ{COy%Zoz9vfseDP57)c zsTRbNCQWi(jFTR)`VIjh=dUS0DFo%2BH;f4#Ngj+16j=3#dJXmf^u%p8hYYP+s2o}NyCN--6xPB zU7Ut0S#l7wqJO!1v0h>0$477N%lf5@^*jc=8PEke`|Wcpd2gi}B*pq5?pnJ`tFlt_ zKfk`eC1puRMpZb#OR9!cUzUMSP1GrrjX`p4aMtsg7 z52O)z%`{oQ**r%ZAmX}Up`gS>*p6s9$&7Zr~4Q8h$nKwQ1#nu zg!&#tTHew54bs6fZextEN76279OB`lwi>^N!cJ&&E34K)C&_%JFry_ji!C)B(K(x1 
zE+FsnOuOdRTj$x^YEoA(oSnF}tKwh8?IK?UTAO{&QMiiVgSybj;g%~B+y@UHsH1cn zA$H|?4I~*LSEFCr%(>mhStz??UjdK`fJcq&>8&pxk~A>@*{bdup*+$aTzoJ{v3|G` zuTND*-*b5@^1YF~1$jN5*AwJV1Oz#nRcr%z`dEUwbl7o?t;+QA0>JxMQ zM^@5oGlkApIc#OR2ZMmFa!;eLkdK$sNT9T;yACr* zCua{ku{7W|$%wYtH(>5r^d{@&{WpJ3j=_t%`0-;*zF7_XAdvc2=+dBxuIcN0^_Bl;ZgAe@PI_74i4O{cZ)7!Y4L%ff*_PZ;-WOSipH-k%}Tb4qJ zf$>x&cK&NSS$NKk$_+nqL{LeoPB`)0OZ@!&2JtpyE^F79Vz@#U zScY&Y&>57d$RkQ3o=fxW(yTthl#wJ0x&F)4`ikjKK0c+f*-n`>z8N|PAe{=Oc3ZI2 zIRJJW5znSBes6AfzPfkB`iqtC036QwE=*!-N6!4*&BM<{J-8G{{riDxc!tK!-m2G^ z3)nq-ub!fK{5X0;7$6IIz3cuD59nVJOhSJNpd^hUN=h2D6GYysT?xOq*b96Qy0O3L zcqdvQXT`5L5@5n~*D%y+&kxi-Tlv8x`51?}w6;dVWS?21ea1@CjGZ|=htzwArFp2& zlpf30XNbm%IU}T_eZJ_!?S1HY*9BJxuN4adG`5-^tN-dUY#7a# z09Obb`nD^K4ppuZZU%amD=tfijOA(L@#M{>wW`a5Zssbku9x1~UQF zaaJE!2Xq;B|KTO=t+Y97B1Q@!YIP5l&;G1^-4D+}Fq?0R+>r)LildT9%)$2T%j8US z#__`E!7sz3dHU85zoyNBynHEoB2ns7P;cM8dv|1Ajy}YqJ8?cuyG*W!^o;~~*oQ&h zoKOG$`>%lteD(Aq90`@e@RF2`k2*V!ZK4z#17%3}p|m2d{dDu3@#ae@B{MG!4lpQRL0&}&zjsFQG4n`d6cJ8U$je0@sHR)A<#Uiq$v_Re} z6kAXl+*fIKwC1*H|OR#Yl8@KA> zW+eGgzHUN2#otj%6A4_x8d640AkbacoOH2||Bypt?m9a2>aT6jRmMSoBWL7D6HSvO zO3DIh;M4W#y7t8e`n$ZCJ|^iSxvDU}FhxhG9#il@phL$NX(h6h?kdfvaQPeQw%PTv z9w+5fG#h91IBu`@neynvoC^Ua@%LN3Mg%>T{o`TLm32@>3&%LM$E8o+@o@v@=s=@5 z37_bLj7Jz3A)~;dGrbSLquL#U7%*I8SMZ|;T3IrdaO@3q7d!F%RyIT9LJTCU1=6Sc z|Ml!D6x6PoY^OD)2JU%rQ|QwZ*$XJn!f2k1l6?#%BfoK!IAjSpqa>|*IkSuZo{^kl zXXz;N1M!eEI_$)bhTFq;Q)^0Fiei3{Lnb$nDFC-;zkU7oZF+SHJ(X}c zuQ_mCEA}%~xNs>s_7^+|H6uM=?P+_;8%W0lsIyHAy&=m+#rRhQrdjV#t6?2kNdUysCrd|0+c^eXY$q}^)QA%rR z9x|wtLn=wV1@=-2phe4crD?dyXyUa%(LCP@)1ItyZ($*%zXcjBQSKWHX>~CZp6 z>SKTKtT4rw>GWlDK8fYRF^|T0qdFDE%{Q&udrMh=-CW@3N34iev8?>osiOyxO zEH-Mv9B$l8Zr~!+Vu}(o-`+MV&2h5F!u161ysxAzJkM`S5rNxH{O1hmnjsAa{$3(B zDLln6+IK81dIPcjZ^rR){LIAx*IrL(lk`A3n1zz!V4;POq>AGM#M@hs?Ou3*Y`9oU z)y7Nxu=TAp7)e_ky>by`Acm08(uD#Nai%hxu3wnjiE8TMAzWSI0t(YL6{oLyF&<3i zb6`+?-FDzr8Ps*6;FC5R3dZ<{COAh)b2lAhlp6QD8?Z>Ksf+*ngEi#DbWwxhh!W${ z$5)%N`7l(W0FC!3pkXNz-4GP{9qwGS9^Beab%F22pffQTUL0Zfr`)`GQ+3V%d^7ZV 
z)UF09l#UzeP1xM2%SwWkricvh!Hb_{?XmzGXp8QEU6>5`(I8FRfd{;X%-l*+^SNC%#v&Mamhvyro+`;is2}zVr}B0F=M)vJ6QRDu8$r z52UUL9V*+m(|%^?S1Xr$!=`;h&AZKHG}|;a+q9TUVY7bk9zFhfa9dG)ULZ}$3FJ6g z3Dpod(tMLKXw9(q#oN?T6+eu?gA8hC$Q)-Oc*K#(-M&^dTQp01rxLe#*#v7(dvA_X z)l{-1n(d1gF6T7T9{A7xhu%p*H${&brp5&qHbQ{KIi4_$hz503 z@@N~3!H%fZ#5}GG6}h^&veNzB1R&M%(q;|baPOK*sWF856d{f z63IrFXpHXXyDy>~us%4#OLF$-ey-|hmSDu1r_1RWm2}!#2RVGjyF{GLNavp18_cQ( z_nbj3)~p+`SrKXYYzk0m`xO1roKIMD8JDh4-PJ(Zdl>#mnv8!sZQ(Huh;6l zhQ9n&;;87n=u_6r_qH|F$xH5_no6eq20C8aa$X)}V@H{SgwSdKJ+6A1bi?d3kVP1d z*hOEc{?;giP+DJ0WZeahlm1AS41&BHnQlj=^Pl3Mdk*2CZ68v>xOw5oCmEsi-$)0r zP>kaCB%V^R_WC&?J!_BRS-yilWqGgS#*(@Y13T$011{8UGJWlg49~f36Abx(s{|UFW29u#ktJMa ztP$oNz;#kqfduu42IV5L(D?l>{i2My2-*dBh3V- zdhkLkA$s>#rjsD-E6=9*c_p`?aj9b%*p~D*W0PfZC_3Pm8UCgG3cfPCrteA zg8*M_Us1m=!PcTt(AOx$?M8U%cIhHcGr0vTF$AE_8Dh7-V2cU;(q1zs@xh@M7_i2Q zu!pc&Mku|pvKE7znTY%|0ZR@c;J515op{m~FrYAe0Hr9Vh&F6eG+;;FO3ZY=t6dv) z6rM=1G8zB#a{%3Rqec5ZzdCFErNf60Lr`ri>JZ{`+JM!n%x%J&QHQOKjtTxBQLGZ9 zJ;D!z?tg&V)iwl@qTb?~lMRUaAD3WCmmnb^UCQ~TT->1Y~uWHwMzDW=HGKCbp zz5Dd(Q*fmWR590r|5leAT)y=+Ew;4$hW~7{79*<(01!5aiuFrn6E^FAz8fP1 z-D>_z!D*a2Rb(WzN=8yAH);!-tNVj2gR`iCLOBEa)HVNbwq^0)9#zbwD#5J@h=447 zDWGy%pp>1uzC^d(b!tj|m9(kQw<}E6dw%vyv4HV}SPBYJ&yu5us(P zWN7I*Te%>4>8?0o&>(bd4YUSl-ui1Dz9Rrgf0rl9 z@W9mNijrnRz>~O-AwmY=X#~TZR3rsyff5OLg$B*2jii{AqGR^g6oAW0 zDoql8lukMeVo*186b6Q+*Q;BDK1_#GK`9=_-AILiq#(R5T2^-E=vlKf-74B)`2s`0I66HUvRGQ>t^3baA*`sxc2r5x^v~q zDDo^@o+0v*5f~9%X-T7n^7(f;Q%-VV4nccQaff9j{X5AW5P0R$Ponskp!5)$^&k_u6DwDQ0wfj?Oc%tn5`9QlY~Kw|PJ!}cda1z?el0_3$M|(d{<_ua*@)v zi$riF+3ym@$O~Z3XYG5(vXPh04jSgmNCiY|_y#glR0)>Vc%u4u!J$-o`f&1=lg}-i zjs-JuTlpB`uqbei~gV&2Gqmbn2?K3ffF!e!RCsU5y^4xk0si&U#TkdiqZP^ghJyr_WV z=v@v{p^A$;IR;)8X?GZqJ4?VneNvZEJn0pqXLEUl;ijmcs2cLh=AqCNZ^Dl zzx*TPg}})CcGU(TIDJz-^z~b&z)KG zPNKIM{z!_#W_M*PH3|{lBhFsb1Z)|&&^ezN8W|?Sm;A)HyImzX!&U2c?CA3gS+61( zCO}9K`uMU{XzbNr3Y3;Bl!T!{2oJ zw%5?1A9t@LUaD#62{*z*f}f_rT7O`Gnwos&d+*`$D@Xia;({1 z&N;jN&83bH_+j&(wdCqv5As`enB=Tv()MXe{C=QJHnSMW2L2&$t84W3fp4__wHRYD 
zdj-g{!;KM<6VE*6!ZRXKcsYl<=$k08U%x&xXD%m%n--w)&vdy_M%EH5DSF9ggvLSo z2y1RjWlic^f(@|{-OxN7oFb4G4-Vmjk=dHXZu#@i+g!{Wnp+}qB9~l^&LO7@38YaE zCunW09cm$Nb??>itj+sZS2XsVX2o@gu{ni|(a@o@q0gk(2#7Eof&Cp#MPEAe!SnQc zr@FLU`7f@z>fpV5-KcoE>C3^jd;6h*kB_ljULZy7u4MSbgp>J{7>?G@iFkR^d1q)Uh7F+KW7Y7LyNg!XQHhEN3YAf1sx7EL z@m3y}coWE9ap0D`s8IaNS1yl_DI_smbMXU0c$J#EucECmREBp&A&5ev90vliK5~0& zt6xe+LeJ%jkWT{EN&^#Ua>A4Q903)l&P&xLD|RfSP0u5lqInE8Qkb?`sB#haiWU_W zw-)=vi^29G2F?MZs;fAYu1&mAGyGf7KMooAwQb#69sQY!G(q~@u50^GYaiuF z7OGedN)v!26aa}gSCn@npWzssBq$BjsP_g*4Pz%MP*ap+F?y{!nqD5ZcZb7rim3mP5O1Gd z#e|02z#tyJe%X$zE&E8RqNu0?ZOJb8>5)}ftAjt?8LJrnD1E9b>1^O6TAQyyS$4Xu z2919d{-cAYVR0^&Iz-kSC*+Oy)Bdcd_}oU3FNz$swvQT^>gMTm)`eJ(%Jw8cqFV(! z@%EZ7pUZ1wLphH|)-{-Bgk4*e3%a2-y8b{Zovx|ma0mbCs}cdS-_P}xS4U_E63d|J zaAkG;dpUJxmRLn#ZkT&tU@vA#97fAv$+Jn*^yL`*U1X4pu1M_QRoKwjxn=0sIO&Nj z4M7_1AtO1)Cd5oW7a1A(?ELFAx7-9=jOk&K#+C4qI!cnI6cgl<(@6znmoQ}`lw@$M z+I?eSWY_r=Cc-@*q3F~a?sWaO?xXxO1rX6kqC76abmj9kX#IMXj++uj1k%Emz5qq4 zso*4faHJm7IqZZZ6u}iW-wm&u9h{9k7cHi4e}Gd8XZodX&QdB~xehuYRv6HZW9uL_ zJ9cK^rvQ`ewNZS{Q@v#QG-OfH2?FH&B%#uDQ@(md9=2+bmij94ee-r-D|5{Si-y3L z!(3Wr!H%ZleoGL*izGrYc5o8u^Fd$vutJy+`ddJsq?Zg>Z=ThTm&(%Qvhj8YQiEQG zmSwr~6P-6mO8GarFv$?f*i4N(Zrx|s*RIQL9|5}i#KJ?{FF(GpK4}f;wD=(+FymKs z$1|N=D<}gGU+8e-bBSf?xQ8_dvgK{BT*(2vFgIPeq^sqMAJLiFYjRG_`vu<+_c_PJ zYN{v>HRe|R87F4mo}`kuV;^bAuSIDo`5k@KfKIW}EQu`T?n00S5kF}$D)y8d1}^qO zrURHBqAp(wL^MfDd09xK(AcNY`;|;7dYL`@obhwLFc@?LhHQ72bB|m>v>Mrn?w%pE}BTkMu0m z@%--5&w%qS`VCdOjS;9o`ci;()>aj8C*7ONEi+%phpP2qrg5$wV&@0K_6GjZ6)1?CMsNFKN>dk+PGLrK0E> z(Gd`Wq*eMoFhZWa`oyVI15RQTpDZn5Q!9|R^((4v(y`TWSq3SWAW~Fn=+OP8=h@+^RIIMXcNqUG?C$NE7QqJJ)UD3IOr)Woe z3zxxeaCfe!YzpUsRo7q6>m^?-zU0Z15L8ya-b9~8xzW&u>M+oOs||^hM_W9D z-6lOw#4##tz{IH8@k4Ju@uel{&3)Hsm3mxo;pFa$t_zu%NTT$m{cO@CEq3C^zRW^~ zhVR&c(zT>v?pLMfNlYDGYJn+KYBCnRkZ*|S@}<5qOFjk@@xH7ky%)1I8ZDxn$#Jq; z_wn7T=sA6%(K(7f@MP*vt2@lG@x8(WRyLg^y&`S-xO-O)bc04O8UhA{C@y>%jWOh{ z-)UVaoPU9Ooc~vyI_?fjSDd(o(uGaW(!k93 zM1 z<;Umu;vSNj3A!$#dVaw{<4S*R0xl^8*Mr7(ZtAF^mq|*kC+;5_AKC*vcRqM7SH3rm 
zea{W*I(#;)p{F-!Hap7-uP#XpNbwNvxFodG(>p0WNi5|H3s?0N&Zp|mv~Tfn1#;0b zH6;=IjRcPXT?!{^s}28J3lm@K>IhOn!{jt6h2N zS>7Lkfwo-O#Tu@txLxW_?S-)~^lO`G$ongAoW)ZhhIu}Qd;LCmYr9zOTN?nXu5)Q}ZyUVFJ zcVmR4k77%Ny2UA5g?9)r;mjAq8L$wi(0Ew`duAlx@K6b_E=CZfWmYC^zoghNPsP+@ zH$e3mWH0egJ^(wQ2*wOr(qjo++WUZXR~8Opoy#RR_t<@TTYJbr@$FGbLz37kljGdK z7Qy!r@J1OcEsUZ>+Z0QM41o#}wZO-ZC8{Yx)ZQ<1-3D>1LV#nW5@R0BTlv*1I6k?`ClK_I8yIWguLX1|96d zu3RgXYG;hF=fVa_B)~bK5mqWA`=5XDGl+&+EEA2j_99?amc0#>Za)Z0eJ$pMYn0SL zTF?q<)@Teuvd_lT5v*UD(8@xVZAw>mi?)iQ-J&uNb_+x7uTh;3F=3S(KH~w!EuqXn zdx*S_a{V})OrVrtkRH%`F^J|I-*<*Pr;lUFc!2=c-;9;94{UoinDw6e3=bu>TrV|& zxT$MS-Xhtf7VXbZ-C*`IUk~9vU+?9kE7E$!eK!M3Yw6-}tHsEg4# z2(55s7KjRMdkmU6NxS2OY;p`2D-Tvhh}JY#h59A1ncQM)x)3Fsn{jX?)Gpft*74t8 zdEAA%g3kx@cn^zhw{Rv15Jb3wyYeia5+Pf9CiP`3hqT7y-c(t9S+O}U#gZEMB7)Yz z+_VE0%SX~i7UXp#D4z=2xTY%-r(_Yf<-eS#je}7D!$NJ-0^6*ds}EQVPO zjT0&3Op&8YK}?C5n}(4Bs0n;{>Yv;r^}wHA%*}Pm6UPc|^$o>bmY;Zz@{j$_!T@jf zo>;)pBtvXL)#P=D>)#Y$O4klKl1Pu)vi($3!6+ZBDewDC4Auc%POn%EZWE94Cs^gFe`=R}fYY51B-fN|n z8?HTcRBDcQ_fGN6MU4P(RdkukCe%W!*o1eo;qbdd?=ZG-2mw{N)z_x9z> z(|%2xwQ7|OkMbNM(*L|R?_lnS&~noEg+BBMSmc6P`@TcAC2$>{ep_~oRL7Y2D+lkou@B`vQ&hDKdqIvhOZQ3~Rd08p>+V^r+SwC<& zkr3>FyPJ5_%8VJTL;v|cg1<5~vvnE~tN6D$|Kgu!yUi8_3O#_`vcMWwhm)<}roa>J z4P!G9B8lmgGk?p>^;+~0!b+l+6$HWpKbReqK7(bvJeHFddXPsyL=a&_WSz63A&k#W? 
zf~6>{1~uFmNwM8FmZNxaB>{tFyQxX1&^J(3Jf)&e{r@cuNym--f91`sU&@4+xVX64 z1>>)I)F(dp57S?Lr7P;{)!UoHX&Rd(M$ z8Yntxpp_zV-Dk1bDX>KrD@4>u!?2Rt6}9NblQVQy;ws6Y8Y!6EuJZ44URSPy*XSIgrv#n(Qe|rP zET#$fl&}b#)l1Qfs>3KGg%Fj+4mc)-g(Ocx|8WlDec+&#P6%*~ql~zDp+axE(nrXt0EQaOOkVu+_>V(5>3K$j6OdhDrNcx z_op`u7D|#sB5MJ>=ykCf_p|Vc=zzt7FJpU;x6oT&%#iz^8e6a%nUzS(=l4tpIZDkW zZAj3i+?xBgr9630U2(BAmDVW2j3D3yzwM-p1}e_UTS#52FdOQPa1}lKC5*?y+Rs@# zMd(8@w83|F24@&QwHwPHTB-FNeIEV9>1p|_vIT?BC_ToiQ8N(Up3bg%`QpVbVs12z zWuE|YC4&}>5FF^r)+yRN?g2ng7*rECg@Za0FQ*($Rduy3fT1{33h62)-fv{m3`*|o z1C{*RtJIlpXX}UlAz~zmB+{)tr5~Y*q!A@|;a8tCv&QeIW%&Z`7i$fYRD$BD}@JVSQ1 z<;Sx05(1cgwZMc#hF&y+NLtC}?UzDW2;DhP*v$~8APw5+p;~(T&_?R6aLFVRi{rW8 z(YWwaJ+<{@wwNt9Sikgoh_2nrRjXq4diCtNFejUM3xd6agNQP*bNkj@hha}zC}{MN ze*DJ`j;iU$-TnBz>^X7lU`*wi{P`@=Bqut3k|stFY2}1QF?Ja4t_;swi0!GAu32#Y z5)?Tmm!ktq8bKSX%NoC`{nGd()*T_AfQPxF2S4+vipPZHyUT0yddRV-DT1@24EwMf z$+@^1%C#`8xZ++YU;3U_`FR)PJec4791}&cG{h`W2pq;lO(;P)v)Jl0cqrB?2dq|4 z%Bc0Y&ZAUPM&J@B<3G$IYswx~lM%)kdymH&ZHGQ<@CKeQigMZX2|sDr5G_{U)R?GU zyZov?uS4~8E#`k!#ziBw+4hfYTYYJ<*_BpPQAH1 zP{5AN@)!7W*ys$DGS$;8p;D8}*6J{*ckgT77YPQM8M_2&BH#TH!bMZg3Y1YY5Trd+ zOx*k7@5U_GD}kqtQ&)J)5um#aE>zYYUqI_8Bh6cm}eF~rpy(-WyeG^L)2eV~lDoAxs9$YaiX9@yF;nUgHr1CsJWTxXlH&)WqhW*cEM<-H^8dN63A2$vj{gHaq-bC}%S9ro z5+AXZo!>$=WDs%tI|rP6`sG&(pNy{S-%P{Z^&qF*^$y!2=bzKJ4|!ZDuzu1_7hunp zwR-WNhT;tx)RoFZ`8;rV13af(tl7f$elnVeM#a|vMUXpnvEGLzroq%>3F!K5S&)eMpmk+Y|Jy~FQP+KsiUK#rGsWdHdVwO#*=`X5<#s78_Gvvl2lSzc~Ves z(k_8Z1wg?5JyUbzJ;m#w<&S<8OIgH?QiuRt?3F4?*IBy`M3Xyf*8M@*LVuE@!`Pu8 zU6gZ){y+8Q_Ga2o@Q*~r=1N_`n%E{%V?y9Hq7NnfuwjnW%B4ESj=g{c4r68GF# zH+8yZyg(tD_P!tCo}VDYFx0`7`==5Rzm#r#L{h9&=xn$-*uLHtP7As0Gyo0G01`NH zmJ_iDs(Gv)MU|$-f4uWp#l|&r$T!u1%a&IgH&Pau zl7S+l@0nW;wyG%tr6{p@kR>UMVlf(sO)v|p$MNF(B(obh{e}ph(IAk}{ z57w9MDyN&42AKDL1C&g_JtDvYjp_qjLden^a>sT+{C#r3 zBfJ6HLb4t!Joa@b=Y1uW;--)~6>`0_Ru>t1P()t#0+e`yQ#xb*U;VOir(=wB1k~S^ndV}9{^nBsb0(ujkW1@^JUAa8A4{`7 zT$tc@JE%&eawmyk$Zo2}5hh7&M^$7~!6ZudHs92RW}WSD7n0V>y@J5T6vsmkUc*7~ 
zB>#F1FvKi5v6FYr4*lXy%Liy(z*tMtHpV!C5Uyo4l#EPpldn#HT&$Tujdi{e?vQQj zFdktfOTy<_S4ItRlgFunOBbatmRm`**sb+Z(bPXjv!NL7NTDrc!?WW)^4E3ZrQ{tk ztQJ?CDt(@fnDR^7(`eE?$Iiu7ZxJSwJ=WyDeD}UaBG{^BO3nNZj zQ!9&#cA;wFWs@gCowMw8{?#M$?K=OdMdju1be#ite-&in3d)f*MOOb^l5u?%5)SOg z>E`%N_donuIY-)WAKWrP zxA#APY${6n`Sdrx9cuow|5sC6hkf-&(ygWwkBn`5MBjR(e^8f^*L`YJ_Eu^evuf`JN$hy}x1PK6N^JLLimnNKQfJ>`E}wN0^M{2_$PAv> zL8gR)DSKTVRQH|v`OpQ7b@b1_R@rf%m5)csb~~r;Uqp5Mm`lWzT`B=clN1U-~6)maw$~2`>tH_nu9?qK}|oY zdFoyUm;b)oNJ-lFXqTRHYCX88Xp9m4FN>@NXO=NNJn>6^7)b!&WiyKjno6SB0I8eo zW8Ui9Qxu1)N9(2Z<>l}D)jV@0(UwUYZnce+yghP4_I5r(_M?5hZTydi;w=U;GSbYY zXq=*Gg7C)WXkS8-ZQA??XkAUeFBfIqbY40?UP)n|En*We7$kw$008b9)KheBfnE=d zMjR^gGutiOK&TPIGK?T);KYu<9c1X8=Gb*@XyWT%?-;&hx27M`0z2>%Ia|=L%di}X z+B2(Ebd^vBr3?sgD>1EAtto&1^MX%DD}HD-EQ|n|dgaRFw9Wr{r!wu0v}Ni766@>N zu4%2dg*YeOcG%HKG4x-uJLhxps*9l`TDEMt=EdJlgpTHz`7KnSN?vil%aU;vrwRR1 zHi?|~n372UX#|j(sNA1E ztqJP>g@3hbspcG4Xz~w$or%S29 zE9vf6k}umVr4`;u4p@F?#?y=8ORf(Hn18IUflIMo@*^#cQNmUTILq77>c$@1MujQ^ z7a{l@b~I5^u0WRyPl;_u>cE@za~Czvy?a)83SORXyQ5Ca9?KL14kS^YO1m;;oaKFs zAwy^y8m%>*r?bFv$dLP?Q7o+o4XMy| zPB8KLVu!mFNfjn98+G$-5d)NG^`)YsHD6|7sIIRMne${q5^&Q~QYe{>JO0p*=$NH% zNe0UPHt!g)_XptozN!y}zqAtOg4KTN)G0SVv~%LpC#)TzJ{w7}l$JT6YwA8v(G<=t zudftNLRlodj^)oDU?a2`L}Kgm>h0Sdk&&*Nasi6jiz|QarTDAQ(P*f`5ne0GVyeID+2JyI#p;0kO*iYM1Drsv&e{U|=B28O~d{h%LbFo=m>u8=;1|h8c0$I(Cr^-Sou zIy;QFu=x1=K%@{8)hDrF0NCxBIS@i9VSeP72xwh8l3fuca~?yAq+5=R*S5-)y$XRd z&u5}VeMO_K_AwZ{=KUZ)KYztq!s*bOnhzhw2}?GodDfS^WVnTeeJ_^0?)re{h(8^u z&-+X-0D+3&6HO;hqg%l^T&JFL_r;4BLHd|=EwqwcE%hZe=BtYJRb?|&8GMD!2miBU zk9;Fx*ZcAHwyLSLc6dbA5}*5&uJ2BeA1v(AuJ8!n%dG^#lDp*t&YCDcgg6JGq{d$# z-bSbkUP<{|PIXFh{CnU;haE(z27JU0{ZD`(IcergGA_5CyPo{)18#;Z-h4uVDBbxT ze_Nj0YQ&fCb!vn>{aHxw7wirv5B7HWWL5)-5^tDVU(uaP&wg0I_Q5fY9{>`_D6k2c z;^8d)(pzAT>6Q_8RfZS(0kVcvQ-K@3y#M9B@BOyzWHZIz>NcbHa87U%5SsL?@4P<; zQ7U=iC0e&++RoWZEEl*Mo0}QP)3~cg(2LKC5e|$!?j|Asu+qHx6iX?RGWb zhwz1BA)bHUXPR2Zr^|XXa2%`@4*m#2D~!4*A(}L4B7-}n_YJr8W#t)lAEE>gkn{C9 
z4ZT{Nx@}v7%z-ZdPkCn=R%N-qeafuuRNI|XirTPHNE~tur%W?ll7$K=BF;GCgo-(! zSZQyX(-N2BkY?h*pa?RF1A7x{qEI5B;!q))GMEFP5b*zsilp-`9Oz=XIXv)oiuJeGl|F!V!6ItHUZLBQLxJ+z^RH%OM;s6Q>ke?*R$TpRCU7 zjb#ZFdvl3=6wmqf?ef%I(RA)qb)-(wnx9guCjYte79Y829ch3l)v|Hc2BSG^JIl6Z zOBC(3#Nor>Gm1{hq%Vh9pC{Y=fIdft((am>mk4+?LLR7Z^xgTas$bi}}qS$qI{9Jw&z1HG=@ zD>kDh(bzZNdWf#*5DQ|$!f}HJ4HDsF|8%j8{vN&i5C~Thur4 zM*ofE7`6~pl*~lR$%N8A*z1T z&pgGk*yH+k{>$T=QnwhK-XB#m zGlk?{lFS~uD8_r zyXrzj!uuWxlRMb#dAg4oZtO^vcO>GtWIm|$th|R@^`M%r(1YInrDyd))JjL~#y9+$ z-WlmW<^736TPJnm?e;W^M=VW)@dx(qeFCu4;Vk&yvK=6mGvFoC;=l6RE2}8}8<$-u zWHB|#dukJqJ-@tkAw-;aK`DT_WCVrId`4cRbq!r<#^>7D_!M%on|Pw{PlNp>(%P(g z)sgVN)79hp?eXC8q*OH3?5a!A@r_^cY^t9Hb{xkFL#o|kr1j4d$65kt;+@VVzcK^n z=)CHDKEJm06TfNu-NxzLbsB~{Tkpd7w$?|D6nU#b)6NkcM9k}^kKmR6Le-xhrjI63 z3`-T~tp$bGojkW%>!>|}T|POG6gz@X(yvc-9jwqapo4%}-HqCmW z=hjE4fa=abiuoxIb&NClE2)z~_DP#qi#vDis%Lw)Q+L?EnH-NF(9FZ<(h}A`m0*GtiO-7ZUgle|L-du%OB~_ z^}5oM@KN&$xG^ts;R`k{8_9R3EG=M!+}-G2n--3-2`E5|V&Av#&5#%{0kW6&ch*?{ zDzMG!>D%{*ueAyI%TlG*H<~Op+{L16yW;rMeh<~{h5c=3RPq9e^LyJ_@*EULz zpWLn{)7M$wZ%7X%IxU{{*^xBAm(BPODj>4SrH6M7LAC^t8Qwd4bXl1{^lJgUazotv zu84@d`Z)}Ox^($*adGPWV!!2Cgp_nhaE@m|7UOsBd{o)?(1d30t8Rj6wNVJstMDxX z;EQmeydar5=c-})svBYEw~96E3&blYvCIbjB$ z@ws^-9xtCLJbNv#Ukmw4dgu9MB9;tV5y>#A`ReaMdw>(jWy1!G=B<}YX+ulK@#C12 zOWcT1^ydfmoDEKFXC%fAl&gaS#^I-v(?NkTYIRdO}{p; zeEsdJRY(fz`j7*7;!%(pG7qPo$p89giG`S5O$Ug?25O(-r>;X=jC_gNx{N%dAy=>5 zW%!4k&pv+g#PQNR>xT!9wIuRnMbHE`O>&Qkii(2!?#i6|7H;^+4drwbRYHBHfjtYE zbTzrzY6LQ4_T^L`1t%3^qXEp7P_3SD4xEJ6r7QVGt~FtepN3vmxgs@*y1dkDH(PF4 zOz}sQAIg1&1TfI0Iv6cY+`rl;{~8$dWT)>4(^}nM%(k&l#>LF>EUG5&P2WDF!!&BW zCv~DhXscfK<(BIZ8L)>7i62oHWrST7`meL?(%vE4nHUtRH{;NZo(-LpnD#?EVUoM_P3MM7NGwrhbwO zr(U=5i2)N2TWhv{(f;u7v`BnhvOUGv!ASpAk! 
zB>epI#NblMW9;#QoZ?fgRK-mvKC{y!@mR`ipv^JeAH z5^sE?%v%6qxO4UYX%o<(irroPONH-Wu^ji+zfgCr<$SdMbNnL930?g|zpwxMhgF47 zFu_N_E1j%uM+Udn{fkxU0>VRMZ}Dn=Z*%7_9(W$H-eSUX-)3!$ui5l6)2AaDx<_y` z?SmleMW5qmsDv_;O6qiMYFaCK?c!x2VH5Ocl{px7Xa>tJ z;$NdXXlEqwr6Y(Bzn=q-frN7u>6<&93(a3SckbNpj@Cnxdob`@Jwr`(W!uswL(^=Y zx#NW2%%{+xG=LZYn!_*b{S@oD!U@;#9a8qJ&L#JmLsk~GYY}~6+gow--pM*eqlLb8 zJc*Q~cgqIWeBC|g*pzYY(5=$O3|s0qj*(hv%K>V38}5NYCXZ-Vy##F5D50}}6~^I! zCNYoUo*uQOqU@Pg0lmnykC9V*JHA@;`^l^I4MLFz^K{?qQic~#wC$XmC!DfQNzMkB z&G1uF(hpD9JpTflEMI_DyTQoG=^FExU}y%41?qu=&4`R01;?jvUjkClm=1kXCn&)= zrg!P6TVDecf3kEnreP_=nh2&DC)Ng1u#E;MCnt+v^ZH@VU`mt0G!SIQirRL3ykE|l2Ff*s$xLV#U)*Ep2tsT+ zWvX+?yaYS1k z<#njIuAN*O`#1Oo>_9bCxU)4TvL$A{S)DTJ&#$!1ET~=cKL0C`1hxMA4=ssq?Vz3u zXMA%ajYaBw`+1<5d$)c%uwK&_Y@PwPQz|A`^o5j@T+yoL9qgOqIXJ#)-N$2S5Il=X zD_COu>fZC8_T)+BFf0E9o1^7N6e=Q(cW5@<;&04OqzzrXvu2bi4LX=wVfg$;?W8fr~#PC0Hg zksd-43O(Cu=_e=&0lPwB|)0Hd2FC295jsjscov3>iY(nrv# z2CuaHG1WP9+)}`tNE&Mng9e>7*~?v`>NOC|Khf(Qys8B-LC|y^pH0M(MuhkGtm9g< zMgsOyG5mUX;`aSy+55=>klDGiBt}4xL;EY9J&*A-Ia5HeZV#&JSq%+xj%3qWs{Ty1 zvp$*m(%du>is3FU8^cp-e$wwPx_OCx+TkZnYeEJI%J`c!nUZI!`JM50md$B*5BoML zC|lNI^@N=BHNWW6OtE4K2&C9Yc+kj8WbpZa#6<}%Za?I2k%HwvcO*%s zz(1}?jV4f!lxJ#qKok|?ic))g=n>v0HE;a=@u$fltI z^{ILga6YFc-LH95mrL5*E$JGnB_YefrE_bp@cIvfzp=T}IH}VD4h?@AyHdZ&%@X3R zI=|Lez`rbz-3dr#zJ7jY;Oe0xzw1}0(}l1WzrNVd z!Au?^ia0ljoM&82c|wDvje$oul*j{*1%D9m(ws-npZb{tHj{SMkP{Y?qvaABwbzys zMO+IKS7o|Sy9-87`Lh5XJJ0$)B!yv!LjRT^7FIRxq;`OS0fhA_OVIo{==<0S&1lZH zeSXr!t>s?E%>;ZJAaWu3STq`2pRz?QR=<_r)r}HS_%KL%re-gr(!ClnvHH%~T<^e@ zEeeM11ShLMU#}-lOZzG>K1sb@Sa_HWD-RN=8v%FLur@V+H2d`lJpvML!gR4*8?FW% z;8zcOzx=DR9C@9!aIFcK0ofn2<-gkmq6Dbf4k;X%sDw|s+ez8$)r*?&{9v|M=qb@y zVYM!_FR3o-RC6@J59;&5OK+leAQcURf|z~$`SKgZ@~gQ-oAHZm)k%2%T15~2`M)B1 zu`iy#5(fM~`Jtt@;8W*5ShIF*Iwbe%neS|8<||1xE~_JUwrmHaiO@jA?Bipu=aL@6 z?~nDli$q>fxktt=0)y`fCM8ojHubigZvmdotfUHk$Q&cJ7G<7B(PEwCnQ?$UEJ*>0 zpdqQ#n%I8VH~ZF0Af4O~Xqa;KYJ6fv)m?FkgvF`5V?PY0ohwsm7D9IMxqI^DNzE4{ zOEuEoDiFZ@Pq2%Ker=QFHl;fEwxI{#6e_~Z@O+u6igv#G#sK-bFh 
z6z{ke8PHigqU3pgjFgX_w}u#K_A;|}7$SFQU&@(2g@VSR#CL!~?YY&Gai9B-ST}kV zHs~?urX05%KaA!4(}Xp;vv-;=aDHRL=OD(?<4g2SgzWH>Y2Ir9EUq;3Ef|qlmsZ&J zcE75nVBZi7BN%On&*DBL&LrqYq!>mv`?%#h z9AKorn+=c_$FMa``(#`OrNknJSIrd{Crn0@=rBnQ@|h&moJ?ahZnF|X174p8QsA%X z`h6+pL9S;K#`ExZ4Mr4Ku6UQj`U%EPYZ{VVjhP11kmDw(+;ADF>RKLb3tA|9T!DZn zi$Vx816Z$`)C6q`@aPB&`99`(R23sv=6IJo!t%E_sx2+%h9=O+%JR!EzodUx#O$GI zn9?s^pADugc*pdF51fG#zV@lp27hd@EQ7f3$*+g%A^iTxjZ&7=Eu#WB&-3(b*6}m1|G_1YPsnf z5&gC}jQAK7LylJ#GelZ5Ir%qFdI|uialW5lp~BUWYu#;fE6;;v*^H;g2`dyLqPk@y z+Kg&&rTQtN2Q7P6@-BzVq9!6(OzJu`J#(P`+oo_f4^tMf?T;|U+->|c|6Yi6_rn90 zZfpC6Qo?&%tHuOORPvdsv%7yz69LR&{e(a?J7V?FMzpjjq zthg?K3cO0QEi@U;&U;xKn8L%zGUgZTE{YQW99`r5WAV5OssGC)>!zbNfO*otPFXgx zOI)}7S2>Zts`L7z27fKZAV?FEI@Q-U!mLldu}40Yv8T(2>g%m68q3HzsN^xo3^&h7 zU_ag?F>~b*t!ry3`5z_GXXMrIXf{P&?v?E9gi&7<3R}>u$rFCC{&@ZrOK$4%u+^X` z9jCL}=Ckj@aJ(pgyu0$)jQk6GvH?ojL2iT!RkcLQJUWS{il&2OVGe-pU4qPTJ3l{+ z>R7^0vUOFJT8Z2DpV`p*2q;P(G}MlNma{Jo!m}rnRg`2)RtK2%(JAC~=OE5o}k1b>QeCEQF*fI+%mf zUhDgG4|$Y5gf%f>3rMJ$Pcm)J!c70X?|=boSR@ zeWjsyCT2Z40v3O8>%qU-CK-(S@;bH5WB8d`E%{PeumZ)+lg}VW6&~Ir5p00y2D3ig zDMDk*JuGJh^%bu@S2Xw0mk^TkcIy>9)~jF0cTc3cROU2CLdey&_g?H9Q^u5!OrE=@f*3DR?P?^{n51FUKmt|>a&Cln z(zzhpS!_}f+5Is)*Dd6mJc0hxByMMnf$Z~BR@Ehhj=o=|B5QD?f@L1V0r=K=Jd8|m zh^Xj!3l<3ZM>lD2gt>2AIiUpe1IM!feH*|0a=^-kO>`;p*|yA&-G@7gm=v+y%$%$* z*Kf858mXc~d}a&Yn!r4RZKz8jZXKhk^x}g8s(4Kof(>(o7TDX_k;R~n;thD8DOpkR zxHS?sc^GLHZ&$>nV$Yb1;d?cA^4+H+)@X4$BKF=HRWZgy1*cJqqrWJmaGXIhBI(`w zGVWZYpa|+LP60*jn86MtEO znBsn@*JdIcm~=JjdUNPC7Z>^!ZT_t9A$;Vs9xVaUh%bI{HR^!fWiR-3s6-wuJ3_9U z(&`o!Cg+WNmPa%8bae0DbE$`9hHFbb!`Nl9Y3LM*_PM2} z2pySh20QQkpbGr`ZvW$Wmj~?N(yiWyo@&u?__Y7?ly=^m9jnsIfQi=rTT0&E?~>z5 zVc|axIscFOfJ@u*@<%wl^Uqy82)jS};Dd>~=CEIvp1@U+0CTZuJAMJ{`|PcJxM9WZ zHzt=peLBkAp(i5f|CG1&cA$4tge89qvu=R{u6k{m5mp+Ir z>*C`j;10RB1CO4&PM0KQFmH>tWK8z0(i^Gm^VP}^3vmI% z(!Q;o+E3)Y_A3U_r3;$lsrz&8HH#Mw`VcpsT1-}^yWT36+cCvkTcg=O@bP2L`MW(w6lXT) z4Wv>Taz3sjV{vF57ca+@i@PC;KmFNv$|o!&Tn(u-5TBv3h#TQ7=hEb<2>I# 
zCapudCJGaq2cO^O@SRiAGj~>Lv~39%86nN*XgD0cKcA_AAv-Q0aFXsPy_$DH76XOy zG8LOBL-b~#je8uPTAnbeAZ-uJ=-F6h*11>U@o51c_rEj1&O@AEXj5AZNTY6#y7*=f zHo#_77;21dZu=R$40G@W?egnQC>we8J1W+6anHBj`hM`)wEBzeJf?iobRQeg98m^s zBqeqhyv|2s9`)J`$eVj*yZ5y&TUVO{A9TJEgfG(37qchFr2N%8a(M?2g42BkOR6WF z)n{kKk?^`lr!@cePBON*zRU8y{AbX2{>2_V@u6f{a&X0QZqqxBJ^+IU-E%O~ue^jo z`FOW=izYG>&iqCuWs*1-Y^K*5FWHFz=;F5b1~7#>JY#oww*rkh0W-Uu-6n5Q#`Xt( znGa^n{@@5DNV2aj3_Ho~`o!7gUK!D=N09Sk=kxQsFLckmIWp5P{>Dg%Vx{>0 za@2cv8@vBR|9hSuk3GB_j70L|J1Ym;O{+djBsVneQ?bBNdG(s3N92#b*8z_-G(DvG zgO5OSC^@~Gkcl0l@#O-hB)a#Mjy*V@PFTmiY_pXp%n{!SSXPk1&^s8MvesMvimuo@z zrmXMI$G-c=DWZA%55Y$6qam&!cbi6*ukJbr)d7edqbnwN9-1Cf(((M#2Y9x4Ke)@3 zshg2KVgujnvP3z+HFF@s*-?d%eu@^S?nz7GL+8{v&IuKYP=2Old!+lSs)$vi`&QCs z8eK|h+VuUH_NPKK!#;8O=gd}5b;$wOfgklBF8T5dkl)$)QOl3pdpGzz>g8L{e!5ne z>7D8P0}CMZTJEB8xE;z-D7T3E20naj@3WD9-51THDIn)LQMA=E1KQKfBs-&Qo$Mz2 zGr;5taeV72F9JdaIY$^MH@pack}P==yZJN6EP6@N45JD!4M+(H2S|=MdN(ftdlhbkm1`KbvNIt zj4a#nZ073q9@!*;r_arB=pb#ZTY=_Wq|D1q8)$-P?|4>~{oJrH=V{sJ#fL{aFvKK> zexCn*qL@Y}8jNJ#OP=L!bw1Hy1*9gd#vhe(#2yV9msuXp1k(}Y%Py2TUc^|`$BANe zclg*xmYibEi`Cg9Rkwti<}3;rO~S3)=Tn54>pgz+n0gj};DLm!i;KRcU<7LImQ@j) zLGITGUDcc&Ae{GzLs2JYL!pex3o0AuT)X31xJ8=bd+X!ifhp7);%@~GOUK(-s>PHd zLBNa#Y7>or%@7f*4A7~jady=%cDRuCCfO&|BpxJn*q(>rgc!UX(@!y>MU)cY;<1qA zEswe3oRdZFF&>u%>T@!M7p`Cq%SlQve76S0Jf?|xtnhT@auD$0l{u{|&&{Vl)eXlV zQWxEU*yPjw~T$| zOdIcXQpnnmNq)t9R)K`mfI1u)*?qsFcfGn!9i9+^We_ za&n0&86)2${YInpRB#9z5n;TRcftRAP0Iz#yAs(|=6b2AsirW`8GTXV)3r41BWlQL zond1?Wwt@nB3CRiA7?CQb~OwaNo|d9v}=n5Ypg;7N$dPst-*32PaL9F0DyKknkC8k zwL{KiILUgIyvp;DlPcd(w_yt_kBnBcFwv^HF&Ro~4Ya7-`+-E8l=mKYiS-VpAUsG; zFQvB0B6uaw7^d18^GsNKa*90PB%m_45r|MbT0)9kWXh0{R8|*& zsSwDoUh50>+vyCm(SN2`5*ynnp{~W~g{o=mh7B7Ik2|dq-Kv&NPpJMy;j!~wQJY+I z1G_q?U28xMX z@?5!~a*vZge`R12Y9OSLBUKaO7Q=<=J+Qjb2mx51@1}R9Js!>+crWto=$C#gjeWk^Ld`zvm+LNd5P|*Dssk|o!;M?Ch@lyMb% zIk#@PM+-lrn3m0r?D0ghkOMIu9ZWXWK;`>XWW1sIK~n4BO=Q#xM5L#zH#aRdmcRmt z#PEYPwvMuO|NX2Zf=qQ0X768>=KSjSH{9N4a*v5W{==fcCaDqD(PHG}UdRmj%0RSR 
z43_C#eTo9tg<7`tmF-oeO5W#O8c~Tfs4CJ- uT23K)^IqEYkJ>=yOYiNBp2(@Hb zflaKPkUf{>xWI7Jwnh^!5S07g)o-v$QXk9f9P{MqCcu^IY(ji@jPdZZxE5hi zKlPh#J#uV@tOKg@$1>L`wa2V$t`PE$erSO}11_zx_G$u$PzrCsJVDZZ`UesmSmmZJ zYyN&yET10m9}APH>QeN-jha*~ysG~vv5-#K^B?BF`v1ufQ!8IcxKP`FTk4rD`u4_w MpMG?>|M)Nd2Y2> str: + """ + Convert an image file to a Base64-encoded string. + + :param image_path: The path to the image file (e.g. 'image_file.png') + :return: A Base64-encoded string representing the image. + :raises FileNotFoundError: If the provided file path does not exist. + :raises OSError: If there's an error reading the file. + """ + if not os.path.isfile(image_path): + raise FileNotFoundError(f"File not found at: {image_path}") + + try: + with open(image_path, "rb") as image_file: + file_data = image_file.read() + return base64.b64encode(file_data).decode("utf-8") + except Exception as exc: + raise OSError(f"Error reading file '{image_path}'") from exc + + +project_client = AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=os.environ["PROJECT_CONNECTION_STRING"], +) + +with project_client: + + agent = project_client.agents.create_agent( + model=os.environ["MODEL_DEPLOYMENT_NAME"], + name="my-assistant", + instructions="You are helpful assistant", + ) + print(f"Created agent, agent ID: {agent.id}") + + thread = project_client.agents.create_thread() + print(f"Created thread, thread ID: {thread.id}") + + input_message = "Hello, what is in the image ?" 
+ image_base64 = image_to_base64("image_file.png") + img_url = f"data:image/png;base64,{image_base64}" + url_param = MessageImageUrlParam(url=img_url, detail="high") + content_blocks: List[MessageInputContentBlock] = [ + MessageInputTextBlock(text=input_message), + MessageInputImageUrlBlock(image_url=url_param), + ] + message = project_client.agents.create_message(thread_id=thread.id, role="user", content=content_blocks) + print(f"Created message, message ID: {message.id}") + + run = project_client.agents.create_run(thread_id=thread.id, agent_id=agent.id) + + # Poll the run as long as run status is queued or in progress + while run.status in ["queued", "in_progress", "requires_action"]: + # Wait for a second + time.sleep(1) + run = project_client.agents.get_run(thread_id=thread.id, run_id=run.id) + print(f"Run status: {run.status}") + + if run.status == "failed": + print(f"Run failed: {run.last_error}") + + project_client.agents.delete_agent(agent.id) + print("Deleted agent") + + messages = project_client.agents.list_messages(thread_id=thread.id) + + # The messages are following in the reverse order, + # we will iterate them and output only text contents. + for data_point in reversed(messages.data): + last_message_content = data_point.content[-1] + if isinstance(last_message_content, MessageTextContent): + print(f"{data_point.role}: {last_message_content.text.value}") + + print(f"Messages: {messages}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_file.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_file.py new file mode 100644 index 000000000000..2ba4a1ba1c78 --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_file.py @@ -0,0 +1,91 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +""" +DESCRIPTION: + This sample demonstrates how to use basic agent operations using image file input for the + the Azure Agents service using a synchronous client. + +USAGE: + python sample_agents_image_input_file.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + 1) PROJECT_CONNECTION_STRING - The project connection string, as found in the overview page of your + Azure AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in + the "Models + endpoints" tab in your Azure AI Foundry project. +""" + +import os, time +from typing import List +from azure.ai.projects import AIProjectClient +from azure.identity import DefaultAzureCredential +from azure.ai.projects.models import ( + MessageTextContent, + MessageInputContentBlock, + MessageImageFileParam, + MessageInputTextBlock, + MessageInputImageFileBlock, +) + + +project_client = AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=os.environ["PROJECT_CONNECTION_STRING"], +) + +with project_client: + + agent = project_client.agents.create_agent( + model=os.environ["MODEL_DEPLOYMENT_NAME"], + name="my-assistant", + instructions="You are helpful assistant", + ) + print(f"Created agent, agent ID: {agent.id}") + + thread = project_client.agents.create_thread() + print(f"Created thread, thread ID: {thread.id}") + + image_file = project_client.agents.upload_file_and_poll(file_path="image_file.png", purpose="assistants") + print(f"Uploaded file, file ID: {image_file.id}") + + input_message = "Hello, what is in the image ?" 
+ file_param = MessageImageFileParam(file_id=image_file.id, detail="high") + content_blocks: List[MessageInputContentBlock] = [ + MessageInputTextBlock(text=input_message), + MessageInputImageFileBlock(image_file=file_param), + ] + message = project_client.agents.create_message(thread_id=thread.id, role="user", content=content_blocks) + print(f"Created message, message ID: {message.id}") + + run = project_client.agents.create_run(thread_id=thread.id, agent_id=agent.id) + + # Poll the run as long as run status is queued or in progress + while run.status in ["queued", "in_progress", "requires_action"]: + # Wait for a second + time.sleep(1) + run = project_client.agents.get_run(thread_id=thread.id, run_id=run.id) + print(f"Run status: {run.status}") + + if run.status == "failed": + print(f"Run failed: {run.last_error}") + + project_client.agents.delete_agent(agent.id) + print("Deleted agent") + + messages = project_client.agents.list_messages(thread_id=thread.id) + + # The messages are following in the reverse order, + # we will iterate them and output only text contents. + for data_point in reversed(messages.data): + last_message_content = data_point.content[-1] + if isinstance(last_message_content, MessageTextContent): + print(f"{data_point.role}: {last_message_content.text.value}") + + print(f"Messages: {messages}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_url.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_url.py new file mode 100644 index 000000000000..fd09bd81ce21 --- /dev/null +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_image_input_url.py @@ -0,0 +1,90 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +""" +DESCRIPTION: + This sample demonstrates how to use basic agent operations using image url input for the + the Azure Agents service using a synchronous client. + +USAGE: + python sample_agents_image_input_url.py + + Before running the sample: + + pip install azure-ai-projects azure-identity + + Set these environment variables with your own values: + 1) PROJECT_CONNECTION_STRING - The project connection string, as found in the overview page of your + Azure AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in + the "Models + endpoints" tab in your Azure AI Foundry project. +""" + +import os, time +from typing import List +from azure.ai.projects import AIProjectClient +from azure.identity import DefaultAzureCredential +from azure.ai.projects.models import ( + MessageTextContent, + MessageInputContentBlock, + MessageImageUrlParam, + MessageInputTextBlock, + MessageInputImageUrlBlock, +) + + +project_client = AIProjectClient.from_connection_string( + credential=DefaultAzureCredential(), + conn_str=os.environ["PROJECT_CONNECTION_STRING"], +) + +with project_client: + + agent = project_client.agents.create_agent( + model=os.environ["MODEL_DEPLOYMENT_NAME"], + name="my-assistant", + instructions="You are helpful assistant", + ) + print(f"Created agent, agent ID: {agent.id}") + + thread = project_client.agents.create_thread() + print(f"Created thread, thread ID: {thread.id}") + + image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg" + input_message = "Hello, what is in the image ?" 
+ url_param = MessageImageUrlParam(url=image_url, detail="high") + content_blocks: List[MessageInputContentBlock] = [ + MessageInputTextBlock(text=input_message), + MessageInputImageUrlBlock(image_url=url_param), + ] + message = project_client.agents.create_message(thread_id=thread.id, role="user", content=content_blocks) + print(f"Created message, message ID: {message.id}") + + run = project_client.agents.create_run(thread_id=thread.id, agent_id=agent.id) + + # Poll the run as long as run status is queued or in progress + while run.status in ["queued", "in_progress", "requires_action"]: + # Wait for a second + time.sleep(1) + run = project_client.agents.get_run(thread_id=thread.id, run_id=run.id) + print(f"Run status: {run.status}") + + if run.status == "failed": + print(f"Run failed: {run.last_error}") + + project_client.agents.delete_agent(agent.id) + print("Deleted agent") + + messages = project_client.agents.list_messages(thread_id=thread.id) + + # The messages are following in the reverse order, + # we will iterate them and output only text contents. 
+ for data_point in reversed(messages.data): + last_message_content = data_point.content[-1] + if isinstance(last_message_content, MessageTextContent): + print(f"{data_point.role}: {last_message_content.text.value}") + + print(f"Messages: {messages}") diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py index c0cc94b770e8..e3edf7243c26 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_openapi.py @@ -69,7 +69,6 @@ instructions="You are a helpful assistant", tools=openapi_tool.definitions, ) - # [END create_agent_with_openapi] print(f"Created agent, ID: {agent.id}") diff --git a/sdk/ai/azure-ai-projects/tsp-location.yaml b/sdk/ai/azure-ai-projects/tsp-location.yaml index 37b86a929f37..95a2db669486 100644 --- a/sdk/ai/azure-ai-projects/tsp-location.yaml +++ b/sdk/ai/azure-ai-projects/tsp-location.yaml @@ -1,4 +1,4 @@ directory: specification/ai/Azure.AI.Projects -commit: 18f1b2507531ccb630ee66baf37652db84d4b520 +commit: 66f3f5a1184215abf25d93f185b55dfbc75b0050 repo: Azure/azure-rest-api-specs additionalDirectories: From 1581900c9fd0da0fa9638e45c0bbe1263fe4866b Mon Sep 17 00:00:00 2001 From: Howie Leung Date: Tue, 15 Apr 2025 14:52:38 -0700 Subject: [PATCH 11/13] Small update (#40527) --- sdk/ai/azure-ai-projects/CHANGELOG.md | 2 +- .../azure-ai-projects/samples/agents/multiagent/agent_team.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/sdk/ai/azure-ai-projects/CHANGELOG.md b/sdk/ai/azure-ai-projects/CHANGELOG.md index 58fb350038a9..826477c9f319 100644 --- a/sdk/ai/azure-ai-projects/CHANGELOG.md +++ b/sdk/ai/azure-ai-projects/CHANGELOG.md @@ -16,7 +16,7 @@ ### Bugs Fixed ### Breaking Changes -The toolset parameter in `create_agents` no longer executes toolcalls automatically during `create_and_process_run` or `create_stream`. 
To retain this behavior, call `enable_auto_function_calls` without additional changes. +Redesigned automatic function calls because agents retrieved by `update_agent` and `get_agent` do not support them. With the new design, the toolset parameter in `create_agent` no longer executes toolcalls automatically during `create_and_process_run` or `create_stream`. To retain this behavior, call `enable_auto_function_calls` without additional changes. ## 1.0.0b8 (2025-03-28) diff --git a/sdk/ai/azure-ai-projects/samples/agents/multiagent/agent_team.py b/sdk/ai/azure-ai-projects/samples/agents/multiagent/agent_team.py index 560117e427c7..a01d7d9060e4 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/multiagent/agent_team.py +++ b/sdk/ai/azure-ai-projects/samples/agents/multiagent/agent_team.py @@ -332,7 +332,6 @@ def process_request(self, request: str) -> None: ) print(f"Created and processed run for agent '{agent.name}', run ID: {run.id}") messages = self._project_client.agents.list_messages(thread_id=self._agent_thread.id) - print(run) print(messages) text_message = messages.get_last_text_message_by_role(role=MessageRole.AGENT) if text_message and text_message.text: From 311c91ba44e23f7b48420cd411fab5e2c2e08740 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Wed, 16 Apr 2025 11:17:31 -0700 Subject: [PATCH 12/13] Address code review comments in beta9 feature branch PR (#40543) * Fixes * Suppress Pylint errors --- sdk/ai/azure-ai-projects/CHANGELOG.md | 7 ++++--- sdk/ai/azure-ai-projects/README.md | 4 ++-- .../azure/ai/projects/aio/operations/_patch.py | 2 +- .../azure-ai-projects/azure/ai/projects/models/_patch.py | 7 +++++-- .../azure/ai/projects/telemetry/_trace_function.py | 2 +- 5 files changed, 13 insertions(+), 9 deletions(-) diff --git a/sdk/ai/azure-ai-projects/CHANGELOG.md b/sdk/ai/azure-ai-projects/CHANGELOG.md index 826477c9f319..9b7eb76470e6 100644 --- a/sdk/ai/azure-ai-projects/CHANGELOG.md +++ 
b/sdk/ai/azure-ai-projects/CHANGELOG.md @@ -1,21 +1,22 @@ # Release History -## 1.0.0b9 (Unreleased) +## 1.0.0b9 (2025-04-16) ### Features added + * Utilities to load prompt template strings and Prompty file content * Added BingCustomSearchTool class with sample * Added list_threads API to agents namespace * Added image input support for agents create_message ### Sample updates + * Added `project_client.agents.enable_auto_function_calls(toolset=toolset)` to all samples that has `toolcalls` executed by `azure-ai-project` SDK * New BingCustomSearchTool sample * New samples added for image input from url, file and base64 -### Bugs Fixed - ### Breaking Changes + Redesigned automatic function calls because agents retrieved by `update_agent` and `get_agent` do not support them. With the new design, the toolset parameter in `create_agent` no longer executes toolcalls automatically during `create_and_process_run` or `create_stream`. To retain this behavior, call `enable_auto_function_calls` without additional changes. ## 1.0.0b8 (2025-03-28) diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md index 922f50231e31..7c344ab1121e 100644 --- a/sdk/ai/azure-ai-projects/README.md +++ b/sdk/ai/azure-ai-projects/README.md @@ -294,7 +294,7 @@ agent = project_client.agents.create_agent( -Also notices that if you use asynchronous client, you use `AsyncToolSet` instead. Additional information related to `AsyncFunctionTool` be discussed in the later sections. +Also notice that if you use the asynchronous client, use `AsyncToolSet` instead. Additional information related to `AsyncFunctionTool` will be discussed in the later sections. Here is an example to use `tools` and `tool_resources`: @@ -561,7 +561,7 @@ agent = await project_client.agents.create_agent( -Notices that if `enable_auto_function_calls` is called, the SDK will invoke the functions automatically during `create_and_process_run` or streaming. 
If you prefer to execute them manually, refer to [`sample_agents_stream_eventhandler_with_functions.py`](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_functions.py) or +Notice that if `enable_auto_function_calls` is called, the SDK will invoke the functions automatically during `create_and_process_run` or streaming. If you prefer to execute them manually, refer to [`sample_agents_stream_eventhandler_with_functions.py`](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/sample_agents_stream_eventhandler_with_functions.py) or [`sample_agents_functions.py`](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-projects/samples/agents/sample_agents_functions.py) #### Create Agent With Azure Function Call diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py index d60548fe5141..b292a194771e 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py @@ -7,7 +7,7 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -import asyncio +import asyncio # pylint: disable=do-not-import-asyncio import concurrent.futures import io import logging diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index ad865cc6adbe..13fa95175ad7 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -7,7 +7,7 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -import asyncio +import asyncio # pylint: disable=do-not-import-asyncio import base64 import datetime import inspect @@ -845,12 +845,15 @@ def 
__init__( Constructor initializes the tool with a primary API definition. :param name: The name of the API. + :type name: str :param description: The API description. + :type description: str :param spec: The API specification. + :type spec: Any :param auth: Authentication details for the API. :type auth: OpenApiAuthDetails :param default_parameters: List of OpenAPI spec parameters that will use user-provided defaults. - :type default_parameters: OpenApiAuthDetails + :type default_parameters: Optional[List[str]] """ default_params: List[str] = [] if default_parameters is None else default_parameters self._default_auth = auth diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_trace_function.py b/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_trace_function.py index 1890a6f1e88d..7a1284e88af5 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_trace_function.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/telemetry/_trace_function.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. 
# ------------------------------------ import functools -import asyncio +import asyncio # pylint: disable=do-not-import-asyncio from typing import Any, Callable, Optional, Dict try: From 989c90d44f6c0135f1650a501a66196958959119 Mon Sep 17 00:00:00 2001 From: howieleung Date: Wed, 16 Apr 2025 13:02:27 -0700 Subject: [PATCH 13/13] Fix toolcall error --- .../azure-ai-projects/azure/ai/projects/models/_patch.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py index 13fa95175ad7..62fe1890d7cb 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_patch.py @@ -734,7 +734,7 @@ def execute(self, tool_call: RequiredFunctionToolCall) -> Any: try: function, parsed_arguments = self._get_func_and_args(tool_call) return function(**parsed_arguments) if parsed_arguments else function() - except TypeError as e: + except Exception as e: error_message = f"Error executing function '{tool_call.function.name}': {e}" logging.error(error_message) # Return error message as JSON string back to agent in order to make possible self @@ -745,13 +745,12 @@ def execute(self, tool_call: RequiredFunctionToolCall) -> Any: class AsyncFunctionTool(BaseFunctionTool): async def execute(self, tool_call: RequiredFunctionToolCall) -> Any: # pylint: disable=invalid-overridden-method - function, parsed_arguments = self._get_func_and_args(tool_call) - try: + function, parsed_arguments = self._get_func_and_args(tool_call) if inspect.iscoroutinefunction(function): return await function(**parsed_arguments) if parsed_arguments else await function() return function(**parsed_arguments) if parsed_arguments else function() - except TypeError as e: + except Exception as e: error_message = f"Error executing function '{tool_call.function.name}': {e}" logging.error(error_message) # Return error message 
as JSON string back to agent in order to make possible self correction