Skip to content

Push updates from the latest typespec #40616

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Apr 22, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 14 additions & 1 deletion sdk/ai/azure-ai-assistants/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -652,7 +652,7 @@ auth = OpenApiAnonymousAuthDetails()

# Initialize assistant OpenApi tool using the read in OpenAPI spec
openapi_tool = OpenApiTool(
name="get_weather", spec=openapi_weather, description="Retrieve weather information for a location", auth=auth
name="get_weather", spec=openapi_weather, description="Retrieve weather information for a location", auth=auth, default_parameters=["format"]
)
openapi_tool.add_definition(
name="get_countries", spec=openapi_countries, description="Retrieve a list of countries", auth=auth
Expand Down Expand Up @@ -743,6 +743,19 @@ thread = assistants_client.create_thread(tool_resources=file_search.resources)
```

<!-- END SNIPPET -->

#### List Threads

To list all threads that were previously created, use the `list_threads` API:

<!-- SNIPPET:sample_assistants_basics.list_threads -->

```python
threads = assistants_client.list_threads()
```

<!-- END SNIPPET -->

### Create Message

To create a message for assistant to process, you pass `user` as `role` and a question as `content`:
Expand Down
4 changes: 4 additions & 0 deletions sdk/ai/azure-ai-assistants/apiview-properties.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
"azure.ai.assistants.models.BingGroundingToolDefinition": "Azure.AI.Assistants.BingGroundingToolDefinition",
"azure.ai.assistants.models.CodeInterpreterToolDefinition": "Azure.AI.Assistants.CodeInterpreterToolDefinition",
"azure.ai.assistants.models.CodeInterpreterToolResource": "Azure.AI.Assistants.CodeInterpreterToolResource",
"azure.ai.assistants.models.ConnectedAgentDetails": "Azure.AI.Assistants.ConnectedAgentDetails",
"azure.ai.assistants.models.ConnectedAgentToolDefinition": "Azure.AI.Assistants.ConnectedAgentToolDefinition",
"azure.ai.assistants.models.FileDeletionStatus": "Azure.AI.Assistants.FileDeletionStatus",
"azure.ai.assistants.models.FileListResponse": "Azure.AI.Assistants.FileListResponse",
"azure.ai.assistants.models.FileSearchRankingOptions": "Azure.AI.Assistants.FileSearchRankingOptions",
Expand Down Expand Up @@ -67,6 +69,7 @@
"azure.ai.assistants.models.MicrosoftFabricToolDefinition": "Azure.AI.Assistants.MicrosoftFabricToolDefinition",
"azure.ai.assistants.models.OpenAIFile": "Azure.AI.Assistants.OpenAIFile",
"azure.ai.assistants.models.OpenAIPageableListOfAssistant": "Azure.AI.Assistants.OpenAIPageableListOf",
"azure.ai.assistants.models.OpenAIPageableListOfAssistantThread": "Azure.AI.Assistants.OpenAIPageableListOf",
"azure.ai.assistants.models.OpenAIPageableListOfRunStep": "Azure.AI.Assistants.OpenAIPageableListOf",
"azure.ai.assistants.models.OpenAIPageableListOfThreadMessage": "Azure.AI.Assistants.OpenAIPageableListOf",
"azure.ai.assistants.models.OpenAIPageableListOfThreadRun": "Azure.AI.Assistants.OpenAIPageableListOf",
Expand Down Expand Up @@ -210,6 +213,7 @@
"azure.ai.assistants.AssistantsClient.get_thread": "Azure.AI.Assistants.getThread",
"azure.ai.assistants.AssistantsClient.update_thread": "Azure.AI.Assistants.updateThread",
"azure.ai.assistants.AssistantsClient.delete_thread": "Azure.AI.Assistants.deleteThread",
"azure.ai.assistants.AssistantsClient.list_threads": "Azure.AI.Assistants.listThreads",
"azure.ai.assistants.AssistantsClient.create_message": "Azure.AI.Assistants.createMessage",
"azure.ai.assistants.AssistantsClient.list_messages": "Azure.AI.Assistants.listMessages",
"azure.ai.assistants.AssistantsClient.get_message": "Azure.AI.Assistants.getMessage",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -279,6 +279,40 @@ def build_assistants_delete_thread_request(thread_id: str, **kwargs: Any) -> Htt
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)


def build_assistants_list_threads_request(
    *,
    limit: Optional[int] = None,
    order: Optional[Union[str, _models.ListSortOrder]] = None,
    after: Optional[str] = None,
    before: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    """Assemble the GET /threads HTTP request, serializing the optional paging
    and sort options into query parameters.

    Caller-supplied ``headers``/``params`` kwargs are merged case-insensitively;
    ``api_version`` may be overridden via kwargs or a pre-set ``api-version`` param.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/threads"

    # Construct parameters: api-version is mandatory, the rest only when provided.
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    optional_query = (
        ("limit", limit, "int"),
        ("order", order, "str"),
        ("after", after, "str"),
        ("before", before, "str"),
    )
    for param_name, param_value, param_type in optional_query:
        if param_value is not None:
            _params[param_name] = _SERIALIZER.query(param_name, param_value, param_type)

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_assistants_create_message_request(thread_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
Expand Down Expand Up @@ -2326,6 +2360,92 @@ def delete_thread(self, thread_id: str, **kwargs: Any) -> _models.ThreadDeletion

return deserialized # type: ignore

@distributed_trace
def list_threads(
    self,
    *,
    limit: Optional[int] = None,
    order: Optional[Union[str, _models.ListSortOrder]] = None,
    after: Optional[str] = None,
    before: Optional[str] = None,
    **kwargs: Any
) -> _models.OpenAIPageableListOfAssistantThread:
    """Gets a list of threads that were previously created.

    :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and
     100, and the default is 20. Default value is None.
    :paramtype limit: int
    :keyword order: Sort order by the created_at timestamp of the objects. asc for ascending order
     and desc for descending order. Known values are: "asc" and "desc". Default value is None.
    :paramtype order: str or ~azure.ai.assistants.models.ListSortOrder
    :keyword after: A cursor for use in pagination. after is an object ID that defines your place
     in the list. For instance, if you make a list request and receive 100 objects, ending with
     obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the
     list. Default value is None.
    :paramtype after: str
    :keyword before: A cursor for use in pagination. before is an object ID that defines your place
     in the list. For instance, if you make a list request and receive 100 objects, ending with
     obj_foo, your subsequent call can include before=obj_foo in order to fetch the previous page of
     the list. Default value is None.
    :paramtype before: str
    :return: OpenAIPageableListOfAssistantThread. The OpenAIPageableListOfAssistantThread is
     compatible with MutableMapping
    :rtype: ~azure.ai.assistants.models.OpenAIPageableListOfAssistantThread
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Map HTTP status codes to azure-core exception types; callers may
    # extend or override this mapping via the "error_map" kwarg.
    error_map: MutableMapping = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = kwargs.pop("params", {}) or {}

    # Optional custom deserialization callback (standard azure-core "cls" hook).
    cls: ClsType[_models.OpenAIPageableListOfAssistantThread] = kwargs.pop("cls", None)

    # Build the GET /threads request with the optional paging/sort query parameters.
    _request = build_assistants_list_threads_request(
        limit=limit,
        order=order,
        after=after,
        before=before,
        api_version=self._config.api_version,
        headers=_headers,
        params=_params,
    )
    # Substitute the client endpoint into the request URL template.
    path_format_arguments = {
        "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
    }
    _request.url = self._client.format_url(_request.url, **path_format_arguments)

    # When streaming is requested, the raw byte iterator is returned instead
    # of a deserialized model.
    _stream = kwargs.pop("stream", False)
    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        _request, stream=_stream, **kwargs
    )

    response = pipeline_response.http_response

    if response.status_code not in [200]:
        if _stream:
            try:
                # Drain the body so the connection can be returned to the
                # pool before raising.
                response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response)

    if _stream:
        deserialized = response.iter_bytes()
    else:
        deserialized = _deserialize(_models.OpenAIPageableListOfAssistantThread, response.json())

    # Give the custom "cls" callback the raw pipeline response alongside the
    # deserialized payload, if one was supplied.
    if cls:
        return cls(pipeline_response, deserialized, {})  # type: ignore

    return deserialized  # type: ignore

@overload
def create_message(
self,
Expand Down
4 changes: 2 additions & 2 deletions sdk/ai/azure-ai-assistants/azure/ai/assistants/_patch.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, TokenCre
f"/Microsoft.MachineLearningServices/workspaces/{project_name}"
)
# Override the credential scope with the legacy one.
kwargs['credential_scopes'] = ["https://management.azure.com/.default"]
kwargs["credential_scopes"] = ["https://management.azure.com/.default"]
# End of legacy endpoints handling.
super().__init__(endpoint, credential, **kwargs)
self._toolset: Dict[str, _models.ToolSet] = {}
Expand Down Expand Up @@ -1791,7 +1791,7 @@ def upload_file(
"""
Uploads a file for use by other operations, delegating to the generated operations.



:param body: JSON. Required if `file` and `purpose` are not provided.
:type body: Optional[JSON]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
build_assistants_list_messages_request,
build_assistants_list_run_steps_request,
build_assistants_list_runs_request,
build_assistants_list_threads_request,
build_assistants_list_vector_store_file_batch_files_request,
build_assistants_list_vector_store_files_request,
build_assistants_list_vector_stores_request,
Expand Down Expand Up @@ -1222,6 +1223,92 @@ async def delete_thread(self, thread_id: str, **kwargs: Any) -> _models.ThreadDe

return deserialized # type: ignore

@distributed_trace_async
async def list_threads(
    self,
    *,
    limit: Optional[int] = None,
    order: Optional[Union[str, _models.ListSortOrder]] = None,
    after: Optional[str] = None,
    before: Optional[str] = None,
    **kwargs: Any
) -> _models.OpenAIPageableListOfAssistantThread:
    """Gets a list of threads that were previously created.

    :keyword limit: A limit on the number of objects to be returned. Limit can range between 1 and
     100, and the default is 20. Default value is None.
    :paramtype limit: int
    :keyword order: Sort order by the created_at timestamp of the objects. asc for ascending order
     and desc for descending order. Known values are: "asc" and "desc". Default value is None.
    :paramtype order: str or ~azure.ai.assistants.models.ListSortOrder
    :keyword after: A cursor for use in pagination. after is an object ID that defines your place
     in the list. For instance, if you make a list request and receive 100 objects, ending with
     obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the
     list. Default value is None.
    :paramtype after: str
    :keyword before: A cursor for use in pagination. before is an object ID that defines your place
     in the list. For instance, if you make a list request and receive 100 objects, ending with
     obj_foo, your subsequent call can include before=obj_foo in order to fetch the previous page of
     the list. Default value is None.
    :paramtype before: str
    :return: OpenAIPageableListOfAssistantThread. The OpenAIPageableListOfAssistantThread is
     compatible with MutableMapping
    :rtype: ~azure.ai.assistants.models.OpenAIPageableListOfAssistantThread
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Map HTTP status codes to azure-core exception types; callers may
    # extend or override this mapping via the "error_map" kwarg.
    error_map: MutableMapping = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = kwargs.pop("params", {}) or {}

    # Optional custom deserialization callback (standard azure-core "cls" hook).
    cls: ClsType[_models.OpenAIPageableListOfAssistantThread] = kwargs.pop("cls", None)

    # Build the GET /threads request with the optional paging/sort query
    # parameters (shared with the sync client).
    _request = build_assistants_list_threads_request(
        limit=limit,
        order=order,
        after=after,
        before=before,
        api_version=self._config.api_version,
        headers=_headers,
        params=_params,
    )
    # Substitute the client endpoint into the request URL template.
    path_format_arguments = {
        "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
    }
    _request.url = self._client.format_url(_request.url, **path_format_arguments)

    # When streaming is requested, the raw async byte iterator is returned
    # instead of a deserialized model.
    _stream = kwargs.pop("stream", False)
    pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        _request, stream=_stream, **kwargs
    )

    response = pipeline_response.http_response

    if response.status_code not in [200]:
        if _stream:
            try:
                # Drain the body so the connection can be returned to the
                # pool before raising.
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response)

    if _stream:
        deserialized = response.iter_bytes()
    else:
        deserialized = _deserialize(_models.OpenAIPageableListOfAssistantThread, response.json())

    # Give the custom "cls" callback the raw pipeline response alongside the
    # deserialized payload, if one was supplied.
    if cls:
        return cls(pipeline_response, deserialized, {})  # type: ignore

    return deserialized  # type: ignore

@overload
async def create_message(
self,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def __init__(
f"/Microsoft.MachineLearningServices/workspaces/{project_name}"
)
# Override the credential scope with the legacy one.
kwargs['credential_scopes'] = ["https://management.azure.com/.default"]
kwargs["credential_scopes"] = ["https://management.azure.com/.default"]
# End of legacy endpoints handling.
super().__init__(endpoint, credential, **kwargs)
self._toolset: Dict[str, _models.AsyncToolSet] = {}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@
BingGroundingToolDefinition,
CodeInterpreterToolDefinition,
CodeInterpreterToolResource,
ConnectedAgentDetails,
ConnectedAgentToolDefinition,
FileDeletionStatus,
FileListResponse,
FileSearchRankingOptions,
Expand Down Expand Up @@ -79,6 +81,7 @@
MicrosoftFabricToolDefinition,
OpenAIFile,
OpenAIPageableListOfAssistant,
OpenAIPageableListOfAssistantThread,
OpenAIPageableListOfRunStep,
OpenAIPageableListOfThreadMessage,
OpenAIPageableListOfThreadRun,
Expand Down Expand Up @@ -240,6 +243,8 @@
"BingGroundingToolDefinition",
"CodeInterpreterToolDefinition",
"CodeInterpreterToolResource",
"ConnectedAgentDetails",
"ConnectedAgentToolDefinition",
"FileDeletionStatus",
"FileListResponse",
"FileSearchRankingOptions",
Expand Down Expand Up @@ -288,6 +293,7 @@
"MicrosoftFabricToolDefinition",
"OpenAIFile",
"OpenAIPageableListOfAssistant",
"OpenAIPageableListOfAssistantThread",
"OpenAIPageableListOfRunStep",
"OpenAIPageableListOfThreadMessage",
"OpenAIPageableListOfThreadRun",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,8 @@ class AssistantsNamedToolChoiceType(str, Enum, metaclass=CaseInsensitiveEnumMeta
"""Tool type ``azure_ai_search``"""
BING_CUSTOM_SEARCH = "bing_custom_search"
"""Tool type ``bing_custom_search``"""
CONNECTED_AGENT = "connected_agent"
"""Tool type ``connected_agent``"""


class AssistantStreamEvent(str, Enum, metaclass=CaseInsensitiveEnumMeta):
Expand Down
Loading
Loading