From f763ccb5bf50d2190acd288ffd8ad33f137c9fbc Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Mon, 14 Apr 2025 16:38:20 -0700 Subject: [PATCH 1/5] First --- sdk/ai/azure-ai-projects-onedp/README.md | 2 +- .../onedp/operations/_patch_datasets.py | 1 + .../onedp/operations/_patch_inference.py | 2 + .../ai/projects/onedp/prompts/__init__.py | 16 +++ .../azure/ai/projects/onedp/prompts/_patch.py | 124 ++++++++++++++++++ .../azure/ai/projects/onedp/prompts/_utils.py | 39 ++++++ .../dev_requirements.txt | 1 + .../samples/inference/sample1.prompty | 30 +++++ ...e_ai_inference_client_and_prompt_string.py | 77 +++++++++++ ...re_ai_inference_client_and_prompty_file.py | 71 ++++++++++ 10 files changed, 362 insertions(+), 1 deletion(-) create mode 100644 sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/__init__.py create mode 100644 sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_patch.py create mode 100644 sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_utils.py create mode 100644 sdk/ai/azure-ai-projects-onedp/samples/inference/sample1.prompty create mode 100644 sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py create mode 100644 sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py diff --git a/sdk/ai/azure-ai-projects-onedp/README.md b/sdk/ai/azure-ai-projects-onedp/README.md index dba59775b6d9..5d00a519973e 100644 --- a/sdk/ai/azure-ai-projects-onedp/README.md +++ b/sdk/ai/azure-ai-projects-onedp/README.md @@ -435,7 +435,7 @@ Operation returned an invalid status 'Unauthorized' ### Logging -The client uses the standard [Python logging library](https://docs.python.org/3/library/logging.html). The SDK logs HTTP request and response details, which may be useful in troubleshooting. To log to stdout, add the following: +The client uses the standard [Python logging library](https://docs.python.org/3/library/logging.html). The SDK logs HTTP request and response details, which may be useful in troubleshooting. To log to stdout, add the following at the top of your Python script: ```python import sys diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_patch_datasets.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_patch_datasets.py index 3d62a12ed844..13b218c65bec 100644 --- a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_patch_datasets.py +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_patch_datasets.py @@ -192,6 +192,7 @@ def upload_folder_and_create(self, *, name: str, version: str, folder: str, **kw blob_name, ) with file_path.open("rb") as data: # Open the file for reading in binary mode + # TODO: Is there an upload_folder? 
# See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.containerclient?view=azure-python#azure-storage-blob-containerclient-upload-blob container_client.upload_blob(name=str(blob_name), data=data, **kwargs) logger.debug("[upload_folder_and_create] Done uploaded.") diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_patch_inference.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_patch_inference.py index d2f739e8d755..0aea6d42eaeb 100644 --- a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_patch_inference.py +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/operations/_patch_inference.py @@ -90,6 +90,7 @@ def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient": # t ) from e endpoint = self._get_inference_url(self._outer_instance._config.endpoint) # pylint: disable=protected-access + # TODO: Remove this before //build? # Older Inference SDK versions use ml.azure.com as the scope. Make sure to set the correct value here. This # is only relevent of course if EntraID auth is used. credential_scopes = ["https://cognitiveservices.azure.com/.default"] @@ -243,6 +244,7 @@ def get_azure_openai_client( # use https://{resource-name}.openai.azure.com where {resource-name} is the same as the # foundry API endpoint (https://{resource-name}.services.ai.azure.com) + # TODO: Confirm that it's okay to do two REST API calls here. # If the connection uses API key authentication, we need to make another service call to get # the connection with API key populated. if connection.credentials.auth_type == CredentialType.API_KEY: diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/__init__.py new file mode 100644 index 000000000000..f1e98bf1be1a --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/__init__.py @@ -0,0 +1,16 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +# pylint: disable=unused-import +try: + import prompty # pylint: disable=unused-import +except ImportError as exc: + raise ImportError( + "The 'prompty' package is required to use the 'azure.ai.projects.prompts' module. " + "Please install it by running 'pip install prompty'." + ) from exc + +from ._patch import patch_sdk as _patch_sdk, PromptTemplate + +_patch_sdk() diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_patch.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_patch.py new file mode 100644 index 000000000000..13fd07bcac99 --- /dev/null +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_patch.py @@ -0,0 +1,124 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +# pylint: disable=line-too-long,R,no-member +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + +import traceback +from pathlib import Path +from typing import Any, Dict, List, Optional +from typing_extensions import Self +from prompty import headless, load, prepare +from prompty.core import Prompty +from ._utils import remove_leading_empty_space + + +class PromptTemplate: + """The helper class which takes variant of inputs, e.g. Prompty format or string, and returns the parsed prompt in an array. + Prompty library is required to be installed to use this class. + """ + + @classmethod + def from_prompty(cls, file_path: str) -> Self: + """Initialize a PromptTemplate object from a prompty file. + + :param file_path: The path to the prompty file. + :type file_path: str + :return: The PromptTemplate object. + :rtype: PromptTemplate + """ + if not file_path: + raise ValueError("Please provide file_path") + + # Get the absolute path of the file by `traceback.extract_stack()`, it's "-2" because: + # In the stack, the last function is the current function. + # The second last function is the caller function, which is the root of the file_path. + stack = traceback.extract_stack() + caller = Path(stack[-2].filename) + abs_file_path = Path(caller.parent / Path(file_path)).resolve().absolute() + + prompty = load(str(abs_file_path)) + prompty.template.type = "mustache" # For Azure, default to mustache instead of Jinja2 + return cls(prompty=prompty) + + @classmethod + def from_string(cls, prompt_template: str, api: str = "chat", model_name: Optional[str] = None) -> Self: + """Initialize a PromptTemplate object from a message template. + + :param prompt_template: The prompt template string. + :type prompt_template: str + :param api: The API type, e.g. "chat" or "completion". + :type api: str + :param model_name: The model name, e.g. "gpt-4o-mini". + :type model_name: str + :return: The PromptTemplate object. + :rtype: PromptTemplate + """ + prompt_template = remove_leading_empty_space(prompt_template) + prompty = headless(api=api, content=prompt_template) + prompty.template.type = "mustache" # For Azure, default to mustache instead of Jinja2 + prompty.template.parser = "prompty" + return cls( + api=api, + model_name=model_name, + prompty=prompty, + ) + + def __init__( + self, + *, + api: str = "chat", + prompty: Optional[Prompty] = None, + prompt_template: Optional[str] = None, + model_name: Optional[str] = None, + ) -> None: + self.prompty = prompty + if self.prompty is not None: + self.model_name = ( + self.prompty.model.configuration["azure_deployment"] + if "azure_deployment" in self.prompty.model.configuration + else None + ) + self.parameters = self.prompty.model.parameters + self._config = {} + elif prompt_template is not None: + self.model_name = model_name + self.parameters = {} + # _config is a dict to hold the internal configuration + self._config = { + "api": api if api is not None else "chat", + "prompt_template": prompt_template, + } + else: + raise ValueError("Please pass valid arguments for PromptTemplate") + + def create_messages(self, data: Optional[Dict[str, Any]] = None, **kwargs) -> List[Dict[str, Any]]: + """Render the prompt template with the given data. + + :param data: The data to render the prompt template with. + :type data: Optional[Dict[str, Any]] + :return: The rendered prompt template. 
+        :rtype: List[Dict[str, Any]]
+        """
+        if data is None:
+            data = kwargs
+
+        if self.prompty is not None:
+            parsed = prepare(self.prompty, data)
+            return parsed  # type: ignore
+        else:
+            raise ValueError("Please provide a valid prompt template")
+
+
+def patch_sdk():
+    """Do not remove from this file.
+
+    `patch_sdk` is a last resort escape hatch that allows you to do customizations
+    you can't accomplish using the techniques described in
+    https://aka.ms/azsdk/python/dpcodegen/python/customize
+    """
diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_utils.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_utils.py
new file mode 100644
index 000000000000..a85e193322e5
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_utils.py
@@ -0,0 +1,39 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+import sys
+
+
+def remove_leading_empty_space(multiline_str: str) -> str:
+    """
+    Processes a multiline string by:
+    1. Removing empty lines
+    2. Finding the minimum leading spaces
+    3. Indenting all lines to the minimum level
+
+    :param multiline_str: The input multiline string.
+    :type multiline_str: str
+    :return: The processed multiline string.
+    :rtype: str
+    """
+    lines = multiline_str.splitlines()
+    start_index = 0
+    while start_index < len(lines) and lines[start_index].strip() == "":
+        start_index += 1
+
+    # Find the minimum number of leading spaces
+    min_spaces = sys.maxsize
+    for line in lines[start_index:]:
+        if len(line.strip()) == 0:
+            continue
+        spaces = len(line) - len(line.lstrip())
+        spaces += line.lstrip().count("\t") * 2  # Count tabs as 2 spaces
+        min_spaces = min(min_spaces, spaces)
+
+    # Remove leading spaces and indent to the minimum level
+    processed_lines = []
+    for line in lines[start_index:]:
+        processed_lines.append(line[min_spaces:])
+
+    return "\n".join(processed_lines)
diff --git a/sdk/ai/azure-ai-projects-onedp/dev_requirements.txt b/sdk/ai/azure-ai-projects-onedp/dev_requirements.txt
index 55f856d9df00..0d68a203d514 100644
--- a/sdk/ai/azure-ai-projects-onedp/dev_requirements.txt
+++ b/sdk/ai/azure-ai-projects-onedp/dev_requirements.txt
@@ -5,3 +5,4 @@ aiohttp
 azure.storage.blob
 azure.ai.inference
 openai
+prompty
diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample1.prompty b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample1.prompty
new file mode 100644
index 000000000000..6dbcbf40bc6f
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample1.prompty
@@ -0,0 +1,30 @@
+---
+name: Basic Prompt
+description: A basic prompt that uses the chat API to answer questions
+authors:
+  - author_1
+  - author_2
+model:
+  api: chat
+  configuration:
+    azure_deployment: gpt-4o-mini
+  parameters:
+    temperature: 1
+    frequency_penalty: 0.5
+    presence_penalty: 0.5
+---
+system:
+You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services.
+
+# context
+{{#rules}}
+{{rule}}
+{{/rules}}
+
+{{#chat_history}}
+{{role}}:
+{{content}}
+{{/chat_history}}
+
+user:
+{{input}}
diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py
new file mode 100644
index 000000000000..d99334759988
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py
@@ -0,0 +1,77 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+
+"""
+DESCRIPTION:
+    Given an AIProjectClient, this sample demonstrates how to
+    * Get an authenticated ChatCompletionsClient from the azure.ai.inference package
+    * Define a Mustache template and render it with provided parameters to create a list of chat messages.
+    * Perform one chat completion operation.
+    Package azure.ai.inference is required. For more information see https://pypi.org/project/azure-ai-inference/.
+    Package prompty is required. For more information see https://pypi.org/project/prompty/.
+
+USAGE:
+    python sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py
+
+    Before running the sample:
+
+    pip install azure-ai-projects azure-ai-inference azure-identity prompty
+
+    Set these environment variables with your own values:
+    1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
+       Azure AI Foundry project.
+    2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
+"""
+
+import os
+from azure.identity import DefaultAzureCredential
+from azure.ai.projects.onedp import AIProjectClient
+from azure.ai.projects.onedp.prompts import PromptTemplate
+
+endpoint = os.environ["PROJECT_ENDPOINT"]
+model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
+
+with AIProjectClient(
+    endpoint=endpoint,
+    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
+) as project_client:
+
+    with project_client.inference.get_chat_completions_client() as client:
+
+        prompt_template_str = """
+            system:
+            You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services.
+
+            # context
+            {{#rules}}
+            {{rule}}
+            {{/rules}}
+
+            {{#chat_history}}
+            {{role}}:
+            {{content}}
+            {{/chat_history}}
+
+            user:
+            {{input}}
+            """
+        prompt_template = PromptTemplate.from_string(api="chat", prompt_template=prompt_template_str)
+
+        input = "When I arrive, can I still have breakfast?"
+        rules = [
+            {"rule": "The check-in time is 3pm"},
+            {"rule": "The check-out time is 11am"},
+            {"rule": "Breakfast is served from 7am to 10am"},
+        ]
+        chat_history = [
+            {"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"},
+            {"role": "assistant", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."},
+        ]
+        messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history)
+        print(messages)
+
+        response = client.complete(model=model_deployment_name, messages=messages)
+
+        print(response.choices[0].message.content)
diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py
new file mode 100644
index 000000000000..f55d42327e80
--- /dev/null
+++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py
@@ -0,0 +1,71 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+
+"""
+DESCRIPTION:
+    Given an AIProjectClient, this sample demonstrates how to
+    * Get an authenticated ChatCompletionsClient from the azure.ai.inference package
+    * Load a Prompty file and render a template with provided parameters to create a list of chat messages.
+    * Perform one chat completion operation.
+    Package azure.ai.inference is required. For more information see https://pypi.org/project/azure-ai-inference/.
+    Package prompty is required. For more information see https://pypi.org/project/prompty/.
+
+USAGE:
+    python sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py
+
+    Before running the sample:
+
+    pip install azure-ai-projects azure-ai-inference azure-identity prompty
+
+    Set these environment variables with your own values:
+    1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
+       Azure AI Foundry project.
+    2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
+"""
+
+import os
+from azure.identity import DefaultAzureCredential
+from azure.ai.projects.onedp import AIProjectClient
+from azure.ai.projects.onedp.prompts import PromptTemplate
+
+# TODO: Remove console logging
+import sys
+import logging
+logger = logging.getLogger("azure")
+logger.setLevel(logging.DEBUG)
+logger.addHandler(logging.StreamHandler(stream=sys.stdout))
+identity_logger = logging.getLogger("azure.identity")
+identity_logger.setLevel(logging.ERROR)
+# End logging
+
+endpoint = os.environ["PROJECT_ENDPOINT"]
+model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
+
+with AIProjectClient(
+    endpoint=endpoint,
+    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
+    logging_enable=True
+) as project_client:
+
+    with project_client.inference.get_chat_completions_client() as client:
+
+        path = "./sample1.prompty"
+        prompt_template = PromptTemplate.from_prompty(file_path=path)
+
+        input = "When I arrive, can I still have breakfast?"
+        rules = [
+            {"rule": "The check-in time is 3pm"},
+            {"rule": "The check-out time is 11am"},
+            {"rule": "Breakfast is served from 7am to 10am"},
+        ]
+        chat_history = [
+            {"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"},
+            {"role": "assistant", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."},
+        ]
+        messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history)
+        print(messages)
+        response = client.complete(model=model_deployment_name, messages=messages)
+
+        print(response.choices[0].message.content)

From fe7cadd7e66ed5bca8754eefc21c60fb2d292dec Mon Sep 17 00:00:00 2001
From: Darren Cohen <39422044+dargilco@users.noreply.github.com>
Date: Mon, 14 Apr 2025 16:45:58 -0700
Subject: [PATCH 2/5] Move utility function to patch file

---
 .../azure/ai/projects/onedp/prompts/_patch.py | 38 +++++++++++++++++-
 .../azure/ai/projects/onedp/prompts/_utils.py | 39 -------------------
 2 files changed, 36 insertions(+), 41 deletions(-)
 delete mode 100644 sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_utils.py

diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_patch.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_patch.py
index 13fd07bcac99..8ef5dd49f0a3 100644
--- a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_patch.py
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_patch.py
@@ -10,12 +10,12 @@
 """
 
 import traceback
+import sys
 from pathlib import Path
 from typing import Any, Dict, List, Optional
 from typing_extensions import Self
 from prompty import headless, load, prepare
 from prompty.core import Prompty
-from ._utils import remove_leading_empty_space
 
 
 class PromptTemplate:
@@ -59,7 +59,7 @@ def from_string(cls, prompt_template: str, api: str = "chat", model_name: Option
         :return: The PromptTemplate object.
         :rtype: PromptTemplate
         """
-        prompt_template = remove_leading_empty_space(prompt_template)
+        prompt_template = _remove_leading_empty_space(prompt_template)
         prompty = headless(api=api, content=prompt_template)
         prompty.template.type = "mustache"  # For Azure, default to mustache instead of Jinja2
         prompty.template.parser = "prompty"
@@ -69,6 +69,40 @@ def from_string(cls, prompt_template: str, api: str = "chat", model_name: Option
             prompty=prompty,
         )
 
+    @classmethod
+    def _remove_leading_empty_space(cls, multiline_str: str) -> str:
+        """
+        Processes a multiline string by:
+        1. Removing leading empty lines
+        2. Finding the minimum number of leading spaces
+        3. Dedenting all lines by that minimum
+
+        :param multiline_str: The input multiline string.
+        :type multiline_str: str
+        :return: The processed multiline string.
+ :rtype: str + """ + lines = multiline_str.splitlines() + start_index = 0 + while start_index < len(lines) and lines[start_index].strip() == "": + start_index += 1 + + # Find the minimum number of leading spaces + min_spaces = sys.maxsize + for line in lines[start_index:]: + if len(line.strip()) == 0: + continue + spaces = len(line) - len(line.lstrip()) + spaces += line.lstrip().count("\t") * 2 # Count tabs as 2 spaces + min_spaces = min(min_spaces, spaces) + + # Remove leading spaces and indent to the minimum level + processed_lines = [] + for line in lines[start_index:]: + processed_lines.append(line[min_spaces:]) + + return "\n".join(processed_lines) + def __init__( self, *, diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_utils.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_utils.py deleted file mode 100644 index a85e193322e5..000000000000 --- a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_utils.py +++ /dev/null @@ -1,39 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -import sys - - -def remove_leading_empty_space(multiline_str: str) -> str: - """ - Processes a multiline string by: - 1. Removing empty lines - 2. Finding the minimum leading spaces - 3. Indenting all lines to the minimum level - - :param multiline_str: The input multiline string. - :type multiline_str: str - :return: The processed multiline string. - :rtype: str - """ - lines = multiline_str.splitlines() - start_index = 0 - while start_index < len(lines) and lines[start_index].strip() == "": - start_index += 1 - - # Find the minimum number of leading spaces - min_spaces = sys.maxsize - for line in lines[start_index:]: - if len(line.strip()) == 0: - continue - spaces = len(line) - len(line.lstrip()) - spaces += line.lstrip().count("\t") * 2 # Count tabs as 2 spaces - min_spaces = min(min_spaces, spaces) - - # Remove leading spaces and indent to the minimum level - processed_lines = [] - for line in lines[start_index:]: - processed_lines.append(line[min_spaces:]) - - return "\n".join(processed_lines) From 47d53b82fe20cf420df4f7cdedbef693e90b3454 Mon Sep 17 00:00:00 2001 From: Darren Cohen <39422044+dargilco@users.noreply.github.com> Date: Mon, 14 Apr 2025 19:40:41 -0700 Subject: [PATCH 3/5] Move PromptTemplate class to root --- .../azure/ai/projects/onedp/_patch.py | 6 ++- .../{prompts/_patch.py => _patch_prompts.py} | 39 ++++++++++++++++--- .../ai/projects/onedp/prompts/__init__.py | 16 -------- ...e_ai_inference_client_and_prompt_string.py | 3 +- ...re_ai_inference_client_and_prompty_file.py | 14 +------ 5 files changed, 41 insertions(+), 37 deletions(-) rename sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/{prompts/_patch.py => _patch_prompts.py} (80%) delete mode 100644 sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/__init__.py diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch.py index efc3c278139e..37c3044466c1 100644 --- a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch.py +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch.py @@ -10,6 +10,7 @@ from azure.core.credentials import AzureKeyCredential, TokenCredential from ._client import AIProjectClient as AIProjectClientGenerated from .operations import TelemetryOperations, InferenceOperations, AssistantsOperations +from ._patch_prompts import 
PromptTemplate class AIProjectClient(AIProjectClientGenerated): # pylint: disable=too-many-instance-attributes @@ -54,7 +55,10 @@ def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, TokenCre self.assistants = AssistantsOperations(self) -__all__: List[str] = ["AIProjectClient"] # Add all objects you want publicly available to users at this package level +__all__: List[str] = [ + "AIProjectClient", + "PromptTemplate", +] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_patch.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py similarity index 80% rename from sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_patch.py rename to sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py index 8ef5dd49f0a3..3be4d3a93d5c 100644 --- a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/_patch.py +++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py @@ -14,15 +14,18 @@ from pathlib import Path from typing import Any, Dict, List, Optional from typing_extensions import Self -from prompty import headless, load, prepare -from prompty.core import Prompty class PromptTemplate: """The helper class which takes variant of inputs, e.g. Prompty format or string, and returns the parsed prompt in an array. - Prompty library is required to be installed to use this class. + Prompty library is required to use this class (`pip install prompty`). """ + _MISSING_PROMPTY_PACKAGE_MESSAGE = ( + "The 'prompty' package is required in order to use the 'PromptTemplate' class. " + "Please install it by running 'pip install prompty'." + ) + @classmethod def from_prompty(cls, file_path: str) -> Self: """Initialize a PromptTemplate object from a prompty file. @@ -35,6 +38,11 @@ def from_prompty(cls, file_path: str) -> Self: if not file_path: raise ValueError("Please provide file_path") + try: + from prompty import load + except ImportError as exc: + raise ImportError(cls._MISSING_PROMPTY_PACKAGE_MESSAGE) from exc + # Get the absolute path of the file by `traceback.extract_stack()`, it's "-2" because: # In the stack, the last function is the current function. # The second last function is the caller function, which is the root of the file_path. @@ -59,7 +67,12 @@ def from_string(cls, prompt_template: str, api: str = "chat", model_name: Option :return: The PromptTemplate object. :rtype: PromptTemplate """ - prompt_template = _remove_leading_empty_space(prompt_template) + try: + from prompty import headless + except ImportError as exc: + raise ImportError(cls._MISSING_PROMPTY_PACKAGE_MESSAGE) from exc + + prompt_template = cls._remove_leading_empty_space(prompt_template) prompty = headless(api=api, content=prompt_template) prompty.template.type = "mustache" # For Azure, default to mustache instead of Jinja2 prompty.template.parser = "prompty" @@ -107,10 +120,21 @@ def __init__( self, *, api: str = "chat", - prompty: Optional[Prompty] = None, + prompty: Optional["Prompty"] = None, prompt_template: Optional[str] = None, model_name: Optional[str] = None, ) -> None: + """Create a PromptTemplate object. + + :keyword api: The API type. + :paramtype api: str + :keyword prompty: Optional Prompty object. + :paramtype prompty: ~prompty.Prompty or None. + :keyword prompt_tmplate: Optional prompt template string. + :paramtype prompt_template: str or None. + :keyword model_name: Optional AI Model name. 
+ :paramtype model_name: str or None. + """ self.prompty = prompty if self.prompty is not None: self.model_name = ( @@ -139,6 +163,11 @@ def create_messages(self, data: Optional[Dict[str, Any]] = None, **kwargs) -> Li :return: The rendered prompt template. :rtype: List[Dict[str, Any]] """ + try: + from prompty import prepare + except ImportError as exc: + raise ImportError(self._MISSING_PROMPTY_PACKAGE_MESSAGE) from exc + if data is None: data = kwargs diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/__init__.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/__init__.py deleted file mode 100644 index f1e98bf1be1a..000000000000 --- a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/prompts/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -# pylint: disable=unused-import -try: - import prompty # pylint: disable=unused-import -except ImportError as exc: - raise ImportError( - "The 'prompty' package is required to use the 'azure.ai.projects.prompts' module. " - "Please install it by running 'pip install prompty'." - ) from exc - -from ._patch import patch_sdk as _patch_sdk, PromptTemplate - -_patch_sdk() diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py index d99334759988..8dbfb7a7a31d 100644 --- a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py +++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py @@ -27,8 +27,7 @@ import os from azure.identity import DefaultAzureCredential -from azure.ai.projects.onedp import AIProjectClient -from azure.ai.projects.onedp.prompts import PromptTemplate +from azure.ai.projects.onedp import AIProjectClient, PromptTemplate endpoint = os.environ["PROJECT_ENDPOINT"] model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] diff --git a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py index f55d42327e80..dbaf8d16848f 100644 --- a/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py +++ b/sdk/ai/azure-ai-projects-onedp/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py @@ -27,18 +27,7 @@ import os from azure.identity import DefaultAzureCredential -from azure.ai.projects.onedp import AIProjectClient -from azure.ai.projects.onedp.prompts import PromptTemplate - -# TODO: Remove console logging -import sys -import logging -logger = logging.getLogger("azure") -logger.setLevel(logging.DEBUG) -logger.addHandler(logging.StreamHandler(stream=sys.stdout)) -identity_logger = logging.getLogger("azure.identity") -identity_logger.setLevel(logging.ERROR) -# End logging +from azure.ai.projects.onedp import AIProjectClient, PromptTemplate endpoint = os.environ["PROJECT_ENDPOINT"] model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] @@ -46,7 +35,6 @@ with AIProjectClient( endpoint=endpoint, 
     credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
-    logging_enable=True
 ) as project_client:
 
     with project_client.inference.get_chat_completions_client() as client:
 

From 932473e2be6473e0d4c83bc1f2b9ae86c7add328 Mon Sep 17 00:00:00 2001
From: Darren Cohen <39422044+dargilco@users.noreply.github.com>
Date: Mon, 14 Apr 2025 20:13:13 -0700
Subject: [PATCH 4/5] Suppress mypy error

---
 .../azure/ai/projects/onedp/_patch_prompts.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py
index 3be4d3a93d5c..f31c1d3f4f51 100644
--- a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py
@@ -17,7 +17,7 @@
 
 
 class PromptTemplate:
-    """The helper class which takes variant of inputs, e.g. Prompty format or string, and returns the parsed prompt in an array.
+    """A helper class which takes a variety of inputs, e.g. a Prompty file or a template string, and returns the parsed prompt as an array of chat messages.
     Prompty library is required to use this class (`pip install prompty`).
     """
 
@@ -120,7 +120,7 @@ def __init__(
         self,
         *,
         api: str = "chat",
-        prompty: Optional["Prompty"] = None,
+        prompty: Optional["Prompty"] = None,  # type: ignore[name-defined]
         prompt_template: Optional[str] = None,
         model_name: Optional[str] = None,
     ) -> None:

From 94800cd7e6b5191e7cc89bd6ce39dd54dc6ae544 Mon Sep 17 00:00:00 2001
From: Darren Cohen <39422044+dargilco@users.noreply.github.com>
Date: Tue, 15 Apr 2025 06:15:16 -0700
Subject: [PATCH 5/5] Fix spelling

---
 .../azure/ai/projects/onedp/_patch_prompts.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py
index f31c1d3f4f51..36d66f1c7bf1 100644
--- a/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py
+++ b/sdk/ai/azure-ai-projects-onedp/azure/ai/projects/onedp/_patch_prompts.py
@@ -130,7 +130,7 @@ def __init__(
         :paramtype api: str
         :keyword prompty: Optional Prompty object.
         :paramtype prompty: ~prompty.Prompty or None.
-        :keyword prompt_tmplate: Optional prompt template string.
+        :keyword prompt_template: Optional prompt template string.
         :paramtype prompt_template: str or None.
        :keyword model_name: Optional AI Model name.
         :paramtype model_name: str or None.
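
A minimal sketch of the `PromptTemplate` flow this series introduces, for exercising the class without a deployed model. It assumes the patches above are applied and `prompty` is installed; no endpoint or credential is needed, because `create_messages` renders the mustache template entirely client-side, before any `client.complete` call.

```python
# A minimal sketch, assuming the patches above are applied and `prompty` is
# installed (`pip install prompty`). Rendering happens entirely client-side,
# so no Azure endpoint or credential is required for this snippet.
from azure.ai.projects.onedp import PromptTemplate

prompt_template = PromptTemplate.from_string(
    api="chat",
    prompt_template="""
    system:
    You are an AI assistant in a hotel.
    {{#rules}}
    {{rule}}
    {{/rules}}

    user:
    {{input}}
    """,
)

# create_messages renders the template and parses it into role-tagged chat
# messages, e.g. [{"role": "system", ...}, {"role": "user", ...}], which can
# then be passed to ChatCompletionsClient.complete as in the samples above.
messages = prompt_template.create_messages(
    input="What time is breakfast served?",
    rules=[{"rule": "Breakfast is served from 7am to 10am"}],
)
print(messages)
```

Because `from_string` first dedents the template via `_remove_leading_empty_space`, the indented triple-quoted string above renders the same as a flush-left template.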