
Copy over prompt operations #40508


Merged
Changes from 4 commits
2 changes: 1 addition & 1 deletion sdk/ai/azure-ai-projects-onedp/README.md
@@ -435,7 +435,7 @@ Operation returned an invalid status 'Unauthorized'

### Logging

- The client uses the standard [Python logging library](https://docs.python.org/3/library/logging.html). The SDK logs HTTP request and response details, which may be useful in troubleshooting. To log to stdout, add the following:
+ The client uses the standard [Python logging library](https://docs.python.org/3/library/logging.html). The SDK logs HTTP request and response details, which may be useful in troubleshooting. To log to stdout, add the following at the top of your Python script:

```python
import sys
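import logging

# The diff is collapsed at this point; the lines below are the typical
# continuation of the standard Azure SDK logging snippet, shown here as an
# assumption for readability.
logger = logging.getLogger("azure")  # Covers azure.core and this package
logger.setLevel(logging.DEBUG)  # DEBUG includes HTTP request and response details

# Direct log output to stdout
handler = logging.StreamHandler(stream=sys.stdout)
logger.addHandler(handler)
```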
@@ -10,6 +10,7 @@
from azure.core.credentials import AzureKeyCredential, TokenCredential
from ._client import AIProjectClient as AIProjectClientGenerated
from .operations import TelemetryOperations, InferenceOperations, AssistantsOperations
+ from ._patch_prompts import PromptTemplate


class AIProjectClient(AIProjectClientGenerated): # pylint: disable=too-many-instance-attributes
@@ -54,7 +55,10 @@ def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, TokenCre
        self.assistants = AssistantsOperations(self)


- __all__: List[str] = ["AIProjectClient"]  # Add all objects you want publicly available to users at this package level
+ __all__: List[str] = [
+     "AIProjectClient",
+     "PromptTemplate",
+ ]  # Add all objects you want publicly available to users at this package level


def patch_sdk():
@@ -0,0 +1,187 @@
# pylint: disable=line-too-long,useless-suppression
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
# pylint: disable=line-too-long,R,no-member
"""Customize generated code here.

Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""

import traceback
import sys
from pathlib import Path
from typing import Any, Dict, List, Optional
from typing_extensions import Self


class PromptTemplate:
"""A helper class which takes variant of inputs, e.g. Prompty format or string, and returns the parsed prompt in an array.
Prompty library is required to use this class (`pip install prompty`).
"""

    _MISSING_PROMPTY_PACKAGE_MESSAGE = (
        "The 'prompty' package is required in order to use the 'PromptTemplate' class. "
        "Please install it by running 'pip install prompty'."
    )

    @classmethod
    def from_prompty(cls, file_path: str) -> Self:
        """Initialize a PromptTemplate object from a prompty file.

        :param file_path: The path to the prompty file.
        :type file_path: str
        :return: The PromptTemplate object.
        :rtype: PromptTemplate
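
        Illustrative usage (the file path is a hypothetical example, not from this diff):
            prompt_template = PromptTemplate.from_prompty(file_path="path/to/sample.prompty")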
"""
if not file_path:
raise ValueError("Please provide file_path")

try:
from prompty import load
except ImportError as exc:
raise ImportError(cls._MISSING_PROMPTY_PACKAGE_MESSAGE) from exc

# Get the absolute path of the file by `traceback.extract_stack()`, it's "-2" because:
# In the stack, the last function is the current function.
# The second last function is the caller function, which is the root of the file_path.
stack = traceback.extract_stack()
caller = Path(stack[-2].filename)
abs_file_path = Path(caller.parent / Path(file_path)).resolve().absolute()

prompty = load(str(abs_file_path))
prompty.template.type = "mustache" # For Azure, default to mustache instead of Jinja2
return cls(prompty=prompty)

    @classmethod
    def from_string(cls, prompt_template: str, api: str = "chat", model_name: Optional[str] = None) -> Self:
        """Initialize a PromptTemplate object from a message template.

        :param prompt_template: The prompt template string.
        :type prompt_template: str
        :param api: The API type, e.g. "chat" or "completion".
        :type api: str
        :param model_name: The model name, e.g. "gpt-4o-mini".
        :type model_name: str
        :return: The PromptTemplate object.
        :rtype: PromptTemplate
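
        Illustrative usage (hypothetical template string, not from this diff):
            prompt_template = PromptTemplate.from_string("user: {{input}}", api="chat")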
"""
try:
from prompty import headless
except ImportError as exc:
raise ImportError(cls._MISSING_PROMPTY_PACKAGE_MESSAGE) from exc

prompt_template = cls._remove_leading_empty_space(prompt_template)
prompty = headless(api=api, content=prompt_template)
prompty.template.type = "mustache" # For Azure, default to mustache instead of Jinja2
prompty.template.parser = "prompty"
return cls(
api=api,
model_name=model_name,
prompty=prompty,
)

    @classmethod
    def _remove_leading_empty_space(cls, multiline_str: str) -> str:
        """
        Processes a multiline string by:
        1. Removing leading empty lines
        2. Finding the minimum number of leading spaces across the remaining lines
        3. Removing that common indentation from every line

        :param multiline_str: The input multiline string.
        :type multiline_str: str
        :return: The processed multiline string.
        :rtype: str
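
        For example (illustrative), a template defined as an indented triple-quoted
        string inside a function comes back with the leading blank line dropped and
        the common indentation removed, so the prompty parser sees it flush-left.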
"""
lines = multiline_str.splitlines()
start_index = 0
while start_index < len(lines) and lines[start_index].strip() == "":
start_index += 1

# Find the minimum number of leading spaces
min_spaces = sys.maxsize
for line in lines[start_index:]:
if len(line.strip()) == 0:
continue
spaces = len(line) - len(line.lstrip())
spaces += line.lstrip().count("\t") * 2 # Count tabs as 2 spaces
min_spaces = min(min_spaces, spaces)

# Remove leading spaces and indent to the minimum level
processed_lines = []
for line in lines[start_index:]:
processed_lines.append(line[min_spaces:])

return "\n".join(processed_lines)

    def __init__(
        self,
        *,
        api: str = "chat",
        prompty: Optional["Prompty"] = None,  # type: ignore[name-defined]
        prompt_template: Optional[str] = None,
        model_name: Optional[str] = None,
    ) -> None:
        """Create a PromptTemplate object.

        :keyword api: The API type.
        :paramtype api: str
        :keyword prompty: Optional Prompty object.
        :paramtype prompty: ~prompty.Prompty or None
        :keyword prompt_template: Optional prompt template string.
        :paramtype prompt_template: str or None
        :keyword model_name: Optional AI model name.
        :paramtype model_name: str or None
        """
        self.prompty = prompty
        if self.prompty is not None:
            self.model_name = (
                self.prompty.model.configuration["azure_deployment"]
                if "azure_deployment" in self.prompty.model.configuration
                else None
            )
            self.parameters = self.prompty.model.parameters
            self._config = {}
        elif prompt_template is not None:
            self.model_name = model_name
            self.parameters = {}
            # _config is a dict to hold the internal configuration
            self._config = {
                "api": api if api is not None else "chat",
                "prompt_template": prompt_template,
            }
        else:
            raise ValueError("Please pass valid arguments for PromptTemplate")

    def create_messages(self, data: Optional[Dict[str, Any]] = None, **kwargs) -> List[Dict[str, Any]]:
        """Render the prompt template with the given data.

        :param data: The data to render the prompt template with.
        :type data: Optional[Dict[str, Any]]
        :return: The rendered prompt template.
        :rtype: List[Dict[str, Any]]
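
        Illustrative usage (hypothetical values, not from this diff):
            messages = prompt_template.create_messages(input="Hi", rules=[{"rule": "Be brief"}])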
"""
try:
from prompty import prepare
except ImportError as exc:
raise ImportError(self._MISSING_PROMPTY_PACKAGE_MESSAGE) from exc

if data is None:
data = kwargs

if self.prompty is not None:
parsed = prepare(self.prompty, data)
return parsed # type: ignore
else:
raise ValueError("Please provide valid prompt template")


def patch_sdk():
    """Do not remove from this file.

    `patch_sdk` is a last resort escape hatch that allows you to do customizations
    you can't accomplish using the techniques described in
    https://aka.ms/azsdk/python/dpcodegen/python/customize
    """
@@ -192,6 +192,7 @@ def upload_folder_and_create(self, *, name: str, version: str, folder: str, **kw
                blob_name,
            )
            with file_path.open("rb") as data:  # Open the file for reading in binary mode
+               # TODO: Is there an upload_folder?
                # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.containerclient?view=azure-python#azure-storage-blob-containerclient-upload-blob
                container_client.upload_blob(name=str(blob_name), data=data, **kwargs)
        logger.debug("[upload_folder_and_create] Done uploading.")
@@ -90,6 +90,7 @@ def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient": # t
            ) from e

        endpoint = self._get_inference_url(self._outer_instance._config.endpoint)  # pylint: disable=protected-access
+       # TODO: Remove this before //build?
        # Older Inference SDK versions use ml.azure.com as the scope. Make sure to set the correct value here. This
        # is only relevant, of course, if EntraID auth is used.
        credential_scopes = ["https://cognitiveservices.azure.com/.default"]
@@ -243,6 +244,7 @@ def get_azure_openai_client(
        # use https://{resource-name}.openai.azure.com where {resource-name} is the same as the
        # foundry API endpoint (https://{resource-name}.services.ai.azure.com)

+       # TODO: Confirm that it's okay to do two REST API calls here.
        # If the connection uses API key authentication, we need to make another service call to get
        # the connection with API key populated.
        if connection.credentials.auth_type == CredentialType.API_KEY:
1 change: 1 addition & 0 deletions sdk/ai/azure-ai-projects-onedp/dev_requirements.txt
@@ -5,3 +5,4 @@ aiohttp
azure.storage.blob
azure.ai.inference
openai
+ prompty
@@ -0,0 +1,30 @@
---
name: Basic Prompt
description: A basic prompt that uses the GPT-3 chat API to answer questions
authors:
  - author_1
  - author_2
model:
  api: chat
  configuration:
    azure_deployment: gpt-4o-mini
  parameters:
    temperature: 1
    frequency_penalty: 0.5
    presence_penalty: 0.5
---
system:
You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services.

# context
{{#rules}}
{{rule}}
{{/rules}}

{{#chat_history}}
{{role}}:
{{content}}
{{/chat_history}}

user:
{{input}}
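
A minimal sketch (not part of this diff) of loading and rendering a Prompty file like the one above; the file name `sample1.prompty` and its location next to the script are assumptions:

```python
# Minimal sketch, not part of this diff. Assumes the Prompty file above is
# saved as "sample1.prompty" in the same folder as this script.
from azure.ai.projects.onedp import PromptTemplate

prompt_template = PromptTemplate.from_prompty(file_path="sample1.prompty")
messages = prompt_template.create_messages(
    input="What time is breakfast served?",
    rules=[{"rule": "Breakfast is served from 7am to 10am"}],
    chat_history=[],
)
print(messages)
```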
@@ -0,0 +1,76 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------

"""
DESCRIPTION:
Given an AIProjectClient, this sample demonstrates how to
* Get an authenticated ChatCompletionsClient from the azure.ai.inference package
* Define a Mustache template, and render the template with provided parameters to create a list of chat messages.
* Perform one chat completion operation.
Package azure.ai.inference required. For more information see https://pypi.org/project/azure-ai-inference/.
Package prompty required. For more information see https://pypi.org/project/prompty/.

USAGE:
sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py

Before running the sample:

pip install azure-ai-projects azure-ai-inference azure-identity prompty

Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
"""

import os
from azure.identity import DefaultAzureCredential
from azure.ai.projects.onedp import AIProjectClient, PromptTemplate

endpoint = os.environ["PROJECT_ENDPOINT"]
model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]

with AIProjectClient(
    endpoint=endpoint,
    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
) as project_client:

    with project_client.inference.get_chat_completions_client() as client:

        prompt_template_str = """
            system:
            You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services.

            # context
            {{#rules}}
            {{rule}}
            {{/rules}}

            {{#chat_history}}
            {{role}}:
            {{content}}
            {{/chat_history}}

            user:
            {{input}}
            """
        prompt_template = PromptTemplate.from_string(api="chat", prompt_template=prompt_template_str)

        input = "When I arrive, can I still have breakfast?"
        rules = [
            {"rule": "The check-in time is 3pm"},
            {"rule": "The check-out time is 11am"},
            {"rule": "Breakfast is served from 7am to 10am"},
        ]
        chat_history = [
            {"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"},
            {"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."},
        ]
        messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history)
        print(messages)

        response = client.complete(model=model_deployment_name, messages=messages)

        print(response.choices[0].message.content)