Add Prompty support to AI Foundry Projects SDK #40106

Open · wants to merge 2 commits into base: feature/azure-ai-projects-beta8
1 change: 1 addition & 0 deletions .vscode/cspell.json
@@ -375,6 +375,7 @@
"prebuilts",
"premf",
"prevsnapshot",
"prompty",
"pschema",
"PSECRET",
"pydantic",
1 change: 1 addition & 0 deletions sdk/ai/azure-ai-projects/CHANGELOG.md
@@ -9,6 +9,7 @@
### Bugs Fixed

* Fix for a bug in agent tracing causing event handler return values to not be returned when tracing is enabled.
* Add Prompty support from the AI Foundry Inference SDK to the AI Foundry Projects SDK.

### Breaking Changes

15 changes: 15 additions & 0 deletions sdk/ai/azure-ai-projects/azure/ai/projects/prompts/__init__.py
@@ -0,0 +1,15 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
# pylint: disable=unused-import
try:
    import prompty  # pylint: disable=unused-import
except ImportError as exc:
    raise ImportError(
        "The 'prompty' package is required to use the 'azure.ai.projects.prompts' module. "
        "Please install it by running 'pip install prompty'."
    ) from exc

from ._patch import patch_sdk as _patch_sdk, PromptTemplate
_patch_sdk()
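
Since `prompty` is an optional dependency, a consumer who imports this module without it installed sees the guard above as a plain ImportError at import time. A minimal, hypothetical consumer-side sketch (the module path and error text come from this PR; the handling itself is illustrative):

try:
    from azure.ai.projects.prompts import PromptTemplate
except ImportError as exc:
    # Raised when the optional 'prompty' package is missing; see the guard in __init__.py above.
    raise SystemExit("Run 'pip install prompty' first.") from exc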
121 changes: 121 additions & 0 deletions sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_patch.py
@@ -0,0 +1,121 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
# pylint: disable=line-too-long,R
"""Customize generated code here.

Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""

import traceback
from pathlib import Path
from typing import Any, Dict, List, Optional
from typing_extensions import Self
from prompty import headless, load, prepare
from prompty.core import Prompty
from ._utils import remove_leading_empty_space


class PromptTemplate:
"""The helper class which takes variant of inputs, e.g. Prompty format or string, and returns the parsed prompt in an array."""

@classmethod
def from_prompty(cls, file_path: str) -> Self:
"""Initialize a PromptTemplate object from a prompty file.

:param file_path: The path to the prompty file.
:type file_path: str
:return: The PromptTemplate object.
:rtype: PromptTemplate
"""
if not file_path:
raise ValueError("Please provide file_path")

        # Resolve file_path relative to the caller's directory, located via `traceback.extract_stack()`.
        # Index -2 is used because the last frame in the stack is this function itself,
        # and the second-to-last frame is the caller, whose directory anchors the relative file_path.
stack = traceback.extract_stack()
caller = Path(stack[-2].filename)
abs_file_path = Path(caller.parent / Path(file_path)).resolve().absolute()

prompty = load(str(abs_file_path))
prompty.template.type = "mustache" # For Azure, default to mustache instead of Jinja2
return cls(prompty=prompty)

@classmethod
def from_string(cls, prompt_template: str, api: str = "chat", model_name: Optional[str] = None) -> Self:
"""Initialize a PromptTemplate object from a message template.

:param prompt_template: The prompt template string.
:type prompt_template: str
:param api: The API type, e.g. "chat" or "completion".
:type api: str
:param model_name: The model name, e.g. "gpt-4o-mini".
        :type model_name: Optional[str]
:return: The PromptTemplate object.
:rtype: PromptTemplate
"""
prompt_template = remove_leading_empty_space(prompt_template)
prompty = headless(api=api, content=prompt_template)
prompty.template.type = "mustache" # For Azure, default to mustache instead of Jinja2
prompty.template.parser = "prompty"
return cls(
api=api,
model_name=model_name,
prompty=prompty,
)

def __init__(
self,
*,
api: str = "chat",
prompty: Optional[Prompty] = None,
prompt_template: Optional[str] = None,
model_name: Optional[str] = None,
) -> None:
self.prompty = prompty
if self.prompty is not None:
            self.model_name = self.prompty.model.configuration.get("azure_deployment")
self.parameters = self.prompty.model.parameters
self._config = {}
elif prompt_template is not None:
self.model_name = model_name
self.parameters = {}
# _config is a dict to hold the internal configuration
self._config = {
"api": api if api is not None else "chat",
"prompt_template": prompt_template,
}
else:
raise ValueError("Please pass valid arguments for PromptTemplate")

def create_messages(self, data: Optional[Dict[str, Any]] = None, **kwargs) -> List[Dict[str, Any]]:
"""Render the prompt template with the given data.

        :param data: The data to render the prompt template with. If omitted, keyword arguments are used as the data.
        :type data: Optional[Dict[str, Any]]
        :return: The rendered messages.
:rtype: List[Dict[str, Any]]
"""
if data is None:
data = kwargs

if self.prompty is not None:
parsed = prepare(self.prompty, data)
return parsed
else:
raise ValueError("Please provide valid prompt template")


def patch_sdk():
"""Do not remove from this file.

`patch_sdk` is a last resort escape hatch that allows you to do customizations
you can't accomplish using the techniques described in
https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
39 changes: 39 additions & 0 deletions sdk/ai/azure-ai-projects/azure/ai/projects/prompts/_utils.py
@@ -0,0 +1,39 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import sys


def remove_leading_empty_space(multiline_str: str) -> str:
"""
Processes a multiline string by:
    1. Removing leading empty lines
    2. Finding the minimum number of leading spaces across non-empty lines
    3. Removing that common indentation from every line

:param multiline_str: The input multiline string.
:type multiline_str: str
:return: The processed multiline string.
:rtype: str
"""
lines = multiline_str.splitlines()
start_index = 0
while start_index < len(lines) and lines[start_index].strip() == "":
start_index += 1

# Find the minimum number of leading spaces
min_spaces = sys.maxsize
for line in lines[start_index:]:
if len(line.strip()) == 0:
continue
spaces = len(line) - len(line.lstrip())
spaces += line.lstrip().count("\t") * 2 # Count tabs as 2 spaces
min_spaces = min(min_spaces, spaces)

# Remove leading spaces and indent to the minimum level
processed_lines = []
for line in lines[start_index:]:
processed_lines.append(line[min_spaces:])

return "\n".join(processed_lines)
30 changes: 30 additions & 0 deletions sdk/ai/azure-ai-projects/samples/inference/sample1.prompty
@@ -0,0 +1,30 @@
---
name: Basic Prompt
description: A basic prompt that uses the chat completions API to answer questions
authors:
- author_1
- author_2
model:
api: chat
configuration:
azure_deployment: gpt-4o-mini
parameters:
temperature: 1
frequency_penalty: 0.5
presence_penalty: 0.5
---
system:
You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services.

# context
{{#rules}}
{{rule}}
{{/rules}}

{{#chat_history}}
{{role}}:
{{content}}
{{/chat_history}}

user:
{{input}}
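
The mustache sections in this template iterate over lists; a hedged sketch of the data shapes it expects, matching the samples that follow (values are illustrative):

data = {
    "input": "When I arrive, can I still have breakfast?",        # substituted at {{input}}
    "rules": [{"rule": "Breakfast is served from 7am to 10am"}],  # iterated by {{#rules}}...{{/rules}}
    "chat_history": [                                             # iterated by {{#chat_history}}...{{/chat_history}}
        {"role": "user", "content": "What's the check-in time?"},
    ],
}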
75 changes: 75 additions & 0 deletions sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py
@@ -0,0 +1,75 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------

"""
DESCRIPTION:
Given an AIProjectClient, this sample demonstrates how to get an authenticated
    ChatCompletionsClient from the azure.ai.inference package, and then work with a prompt string.
For more information on the azure.ai.inference package see https://pypi.org/project/azure-ai-inference/.

USAGE:
python sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py

Before running the sample:

    pip install azure-ai-projects[prompts] azure-ai-inference azure-identity

Set these environment variables with your own values:
* PROJECT_CONNECTION_STRING - The Azure AI Project connection string, as found in your AI Foundry project.
* MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
"""

import os
from azure.ai.projects import AIProjectClient
from azure.ai.projects.prompts import PromptTemplate
from azure.identity import DefaultAzureCredential

project_connection_string = os.environ["PROJECT_CONNECTION_STRING"]
model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]

with AIProjectClient.from_connection_string(
credential=DefaultAzureCredential(),
conn_str=project_connection_string,
) as project_client:

with project_client.inference.get_chat_completions_client() as client:

prompt_template_str = """
system:
You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services.

# context
{{#rules}}
{{rule}}
{{/rules}}

{{#chat_history}}
{{role}}:
{{content}}
{{/chat_history}}

user:
{{input}}
"""
prompt_template = PromptTemplate.from_string(api="chat", prompt_template=prompt_template_str)

input = "When I arrived, can I still have breakfast?"
rules = [
{"rule": "The check-in time is 3pm"},
{"rule": "The check-out time is 11am"},
{"rule": "Breakfast is served from 7am to 10am"},
]
chat_history = [
{"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"},
{"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."},
]
        messages = prompt_template.create_messages(input=user_input, rules=rules, chat_history=chat_history)

response = client.complete(
model=model_deployment_name, messages=messages
)

print(response.choices[0].message.content)
59 changes: 59 additions & 0 deletions sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty.py
@@ -0,0 +1,59 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------

"""
DESCRIPTION:
Given an AIProjectClient, this sample demonstrates how to get an authenticated
    ChatCompletionsClient from the azure.ai.inference package, and then work with Prompty.
For more information on the azure.ai.inference package see https://pypi.org/project/azure-ai-inference/.

USAGE:
python sample_chat_completions_with_azure_ai_inference_client_and_prompty.py

Before running the sample:

    pip install azure-ai-projects[prompts] azure-ai-inference azure-identity

Set these environment variables with your own values:
* PROJECT_CONNECTION_STRING - The Azure AI Project connection string, as found in your AI Foundry project.
* MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
"""

import os
from azure.ai.projects import AIProjectClient
from azure.ai.projects.prompts import PromptTemplate
from azure.identity import DefaultAzureCredential

project_connection_string = os.environ["PROJECT_CONNECTION_STRING"]
model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]

with AIProjectClient.from_connection_string(
credential=DefaultAzureCredential(),
conn_str=project_connection_string,
) as project_client:

with project_client.inference.get_chat_completions_client() as client:

path = "./sample1.prompty"
prompt_template = PromptTemplate.from_prompty(file_path=path)

input = "When I arrived, can I still have breakfast?"
rules = [
{"rule": "The check-in time is 3pm"},
{"rule": "The check-out time is 11am"},
{"rule": "Breakfast is served from 7am to 10am"},
]
chat_history = [
{"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"},
{"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."},
]
        messages = prompt_template.create_messages(input=user_input, rules=rules, chat_history=chat_history)

response = client.complete(
model=model_deployment_name, messages=messages
)

print(response.choices[0].message.content)
3 changes: 3 additions & 0 deletions sdk/ai/azure-ai-projects/setup.py
@@ -100,4 +100,7 @@
"typing-extensions>=4.12.2",
],
python_requires=">=3.8",
extras_require={
"prompts": ["prompty", "pyyaml"],
},
)
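
With the `prompts` extra above in place, the optional dependencies can be installed in one step instead of a separate `pip install prompty`:

pip install azure-ai-projects[prompts]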
3 changes: 2 additions & 1 deletion shared_requirements.txt
@@ -71,4 +71,5 @@ dnspython
promptflow-core
promptflow-devkit
nltk
azure-monitor-opentelemetry
azure-monitor-opentelemetry
prompty