diff --git a/samples-v2/microsoft/python/quickstart/.env-template b/samples-v2/microsoft/python/quickstart/.env-template
index 11d85978..14d01f4c 100644
--- a/samples-v2/microsoft/python/quickstart/.env-template
+++ b/samples-v2/microsoft/python/quickstart/.env-template
@@ -2,6 +2,6 @@
 # Update with appropriate values
 # Rename the file to .env after updating
 
-AZURE_AI_PROJECT_ENDPOINT="https://.services.ai.azure.com/api/projects/"
+AZURE_AI_FOUNDRY_PROJECT_ENDPOINT="https://.services.ai.azure.com/api/projects/"
 AZURE_AI_FOUNDRY_AGENT_NAME="MyAgent"
 AZURE_AI_FOUNDRY_MODEL_DEPLOYMENT_NAME="gpt-4.1-mini"
\ No newline at end of file
diff --git a/samples-v2/microsoft/python/quickstart/quickstart-chat-with-agent.py b/samples-v2/microsoft/python/quickstart/quickstart-chat-with-agent.py
index a2d6d03c..8f1feb86 100644
--- a/samples-v2/microsoft/python/quickstart/quickstart-chat-with-agent.py
+++ b/samples-v2/microsoft/python/quickstart/quickstart-chat-with-agent.py
@@ -6,10 +6,11 @@
 load_dotenv()
 
 project_client = AIProjectClient(
-    endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"],
+    endpoint=os.environ["AZURE_AI_FOUNDRY_PROJECT_ENDPOINT"],
     credential=DefaultAzureCredential(),
 )
 
+agent_name = os.environ["AZURE_AI_FOUNDRY_AGENT_NAME"]
 openai_client = project_client.get_openai_client()
 
 # Optional Step: Create a conversation to use with the agent
@@ -19,7 +20,7 @@
 #Chat with the agent to answer questions
 response = openai_client.responses.create(
     conversation=conversation.id, #Optional conversation context for multi-turn
-    extra_body={"agent": {"name": "MyAgent", "type": "agent_reference"}},
+    extra_body={"agent": {"name": agent_name, "type": "agent_reference"}},
     input="What is the size of France in square miles?",
 )
 print(f"Response output: {response.output_text}")
@@ -27,7 +28,7 @@
 #Optional Step: Ask a follow-up question in the same conversation
 response = openai_client.responses.create(
     conversation=conversation.id,
-    extra_body={"agent": {"name": "MyAgent", "type": "agent_reference"}},
+    extra_body={"agent": {"name": agent_name, "type": "agent_reference"}},
     input="And what is the capital city?",
 )
 print(f"Response output: {response.output_text}")
diff --git a/samples-v2/microsoft/python/quickstart/quickstart-create-agent.py b/samples-v2/microsoft/python/quickstart/quickstart-create-agent.py
index 116f3c6c..69302a1d 100644
--- a/samples-v2/microsoft/python/quickstart/quickstart-create-agent.py
+++ b/samples-v2/microsoft/python/quickstart/quickstart-create-agent.py
@@ -7,15 +7,17 @@
 load_dotenv()
 
 project_client = AIProjectClient(
-    endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"],
+    endpoint=os.environ["AZURE_AI_FOUNDRY_PROJECT_ENDPOINT"],
     credential=DefaultAzureCredential(),
 )
 
+#
 agent = project_client.agents.create_version(
-    agent_name="MyAgent",
+    agent_name=os.environ["AZURE_AI_FOUNDRY_AGENT_NAME"],
     definition=PromptAgentDefinition(
-        model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"],
+        model=os.environ["AZURE_AI_FOUNDRY_MODEL_DEPLOYMENT_NAME"],
         instructions="You are a helpful assistant that answers general questions",
     ),
 )
 print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})")
+#
diff --git a/samples-v2/microsoft/python/quickstart/quickstart-responses.py b/samples-v2/microsoft/python/quickstart/quickstart-responses.py
index 78a18e81..1c56e6e9 100644
--- a/samples-v2/microsoft/python/quickstart/quickstart-responses.py
+++ b/samples-v2/microsoft/python/quickstart/quickstart-responses.py
@@ -5,15 +5,20 @@
 
 load_dotenv()
 
+print(f"Using AZURE_AI_FOUNDRY_PROJECT_ENDPOINT: {os.environ['AZURE_AI_FOUNDRY_PROJECT_ENDPOINT']}")
+print(f"Using AZURE_AI_FOUNDRY_MODEL_DEPLOYMENT_NAME: {os.environ['AZURE_AI_FOUNDRY_MODEL_DEPLOYMENT_NAME']}")
+
 project_client = AIProjectClient(
-    endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"],
+    endpoint=os.environ["AZURE_AI_FOUNDRY_PROJECT_ENDPOINT"],
     credential=DefaultAzureCredential(),
 )
 
+#
 openai_client = project_client.get_openai_client()
 
 response = openai_client.responses.create(
-    model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"],
+    model=os.environ["AZURE_AI_FOUNDRY_MODEL_DEPLOYMENT_NAME"],
     input="What is the size of France in square miles?",
 )
-print(f"Response output: {response.output_text}")
\ No newline at end of file
+print(f"Response output: {response.output_text}")
+#
\ No newline at end of file
diff --git a/samples-v2/microsoft/python/quickstart/requirements.txt b/samples-v2/microsoft/python/quickstart/requirements.txt
index 1f088342..241064aa 100644
--- a/samples-v2/microsoft/python/quickstart/requirements.txt
+++ b/samples-v2/microsoft/python/quickstart/requirements.txt
@@ -1,3 +1,4 @@
 azure-ai-projects>=2.0.0a20250915020
 azure-identity
 python-dotenv
+openai
\ No newline at end of file