Skip to content

Commit dc6a8a5

Browse files
restructure: rebase Python samples on upstream/main with numbered folder layout
- Rebase on upstream/main to incorporate latest content updates
  (AzureOpenAIResponsesClient adoption, orchestration reorganization,
  workflow/orchestration bug fixes)
- Move files from upstream structure into numbered folders:
  - getting_started/agents/ -> 02-agents/providers/
  - getting_started/workflows/ -> 03-workflows/
  - getting_started/orchestrations/ -> 03-workflows/orchestrations/
  - getting_started/azure_functions/ -> 04-hosting/azure_functions/
  - getting_started/durabletask/ -> 04-hosting/durabletask/
  - concepts/ -> 02-agents/
  - demos/ -> 05-end-to-end/
- Update all internal path references (115+ files)
- Preserve 01-get-started custom tutorial files
- Keep a2a hosting samples
1 parent 6a88c7b commit dc6a8a5

501 files changed

Lines changed: 1254 additions & 542 deletions

File tree

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.
Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
# Copyright (c) Microsoft. All rights reserved.
2+
3+
import asyncio
4+
import os
5+
6+
from agent_framework.azure import AzureOpenAIResponsesClient
7+
from azure.identity import AzureCliCredential
8+
9+
"""
10+
Hello Agent — Simplest possible agent
11+
12+
This sample creates a minimal agent using AzureOpenAIResponsesClient via an
13+
Azure AI Foundry project endpoint, and runs it in both non-streaming and streaming modes.
14+
15+
Environment variables:
16+
AZURE_AI_PROJECT_ENDPOINT — Your Azure AI Foundry project endpoint
17+
AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME — Model deployment name (e.g. gpt-4o)
18+
"""
19+
20+
21+
async def main() -> None:
22+
# <create_agent>
23+
credential = AzureCliCredential()
24+
client = AzureOpenAIResponsesClient(
25+
project_endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"],
26+
deployment_name=os.environ["AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME"],
27+
credential=credential,
28+
)
29+
30+
agent = client.as_agent(
31+
name="HelloAgent",
32+
instructions="You are a friendly assistant. Keep your answers brief.",
33+
)
34+
# </create_agent>
35+
36+
# <run_agent>
37+
# Non-streaming: get the complete response at once
38+
result = await agent.run("What is the capital of France?")
39+
print(f"Agent: {result}")
40+
# </run_agent>
41+
42+
# <run_agent_streaming>
43+
# Streaming: receive tokens as they are generated
44+
print("Agent (streaming): ", end="", flush=True)
45+
async for chunk in agent.run("Tell me a one-sentence fun fact.", stream=True):
46+
if chunk.text:
47+
print(chunk.text, end="", flush=True)
48+
print()
49+
# </run_agent_streaming>
50+
51+
52+
if __name__ == "__main__":
53+
asyncio.run(main())
Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
# Copyright (c) Microsoft. All rights reserved.
2+
3+
import asyncio
4+
import os
5+
from random import randint
6+
from typing import Annotated
7+
8+
from agent_framework import tool
9+
from agent_framework.azure import AzureOpenAIResponsesClient
10+
from azure.identity import AzureCliCredential
11+
from pydantic import Field
12+
13+
"""
14+
Add Tools — Give your agent a function tool
15+
16+
This sample shows how to define a function tool with the @tool decorator
17+
and wire it into an agent so the model can call it.
18+
19+
Environment variables:
20+
AZURE_AI_PROJECT_ENDPOINT — Your Azure AI Foundry project endpoint
21+
AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME — Model deployment name (e.g. gpt-4o)
22+
"""
23+
24+
25+
# <define_tool>
26+
# NOTE: approval_mode="never_require" is for sample brevity.
27+
# Use "always_require" in production for user confirmation before tool execution.
28+
@tool(approval_mode="never_require")
29+
def get_weather(
30+
location: Annotated[str, Field(description="The location to get the weather for.")],
31+
) -> str:
32+
"""Get the weather for a given location."""
33+
conditions = ["sunny", "cloudy", "rainy", "stormy"]
34+
return f"The weather in {location} is {conditions[randint(0, 3)]} with a high of {randint(10, 30)}°C."
35+
# </define_tool>
36+
37+
38+
async def main() -> None:
39+
credential = AzureCliCredential()
40+
client = AzureOpenAIResponsesClient(
41+
project_endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"],
42+
deployment_name=os.environ["AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME"],
43+
credential=credential,
44+
)
45+
46+
# <create_agent_with_tools>
47+
agent = client.as_agent(
48+
name="WeatherAgent",
49+
instructions="You are a helpful weather agent. Use the get_weather tool to answer questions.",
50+
tools=get_weather,
51+
)
52+
# </create_agent_with_tools>
53+
54+
# <run_agent>
55+
result = await agent.run("What's the weather like in Seattle?")
56+
print(f"Agent: {result}")
57+
# </run_agent>
58+
59+
60+
if __name__ == "__main__":
61+
asyncio.run(main())
Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
# Copyright (c) Microsoft. All rights reserved.
2+
3+
import asyncio
4+
import os
5+
6+
from agent_framework.azure import AzureOpenAIResponsesClient
7+
from azure.identity import AzureCliCredential
8+
9+
"""
10+
Multi-Turn Conversations — Use AgentThread to maintain context
11+
12+
This sample shows how to keep conversation history across multiple calls
13+
by reusing the same thread object.
14+
15+
Environment variables:
16+
AZURE_AI_PROJECT_ENDPOINT — Your Azure AI Foundry project endpoint
17+
AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME — Model deployment name (e.g. gpt-4o)
18+
"""
19+
20+
21+
async def main() -> None:
22+
# <create_agent>
23+
credential = AzureCliCredential()
24+
client = AzureOpenAIResponsesClient(
25+
project_endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"],
26+
deployment_name=os.environ["AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME"],
27+
credential=credential,
28+
)
29+
30+
agent = client.as_agent(
31+
name="ConversationAgent",
32+
instructions="You are a friendly assistant. Keep your answers brief.",
33+
)
34+
# </create_agent>
35+
36+
# <multi_turn>
37+
# Create a thread to maintain conversation history
38+
thread = agent.get_new_thread()
39+
40+
# First turn
41+
result = await agent.run("My name is Alice and I love hiking.", thread=thread)
42+
print(f"Agent: {result}\n")
43+
44+
# Second turn — the agent should remember the user's name and hobby
45+
result = await agent.run("What do you remember about me?", thread=thread)
46+
print(f"Agent: {result}")
47+
# </multi_turn>
48+
49+
50+
if __name__ == "__main__":
51+
asyncio.run(main())
Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
# Copyright (c) Microsoft. All rights reserved.
2+
3+
import asyncio
4+
import os
5+
from collections.abc import MutableSequence
6+
from typing import Any
7+
8+
from agent_framework import Context, ContextProvider, Message
9+
from agent_framework.azure import AzureOpenAIResponsesClient
10+
from azure.identity import AzureCliCredential
11+
12+
"""
13+
Agent Memory with Context Providers
14+
15+
Context providers let you inject dynamic instructions and context into each
16+
agent invocation. This sample defines a simple provider that tracks the user's
17+
name and enriches every request with personalization instructions.
18+
19+
Environment variables:
20+
AZURE_AI_PROJECT_ENDPOINT — Your Azure AI Foundry project endpoint
21+
AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME — Model deployment name (e.g. gpt-4o)
22+
"""
23+
24+
25+
# <context_provider>
26+
class UserNameProvider(ContextProvider):
27+
"""A simple context provider that remembers the user's name."""
28+
29+
def __init__(self) -> None:
30+
self.user_name: str | None = None
31+
32+
async def invoking(self, messages: Message | MutableSequence[Message], **kwargs: Any) -> Context:
33+
"""Called before each agent invocation — add extra instructions."""
34+
if self.user_name:
35+
return Context(instructions=f"The user's name is {self.user_name}. Always address them by name.")
36+
return Context(instructions="You don't know the user's name yet. Ask for it politely.")
37+
38+
async def invoked(
39+
self,
40+
request_messages: Message | list[Message] | None = None,
41+
response_messages: "Message | list[Message] | None" = None,
42+
invoke_exception: Exception | None = None,
43+
**kwargs: Any,
44+
) -> None:
45+
"""Called after each agent invocation — extract information."""
46+
msgs = [request_messages] if isinstance(request_messages, Message) else list(request_messages or [])
47+
for msg in msgs:
48+
text = msg.text if hasattr(msg, "text") else ""
49+
if isinstance(text, str) and "my name is" in text.lower():
50+
# Simple extraction — production code should use structured extraction
51+
self.user_name = text.lower().split("my name is")[-1].strip().split()[0].capitalize()
52+
# </context_provider>
53+
54+
55+
async def main() -> None:
56+
# <create_agent>
57+
credential = AzureCliCredential()
58+
client = AzureOpenAIResponsesClient(
59+
project_endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"],
60+
deployment_name=os.environ["AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME"],
61+
credential=credential,
62+
)
63+
64+
memory = UserNameProvider()
65+
66+
agent = client.as_agent(
67+
name="MemoryAgent",
68+
instructions="You are a friendly assistant.",
69+
context_provider=memory,
70+
)
71+
# </create_agent>
72+
73+
thread = agent.get_new_thread()
74+
75+
# The provider doesn't know the user yet — it will ask for a name
76+
result = await agent.run("Hello! What's the square root of 9?", thread=thread)
77+
print(f"Agent: {result}\n")
78+
79+
# Now provide the name — the provider extracts and stores it
80+
result = await agent.run("My name is Alice", thread=thread)
81+
print(f"Agent: {result}\n")
82+
83+
# Subsequent calls are personalized
84+
result = await agent.run("What is 2 + 2?", thread=thread)
85+
print(f"Agent: {result}\n")
86+
87+
print(f"[Memory] Stored user name: {memory.user_name}")
88+
89+
90+
if __name__ == "__main__":
91+
asyncio.run(main())
Lines changed: 72 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,72 @@
1+
# Copyright (c) Microsoft. All rights reserved.
2+
3+
import asyncio
4+
5+
from agent_framework import (
6+
Executor,
7+
WorkflowBuilder,
8+
WorkflowContext,
9+
executor,
10+
handler,
11+
)
12+
from typing_extensions import Never
13+
14+
"""
15+
First Workflow — Chain executors with edges
16+
17+
This sample builds a minimal workflow with two steps:
18+
1. Convert text to uppercase (class-based executor)
19+
2. Reverse the text (function-based executor)
20+
21+
No external services are required.
22+
"""
23+
24+
25+
# <create_workflow>
26+
# Step 1: A class-based executor that converts text to uppercase
27+
class UpperCase(Executor):
28+
def __init__(self, id: str):
29+
super().__init__(id=id)
30+
31+
@handler
32+
async def to_upper_case(self, text: str, ctx: WorkflowContext[str]) -> None:
33+
"""Convert input to uppercase and forward to the next node."""
34+
await ctx.send_message(text.upper())
35+
36+
37+
# Step 2: A function-based executor that reverses the string and yields output
38+
@executor(id="reverse_text")
39+
async def reverse_text(text: str, ctx: WorkflowContext[Never, str]) -> None:
40+
"""Reverse the string and yield the final workflow output."""
41+
await ctx.yield_output(text[::-1])
42+
43+
44+
def create_workflow():
45+
"""Build the workflow: UpperCase → reverse_text."""
46+
upper = UpperCase(id="upper_case")
47+
return (
48+
WorkflowBuilder(start_executor=upper)
49+
.add_edge(upper, reverse_text)
50+
.build()
51+
)
52+
# </create_workflow>
53+
54+
55+
async def main() -> None:
56+
# <run_workflow>
57+
workflow = create_workflow()
58+
59+
events = await workflow.run("hello world")
60+
print(f"Output: {events.get_outputs()}")
61+
print(f"Final state: {events.get_final_state()}")
62+
# </run_workflow>
63+
64+
"""
65+
Expected output:
66+
Output: ['DLROW OLLEH']
67+
Final state: WorkflowRunState.IDLE
68+
"""
69+
70+
71+
if __name__ == "__main__":
72+
asyncio.run(main())
Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
# Copyright (c) Microsoft. All rights reserved.
2+
3+
"""
4+
Host Your Agent — Azure Functions (Durable) hosting
5+
6+
This sample shows how to host an agent inside Azure Functions using the
7+
Durable Task extension. AgentFunctionApp wraps your agent behind HTTP
8+
endpoints that Azure Functions exposes automatically.
9+
10+
Prerequisites:
11+
pip install agent-framework-azurefunctions --pre
12+
Azure Functions Core Tools 4.x (for local development)
13+
Azurite storage emulator (for local development)
14+
15+
Environment variables:
16+
AZURE_OPENAI_ENDPOINT — Your Azure OpenAI endpoint
17+
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME — Model deployment name (e.g. gpt-4o)
18+
19+
For full Azure Functions samples with host.json, local.settings.json, and
20+
multi-agent orchestrations, see samples/04-hosting/azure_functions/.
21+
"""
22+
23+
from typing import Any
24+
25+
from agent_framework.azure import AgentFunctionApp, AzureOpenAIChatClient
26+
from azure.identity import AzureCliCredential
27+
28+
29+
# <create_agent>
30+
def _create_agent() -> Any:
31+
"""Create a hosted agent backed by Azure OpenAI."""
32+
return AzureOpenAIChatClient(credential=AzureCliCredential()).as_agent(
33+
name="HostedAgent",
34+
instructions="You are a helpful assistant hosted in Azure Functions.",
35+
)
36+
# </create_agent>
37+
38+
39+
# <host_agent>
40+
# AgentFunctionApp registers your agent with Azure Functions and exposes
41+
# HTTP endpoints (e.g. POST /api/agents/HostedAgent/run) via the Durable
42+
# Task extension.
43+
app = AgentFunctionApp(agents=[_create_agent()])
44+
# </host_agent>
45+
46+
"""
47+
To run locally:
48+
1. Start the Azurite storage emulator
49+
2. Copy local.settings.json.template to local.settings.json and fill in values
50+
3. Run: func start
51+
52+
Then invoke:
53+
POST http://localhost:7071/api/agents/HostedAgent/run
54+
Content-Type: text/plain
55+
56+
Hello! What can you do?
57+
"""

0 commit comments

Comments
 (0)