Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
9113334
Initial plan
Copilot Feb 18, 2026
c07c1e2
Add load_dotenv() to 303 Python samples for environment variable loading
Copilot Feb 18, 2026
abed4ea
Update SAMPLE_GUIDELINES.md to document load_dotenv() requirement
Copilot Feb 18, 2026
cd1b8ae
Update samples README.md to document .env file usage
Copilot Feb 18, 2026
17b5890
Run ruff format on all changed sample files
Copilot Feb 18, 2026
f4061fb
Clarify load_dotenv() usage in README - local dev vs production
Copilot Feb 18, 2026
4ad989c
Remove deprecated getting_started folder as requested
Copilot Feb 18, 2026
6ff6d7a
Document env_file_path parameter for per-client configuration
Copilot Feb 18, 2026
2b798aa
Merge main branch to resolve conflicts
Copilot Feb 18, 2026
c911615
Fix run_evaluation.py file that was empty in merge commit
Copilot Feb 18, 2026
d708ea0
Remove dotnet changes from merge - out of scope for this PR
Copilot Feb 18, 2026
48f966c
Remove package and test changes from merge - only sample changes needed
Copilot Feb 18, 2026
ec1c527
Remove test_func_utils.py - only sample changes needed
Copilot Feb 18, 2026
923bc14
Revert sample files not in original changeset - keep only load_dotenv…
Copilot Feb 18, 2026
6ca80ed
Move load_dotenv() outside snippet tag in 06_host_your_agent.py
Copilot Feb 18, 2026
95fc230
Fix comment placement - move load_dotenv before code comments
Copilot Feb 18, 2026
f973138
Fix load_dotenv() placement across all samples - after docstring, bef…
Copilot Feb 18, 2026
cbfe7d7
Merge latest main branch with load_dotenv changes
Copilot Feb 18, 2026
ef6a1d9
Remove non-sample changes from merge - keep only load_dotenv additions
Copilot Feb 18, 2026
10942e4
Revert non-load_dotenv sample changes from merge
Copilot Feb 18, 2026
68d7d3f
Fix run_evaluation.py - use main's improved version (file already had…
Copilot Feb 18, 2026
cbababe
Merge branch 'main' into copilot/fix-env-variable-loading
TaoChenOSU Feb 18, 2026
deec690
Merge branch 'main' into copilot/fix-env-variable-loading
TaoChenOSU Feb 19, 2026
34ac2b2
Manual update
TaoChenOSU Feb 19, 2026
64f33c2
Merge branch 'main' into copilot/fix-env-variable-loading
TaoChenOSU Feb 19, 2026
757c263
Manual update 2
TaoChenOSU Feb 19, 2026
5cede7d
Fix Role usage and load_dotenv placement per PR review feedback
Copilot Feb 19, 2026
c5a77b9
Fix Role usage - use string literals not enum attributes
Copilot Feb 19, 2026
17a37fc
Merge branch 'main' into copilot/fix-env-variable-loading
eavanvalkenburg Feb 19, 2026
8d6d986
Fix SAMPLE_GUIDELINES.md example - load_dotenv before docstring per g…
Copilot Feb 19, 2026
d9513cc
Move load_dotenv() before docstrings in all samples per SAMPLE_GUIDEL…
Copilot Feb 19, 2026
c49ce53
Address PR review: rename files, fix placement, add session usage, re…
Copilot Feb 19, 2026
cc12c5b
Update Redis README to reference renamed file redis_history_provider.py
Copilot Feb 19, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
2 changes: 1 addition & 1 deletion python/packages/redis/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ The `RedisChatMessageStore` provides persistent conversation storage using Redis

#### Basic Usage Examples

See the complete [Redis history provider examples](../../samples/02-agents/conversations/redis_chat_message_store_session.py) including:
See the complete [Redis history provider examples](../../samples/02-agents/conversations/redis_history_provider.py) including:
- User session management
- Conversation persistence across restarts
- Session serialization and deserialization
Expand Down
4 changes: 4 additions & 0 deletions python/samples/01-get-started/01_hello_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,10 @@

from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

"""
Hello Agent — Simplest possible agent
Expand Down
4 changes: 4 additions & 0 deletions python/samples/01-get-started/02_add_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,12 @@
from agent_framework import tool
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential
from dotenv import load_dotenv
from pydantic import Field

# Load environment variables from .env file
load_dotenv()

"""
Add Tools — Give your agent a function tool
Expand Down
4 changes: 4 additions & 0 deletions python/samples/01-get-started/03_multi_turn.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,10 @@

from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

"""
Multi-Turn Conversations — Use AgentSession to maintain context
Expand Down
4 changes: 4 additions & 0 deletions python/samples/01-get-started/04_memory.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@
from agent_framework._sessions import AgentSession, BaseContextProvider, SessionContext
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

"""
Agent Memory with Context Providers
Expand Down
6 changes: 1 addition & 5 deletions python/samples/01-get-started/05_first_workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,11 +44,7 @@ async def reverse_text(text: str, ctx: WorkflowContext[Never, str]) -> None:
def create_workflow():
"""Build the workflow: UpperCase → reverse_text."""
upper = UpperCase(id="upper_case")
return (
WorkflowBuilder(start_executor=upper)
.add_edge(upper, reverse_text)
.build()
)
return WorkflowBuilder(start_executor=upper).add_edge(upper, reverse_text).build()
# </create_workflow>


Expand Down
16 changes: 9 additions & 7 deletions python/samples/01-get-started/06_host_your_agent.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,14 @@
# Copyright (c) Microsoft. All rights reserved.

from typing import Any

from agent_framework.azure import AgentFunctionApp, AzureOpenAIChatClient
from azure.identity import AzureCliCredential
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

"""Host your agent with Azure Functions.
This sample shows the Python hosting pattern used in docs:
Expand All @@ -15,11 +24,6 @@
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME
"""

from typing import Any

from agent_framework.azure import AgentFunctionApp, AzureOpenAIChatClient
from azure.identity import AzureCliCredential


# <create_agent>
def _create_agent() -> Any:
Expand All @@ -28,8 +32,6 @@ def _create_agent() -> Any:
name="HostedAgent",
instructions="You are a helpful assistant hosted in Azure Functions.",
)


# </create_agent>

# <host_agent>
Expand Down
4 changes: 4 additions & 0 deletions python/samples/02-agents/background_responses.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@

from agent_framework import Agent
from agent_framework.openai import OpenAIResponsesClient
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

"""Background Responses Sample.
Expand Down
4 changes: 4 additions & 0 deletions python/samples/02-agents/chat_client/built_in_chat_clients.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,12 @@
from agent_framework.openai import OpenAIAssistantsClient
from azure.identity import AzureCliCredential
from azure.identity.aio import AzureCliCredential as AsyncAzureCliCredential
from dotenv import load_dotenv
from pydantic import Field

# Load environment variables from .env file
load_dotenv()

"""
Built-in Chat Clients Example
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,10 @@
import asyncio

from agent_framework.openai import OpenAIChatClient
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

"""
Chat Response Cancellation Example
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,15 @@
from agent_framework.azure import AzureAIAgentClient
from agent_framework.mem0 import Mem0ContextProvider
from azure.identity.aio import AzureCliCredential
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

# NOTE: approval_mode="never_require" is for sample brevity. Use "always_require" in production; see samples/02-agents/tools/function_tool_with_approval.py and samples/02-agents/tools/function_tool_with_approval_and_sessions.py.

# NOTE: approval_mode="never_require" is for sample brevity. Use "always_require" in production;
# see samples/02-agents/tools/function_tool_with_approval.py
# and samples/02-agents/tools/function_tool_with_approval_and_sessions.py.
@tool(approval_mode="never_require")
def retrieve_company_report(company_code: str, detailed: bool) -> str:
if company_code != "CNTS":
Expand All @@ -24,6 +30,7 @@ def retrieve_company_report(company_code: str, detailed: bool) -> str:

async def main() -> None:
"""Example of memory usage with Mem0 context provider."""

print("=== Mem0 Context Provider Example ===")

# Each record in Mem0 should be associated with agent_id or user_id or application_id or thread_id.
Expand Down
9 changes: 7 additions & 2 deletions python/samples/02-agents/context_providers/mem0/mem0_oss.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,15 @@
from agent_framework.azure import AzureAIAgentClient
from agent_framework.mem0 import Mem0ContextProvider
from azure.identity.aio import AzureCliCredential
from dotenv import load_dotenv
from mem0 import AsyncMemory

# Load environment variables from .env file
load_dotenv()

# NOTE: approval_mode="never_require" is for sample brevity.
# Use "always_require" in production; see samples/02-agents/tools/function_tool_with_approval.py

# NOTE: approval_mode="never_require" is for sample brevity. Use "always_require" in production;
# see samples/02-agents/tools/function_tool_with_approval.py
# and samples/02-agents/tools/function_tool_with_approval_and_sessions.py.
@tool(approval_mode="never_require")
def retrieve_company_report(company_code: str, detailed: bool) -> str:
Expand All @@ -27,6 +31,7 @@ def retrieve_company_report(company_code: str, detailed: bool) -> str:

async def main() -> None:
"""Example of memory usage with local Mem0 OSS context provider."""

print("=== Mem0 Context Provider Example ===")

# Each record in Mem0 should be associated with agent_id or user_id or application_id or thread_id.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,19 @@
from agent_framework.azure import AzureAIAgentClient
from agent_framework.mem0 import Mem0ContextProvider
from azure.identity.aio import AzureCliCredential
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

# NOTE: approval_mode="never_require" is for sample brevity. Use "always_require" in production; see samples/02-agents/tools/function_tool_with_approval.py and samples/02-agents/tools/function_tool_with_approval_and_sessions.py.

# NOTE: approval_mode="never_require" is for sample brevity. Use "always_require" in production;
# see samples/02-agents/tools/function_tool_with_approval.py
# and samples/02-agents/tools/function_tool_with_approval_and_sessions.py.
@tool(approval_mode="never_require")
def get_user_preferences(user_id: str) -> str:
"""Mock function to get user preferences."""

preferences = {
"user123": "Prefers concise responses and technical details",
"user456": "Likes detailed explanations with examples",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,12 @@
from agent_framework.redis import RedisHistoryProvider
from azure.identity import AzureCliCredential
from azure.identity.aio import AzureCliCredential as AsyncAzureCliCredential
from dotenv import load_dotenv
from redis.credentials import CredentialProvider

# Load environment variables from .env file
load_dotenv()


class AzureCredentialProvider(CredentialProvider):
"""Credential provider for Azure AD authentication with Redis Enterprise."""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,13 @@
from agent_framework.azure import AzureOpenAIResponsesClient
from agent_framework.redis import RedisContextProvider
from azure.identity import AzureCliCredential
from dotenv import load_dotenv
from redisvl.extensions.cache.embeddings import EmbeddingsCache
from redisvl.utils.vectorize import OpenAITextVectorizer

# Load environment variables from .env file
load_dotenv()

# Default Redis URL for local Redis Stack.
# Override via the REDIS_URL environment variable for remote or authenticated instances.
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,10 @@
This example demonstrates how to use the Redis context provider to persist
conversational details. Pass it as a constructor argument to create_agent.
Note: For session history persistence, see RedisHistoryProvider in the
conversations/redis_history_provider.py sample. RedisContextProvider is for
AI context (RAG, memories), while RedisHistoryProvider stores message history.
Requirements:
- A Redis instance with RediSearch enabled (e.g., Redis Stack)
- agent-framework with the Redis extra installed: pip install "agent-framework-redis"
Expand All @@ -17,12 +21,17 @@
import asyncio
import os

from agent_framework import AgentSession
from agent_framework.azure import AzureOpenAIResponsesClient
from agent_framework.redis import RedisContextProvider
from azure.identity import AzureCliCredential
from dotenv import load_dotenv
from redisvl.extensions.cache.embeddings import EmbeddingsCache
from redisvl.utils.vectorize import OpenAITextVectorizer

# Load environment variables from .env file
load_dotenv()

# Default Redis URL for local Redis Stack.
# Override via the REDIS_URL environment variable for remote or authenticated instances.
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379")
Expand Down Expand Up @@ -73,35 +82,38 @@ async def main() -> None:
context_providers=[provider],
)

# Create a session to manage conversation state
session = agent.create_session()

# Teach a user preference; the agent writes this to the provider's memory
query = "Remember that I enjoy gumbo"
result = await agent.run(query)
result = await agent.run(query, session=session)
print("User: ", query)
print("Agent: ", result)

# Ask the agent to recall the stored preference; it should retrieve from memory
query = "What do I enjoy?"
result = await agent.run(query)
result = await agent.run(query, session=session)
print("User: ", query)
print("Agent: ", result)

query = "What did I say to you just now?"
result = await agent.run(query)
result = await agent.run(query, session=session)
print("User: ", query)
print("Agent: ", result)

query = "Remember that I have a meeting at 3pm tomorrow"
result = await agent.run(query)
result = await agent.run(query, session=session)
print("User: ", query)
print("Agent: ", result)

query = "Tulips are red"
result = await agent.run(query)
result = await agent.run(query, session=session)
print("User: ", query)
print("Agent: ", result)

query = "What was the first thing I said to you this conversation?"
result = await agent.run(query)
result = await agent.run(query, session=session)
print("User: ", query)
print("Agent: ", result)
# Drop / delete the provider index in Redis
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,17 +32,20 @@
from agent_framework.azure import AzureOpenAIResponsesClient
from agent_framework.redis import RedisContextProvider
from azure.identity import AzureCliCredential
from dotenv import load_dotenv
from redisvl.extensions.cache.embeddings import EmbeddingsCache
from redisvl.utils.vectorize import OpenAITextVectorizer

# Load environment variables from .env file
load_dotenv()

# Default Redis URL for local Redis Stack.
# Override via the REDIS_URL environment variable for remote or authenticated instances.
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379")


# Please set OPENAI_API_KEY to use the OpenAI vectorizer.
# For chat responses, also set AZURE_AI_PROJECT_ENDPOINT and AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME.


def create_chat_client() -> AzureOpenAIResponsesClient:
"""Create an Azure OpenAI Responses client using a Foundry project endpoint."""
return AzureOpenAIResponsesClient(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,12 @@
from agent_framework import Agent, AgentSession, BaseContextProvider, SessionContext, SupportsChatGetResponse
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential
from dotenv import load_dotenv
from pydantic import BaseModel

# Load environment variables from .env file
load_dotenv()


class UserInfo(BaseModel):
name: str | None = None
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,10 @@

from agent_framework import AgentSession, BaseHistoryProvider, Message
from agent_framework.openai import OpenAIChatClient
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

"""
Custom History Provider Example
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@
from agent_framework import AgentSession
from agent_framework.openai import OpenAIChatClient
from agent_framework.redis import RedisHistoryProvider
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

"""
Redis History Provider Session Example
Expand All @@ -16,6 +20,7 @@
with Redis as the backend data store.
"""


# Default Redis URL for local Redis Stack.
# Override via the REDIS_URL environment variable for remote or authenticated instances.
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,10 @@
from agent_framework.azure import AzureAIAgentClient
from agent_framework.openai import OpenAIChatClient
from azure.identity.aio import AzureCliCredential
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

"""
Session Suspend and Resume Example
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@

from agent_framework.declarative import AgentFactory
from azure.identity import AzureCliCredential
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()


async def main():
Expand Down
Loading