Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
105 changes: 104 additions & 1 deletion python/packages/azure-ai/tests/test_azure_ai_client.py
Original file line number Diff line number Diff line change
@@ -1,25 +1,67 @@
# Copyright (c) Microsoft. All rights reserved.

import os
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from typing import Annotated
from unittest.mock import AsyncMock, MagicMock, patch

import pytest
from agent_framework import (
AgentRunResponse,
AgentRunResponseUpdate,
ChatAgent,
ChatClientProtocol,
ChatMessage,
ChatOptions,
Role,
TextContent,
)
from agent_framework.exceptions import ServiceInitializationError
from azure.ai.projects.aio import AIProjectClient
from azure.ai.projects.models import (
ResponseTextFormatConfigurationJsonSchema,
)
from azure.identity.aio import AzureCliCredential
from openai.types.responses.parsed_response import ParsedResponse
from openai.types.responses.response import Response as OpenAIResponse
from pydantic import BaseModel, ConfigDict, ValidationError
from pydantic import BaseModel, ConfigDict, Field, ValidationError

from agent_framework_azure_ai import AzureAIClient, AzureAISettings

# Skip marker for integration tests: they run only when RUN_INTEGRATION_TESTS=true
# AND a real (non-placeholder) AZURE_AI_PROJECT_ENDPOINT and a model deployment
# name are configured. The reason string distinguishes "integration tests disabled"
# from "enabled but missing real credentials".
skip_if_azure_ai_integration_tests_disabled = pytest.mark.skipif(
    os.getenv("RUN_INTEGRATION_TESTS", "false").lower() != "true"
    or os.getenv("AZURE_AI_PROJECT_ENDPOINT", "") in ("", "https://test-project.cognitiveservices.azure.com/")
    or os.getenv("AZURE_AI_MODEL_DEPLOYMENT_NAME", "") == "",
    reason=(
        "No real AZURE_AI_PROJECT_ENDPOINT or AZURE_AI_MODEL_DEPLOYMENT_NAME provided; skipping integration tests."
        if os.getenv("RUN_INTEGRATION_TESTS", "false").lower() == "true"
        else "Integration tests are disabled."
    ),
)


@asynccontextmanager
async def temporary_chat_client(agent_name: str) -> AsyncIterator[AzureAIClient]:
    """Yield an `AzureAIClient` backed by a freshly created Azure AI agent.

    The agent identified by ``agent_name`` is deleted when the context exits,
    even if the body raises. Tests can construct their own `ChatAgent`
    instances from the yielded client.
    """
    project_endpoint = os.environ["AZURE_AI_PROJECT_ENDPOINT"]
    async with AzureCliCredential() as credential:
        async with AIProjectClient(endpoint=project_endpoint, credential=credential) as project_client:
            client = AzureAIClient(
                project_client=project_client,
                agent_name=agent_name,
            )
            try:
                yield client
            finally:
                # Always clean up the agent version so test runs do not leak resources.
                await project_client.agents.delete(agent_name=agent_name)


def create_test_azure_ai_client(
mock_project_client: MagicMock,
Expand Down Expand Up @@ -751,3 +793,64 @@ def mock_project_client() -> MagicMock:
mock_client.close = AsyncMock()

return mock_client


def get_weather(
    location: Annotated[str, Field(description="The location to get the weather for.")],
) -> str:
    """Get the weather for a given location."""
    # Deterministic canned forecast; the tool-calling integration test checks
    # that the model's answer surfaces "sunny" or "25" from this string.
    forecast = f"The weather in {location} is sunny with a high of 25°C."
    return forecast


@pytest.mark.flaky
@skip_if_azure_ai_integration_tests_disabled
async def test_azure_ai_chat_client_agent_basic_run() -> None:
    """Test ChatAgent basic run functionality with AzureAIClient."""
    async with temporary_chat_client(agent_name="BasicRunAgent") as chat_client:
        async with ChatAgent(chat_client=chat_client) as agent:
            result = await agent.run("Hello! Please respond with 'Hello World' exactly.")

            # The agent must return a non-empty response containing the phrase.
            assert isinstance(result, AgentRunResponse)
            assert result.text is not None
            assert len(result.text) > 0
            assert "Hello World" in result.text


@pytest.mark.flaky
@skip_if_azure_ai_integration_tests_disabled
async def test_azure_ai_chat_client_agent_basic_run_streaming() -> None:
    """Test ChatAgent basic streaming functionality with AzureAIClient."""
    async with (
        temporary_chat_client(agent_name="BasicRunStreamingAgent") as chat_client,
        ChatAgent(chat_client=chat_client) as agent,
    ):
        collected: list[str] = []
        async for update in agent.run_stream("Please respond with exactly: 'This is a streaming response test.'"):
            # Every chunk must be a well-formed update object.
            assert update is not None
            assert isinstance(update, AgentRunResponseUpdate)
            if update.text:
                collected.append(update.text)

        # The concatenated stream must be non-empty and contain the expected phrase.
        combined = "".join(collected)
        assert len(combined) > 0
        assert "streaming response test" in combined.lower()


@pytest.mark.flaky
@skip_if_azure_ai_integration_tests_disabled
async def test_azure_ai_chat_client_agent_with_tools() -> None:
    """Test ChatAgent tools with AzureAIClient."""
    async with (
        temporary_chat_client(agent_name="RunToolsAgent") as chat_client,
        ChatAgent(chat_client=chat_client, tools=[get_weather]) as agent,
    ):
        answer = await agent.run("What's the weather like in Seattle?")

        # The answer should surface the canned forecast from get_weather.
        assert isinstance(answer, AgentRunResponse)
        assert answer.text is not None
        assert len(answer.text) > 0
        lowered = answer.text.lower()
        assert ("sunny" in lowered) or ("25" in lowered)
2 changes: 0 additions & 2 deletions python/samples/getting_started/agents/azure_ai/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,6 @@ This folder contains examples demonstrating different ways to create and use age
| [`azure_ai_with_file_search.py`](azure_ai_with_file_search.py) | Shows how to use the `HostedFileSearchTool` with Azure AI agents to upload files, create vector stores, and enable agents to search through uploaded documents to answer user questions. |
| [`azure_ai_with_hosted_mcp.py`](azure_ai_with_hosted_mcp.py) | Shows how to integrate hosted Model Context Protocol (MCP) tools with Azure AI Agent. |
| [`azure_ai_with_response_format.py`](azure_ai_with_response_format.py) | Shows how to use structured outputs (response format) with Azure AI agents using Pydantic models to enforce specific response schemas. |
| [`azure_ai_with_search_context_agentic.py`](azure_ai_with_search_context_agentic.py) | Shows how to use AzureAISearchContextProvider with agentic mode. Uses Knowledge Bases for multi-hop reasoning across documents with query planning. Recommended for most scenarios - slightly slower with more token consumption for query planning, but more accurate results. |
| [`azure_ai_with_search_context_semantic.py`](azure_ai_with_search_context_semantic.py) | Shows how to use AzureAISearchContextProvider with semantic mode. Fast hybrid search with vector + keyword search and semantic ranking for RAG. Best for simple queries where speed is critical. |
| [`azure_ai_with_sharepoint.py`](azure_ai_with_sharepoint.py) | Shows how to use SharePoint grounding with Azure AI agents to search through SharePoint content and answer user questions with proper citations. Requires a SharePoint connection configured in your Azure AI project. |
| [`azure_ai_with_thread.py`](azure_ai_with_thread.py) | Demonstrates thread management with Azure AI agents, including automatic thread creation for stateless conversations and explicit thread management for maintaining conversation context across multiple interactions. |
| [`azure_ai_with_image_generation.py`](azure_ai_with_image_generation.py) | Shows how to use the `ImageGenTool` with Azure AI agents to generate images based on text prompts. |
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ async def example_with_client() -> None:
AIProjectClient(endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"], credential=credential) as project_client,
):
# Create a conversation using OpenAI client
openai_client = await project_client.get_openai_client()
openai_client = project_client.get_openai_client()
conversation = await openai_client.conversations.create()
conversation_id = conversation.id
print(f"Conversation ID: {conversation_id}")
Expand Down Expand Up @@ -70,7 +70,7 @@ async def example_with_thread() -> None:
) as agent,
):
# Create a conversation using OpenAI client
openai_client = await project_client.get_openai_client()
openai_client = project_client.get_openai_client()
conversation = await openai_client.conversations.create()
conversation_id = conversation.id
print(f"Conversation ID: {conversation_id}")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@ This folder contains examples demonstrating different ways to create and use age
| [`azure_ai_with_local_mcp.py`](azure_ai_with_local_mcp.py) | Shows how to integrate Azure AI agents with local Model Context Protocol (MCP) servers for enhanced functionality and tool integration. Demonstrates both agent-level and run-level tool configuration. |
| [`azure_ai_with_multiple_tools.py`](azure_ai_with_multiple_tools.py) | Demonstrates how to use multiple tools together with Azure AI agents, including web search, MCP servers, and function tools. Shows coordinated multi-tool interactions and approval workflows. |
| [`azure_ai_with_openapi_tools.py`](azure_ai_with_openapi_tools.py) | Demonstrates how to use OpenAPI tools with Azure AI agents to integrate external REST APIs. Shows OpenAPI specification loading, anonymous authentication, thread context management, and coordinated multi-API conversations using weather and countries APIs. |
| [`azure_ai_with_search_context_agentic.py`](azure_ai_with_search_context_agentic.py) | Shows how to use AzureAISearchContextProvider with agentic mode. Uses Knowledge Bases for multi-hop reasoning across documents with query planning. Recommended for most scenarios - slightly slower with more token consumption for query planning, but more accurate results. |
| [`azure_ai_with_search_context_semantic.py`](azure_ai_with_search_context_semantic.py) | Shows how to use AzureAISearchContextProvider with semantic mode. Fast hybrid search with vector + keyword search and semantic ranking for RAG. Best for simple queries where speed is critical. |
| [`azure_ai_with_thread.py`](azure_ai_with_thread.py) | Demonstrates thread management with Azure AI agents, including automatic thread creation for stateless conversations and explicit thread management for maintaining conversation context across multiple interactions. |

## Environment Variables
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,11 @@
import asyncio
import os

from dotenv import load_dotenv

from agent_framework import ChatAgent
from agent_framework_aisearch import AzureAISearchContextProvider
from agent_framework_azure_ai import AzureAIAgentClient
from azure.identity.aio import DefaultAzureCredential
from azure.identity.aio import AzureCliCredential
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()
Expand Down Expand Up @@ -68,7 +67,7 @@ async def main() -> None:
endpoint=search_endpoint,
index_name=index_name,
api_key=search_key, # Use api_key for API key auth, or credential for managed identity
credential=DefaultAzureCredential() if not search_key else None,
credential=AzureCliCredential() if not search_key else None,
mode="agentic", # Advanced mode for multi-hop reasoning
# Agentic mode configuration
azure_ai_project_endpoint=project_endpoint,
Expand All @@ -87,7 +86,7 @@ async def main() -> None:
AzureAIAgentClient(
project_endpoint=project_endpoint,
model_deployment_name=model_deployment,
async_credential=DefaultAzureCredential(),
async_credential=AzureCliCredential(),
) as client,
ChatAgent(
chat_client=client,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,11 @@
import asyncio
import os

from dotenv import load_dotenv

from agent_framework import ChatAgent
from agent_framework_aisearch import AzureAISearchContextProvider
from agent_framework_azure_ai import AzureAIAgentClient
from azure.identity.aio import DefaultAzureCredential
from azure.identity.aio import AzureCliCredential
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()
Expand Down Expand Up @@ -58,7 +57,7 @@ async def main() -> None:
endpoint=search_endpoint,
index_name=index_name,
api_key=search_key, # Use api_key for API key auth, or credential for managed identity
credential=DefaultAzureCredential() if not search_key else None,
credential=AzureCliCredential() if not search_key else None,
mode="semantic", # Default mode
top_k=3, # Retrieve top 3 most relevant documents
)
Expand All @@ -69,7 +68,7 @@ async def main() -> None:
AzureAIAgentClient(
project_endpoint=project_endpoint,
model_deployment_name=model_deployment,
async_credential=DefaultAzureCredential(),
async_credential=AzureCliCredential(),
) as client,
ChatAgent(
chat_client=client,
Expand Down