# Copyright (c) Microsoft. All rights reserved.

from asyncio import CancelledError

from a2a.server.agent_execution import AgentExecutor, RequestContext
from a2a.server.events import EventQueue
from a2a.server.tasks import TaskUpdater
from a2a.types import FilePart, FileWithBytes, FileWithUri, Part, Task, TaskState, TextPart
from a2a.utils import new_task
from agent_framework import (
    AgentThread,
    ChatAgent,
    ChatMessage,
    DataContent,
    Role,
    TextContent,
    UriContent,
    WorkflowAgent,
)
from typing_extensions import override


class A2AExecutor(AgentExecutor):
    """Execute AI agents using the A2A (Agent-to-Agent) protocol.

    Bridges agents built with the agent_framework library and the A2A protocol:
    it keeps one :class:`AgentThread` per ``context_id`` so multi-turn
    conversations retain history, runs the agent for each incoming request,
    and converts the agent's response messages into A2A protocol events.

    Thread persistence uses an in-memory dict by default. Subclasses may
    override:

    - ``save_thread(context_id, thread)`` / ``get_thread(context_id)`` to plug
      in a durable storage backend (database, file system, cloud storage).
    - ``handle_events(message, updater)`` to customize how agent responses are
      mapped to A2A protocol parts and status updates.

    Args:
        agent: The agent to execute — a ChatAgent for conversational tasks or
            a WorkflowAgent for multistep agent workflows.
    """

    def __init__(self, agent: ChatAgent | WorkflowAgent) -> None:
        """Initialize the executor with default in-memory thread storage.

        Args:
            agent: The AI agent to execute (ChatAgent or WorkflowAgent).
        """
        super().__init__()
        # Maps context_id -> AgentThread. Default in-memory backend; subclasses
        # that override save_thread/get_thread may bypass it entirely.
        self._agent_thread_storage: dict[str, AgentThread] = {}
        self._agent: ChatAgent | WorkflowAgent = agent

    async def save_thread(self, context_id: str, thread: AgentThread) -> None:
        """Persist the agent thread for the given context ID.

        Called after every execution so that consecutive requests sharing a
        ``context_id`` continue the same conversation. Override in a subclass
        to store threads durably instead of in memory.

        Args:
            context_id: Unique identifier of the conversation context.
            thread: Thread holding the conversation state to persist.
        """
        self._agent_thread_storage[context_id] = thread

    async def get_thread(self, context_id: str) -> AgentThread | None:
        """Return the stored thread for ``context_id``, or None if absent.

        Override together with :meth:`save_thread` to use a custom storage
        backend.

        Args:
            context_id: Unique identifier of the conversation context.

        Returns:
            The AgentThread associated with ``context_id``, or None when no
            thread has been saved for that context yet.
        """
        return self._agent_thread_storage.get(context_id)

    async def get_agent_thread(self, task: Task) -> AgentThread:
        """Get the thread for ``task.context_id``, creating one if needed.

        Newly created threads are saved immediately so subsequent calls with
        the same context return the same conversation.

        Args:
            task: Task whose ``context_id`` identifies the conversation.

        Returns:
            The existing or newly created agent thread for that context.
        """
        thread = await self.get_thread(task.context_id)
        if thread is None:
            thread = self._agent.get_new_thread()
            await self.save_thread(task.context_id, thread)
        return thread

    @override
    async def cancel(self, context: RequestContext, event_queue: EventQueue) -> None:
        """Cancel agent execution.

        Cancellation is managed by the A2A protocol layer; this no-op exists
        to satisfy the AgentExecutor interface. Subclasses may override it to
        implement custom cancellation behavior.

        Args:
            context: Request context of the execution being cancelled.
            event_queue: Queue for cancellation events; unused here.
        """
        # Cancellation handled at A2A protocol level.

    @override
    async def execute(self, context: RequestContext, event_queue: EventQueue) -> None:
        """Run the agent for one request and publish A2A events.

        Validates the context, ensures a Task exists (creating and enqueueing
        one when absent), restores or creates the conversation thread, runs
        the agent, converts each response message via :meth:`handle_events`,
        and reports completion, cancellation, or failure through the
        TaskUpdater. The thread is saved in all cases (``finally``) so history
        survives for the next turn with the same ``context_id``.

        Args:
            context: Request context; must carry ``context_id`` and ``message``.
            event_queue: Queue receiving the task and status-update events.

        Raises:
            ValueError: If ``context_id`` or ``message`` is missing.
        """
        if context.context_id is None:
            raise ValueError("Context ID must be provided in the RequestContext")
        if context.message is None:
            raise ValueError("Message must be provided in the RequestContext")

        query = context.get_user_input()
        task = context.current_task

        if not task:
            task = new_task(context.message)
            await event_queue.enqueue_event(task)

        updater = TaskUpdater(event_queue, task.id, context.context_id)
        await updater.submit()

        agent_thread = await self.get_agent_thread(task)
        try:
            await updater.start_work()
            user_message = ChatMessage(role=Role.USER, text=query)

            response = await self._agent.run(user_message, thread=agent_thread)
            response_messages = response.messages
            # Defensive: tolerate a single message as well as a list.
            if not isinstance(response_messages, list):
                response_messages = [response_messages]
            for message in response_messages:
                await self.handle_events(message, updater)
            await updater.complete()
        except CancelledError:
            await updater.update_status(state=TaskState.canceled, final=True)
        except Exception as e:
            # str(e), not str(e.args): the latter renders a tuple repr such as
            # "('boom',)" in the client-visible failure message.
            await updater.update_status(
                state=TaskState.failed,
                final=True,
                message=updater.new_agent_message([Part(root=TextPart(text=str(e)))]),
            )
        finally:
            # Always persist the thread so the conversation can continue.
            await self.save_thread(task.context_id, agent_thread)

    async def handle_events(self, message: ChatMessage, updater: TaskUpdater) -> None:
        """Convert one agent response message into an A2A status update.

        Maps supported contents to A2A parts (TextContent -> TextPart,
        DataContent -> FilePart with bytes, UriContent -> FilePart with URI),
        preserves ``additional_properties`` as metadata, and publishes a
        ``working`` status update. USER-role messages are skipped entirely;
        unsupported content types are silently ignored, and no update is sent
        when no supported content is present.

        Args:
            message: Message produced by the agent run.
            updater: TaskUpdater used to publish the status update.
        """
        if message.role == Role.USER:
            # User echoes carry no new information for task updates.
            return

        parts: list[Part] = []
        metadata = getattr(message, "additional_properties", None)

        # The content types are mutually exclusive, so dispatch with elif.
        for content in message.contents:
            if isinstance(content, TextContent):
                parts.append(Part(root=TextPart(text=content.text)))
            elif isinstance(content, DataContent):
                # NOTE(review): unlike the URI branch, no mime_type is
                # forwarded here — confirm whether DataContent exposes a
                # media type that FileWithBytes should carry.
                parts.append(Part(root=FilePart(file=FileWithBytes(bytes=content.get_data_bytes_as_str()))))
            elif isinstance(content, UriContent):
                parts.append(Part(root=FilePart(file=FileWithUri(uri=content.uri, mime_type=content.media_type))))
            # Silently skip unsupported content types.

        if parts:
            await updater.update_status(
                state=TaskState.working,
                message=updater.new_agent_message(parts=parts, metadata=metadata),
            )
# Copyright (c) Microsoft. All rights reserved.

import base64
from asyncio import CancelledError
from unittest.mock import AsyncMock, MagicMock, patch
from uuid import uuid4

from a2a.types import Task, TaskState
from agent_framework import (
    AgentRunResponse,
    AgentThread,
    ChatAgent,
    ChatMessage,
    DataContent,
    Role,
    TextContent,
    UriContent,
    WorkflowAgent,
)
from agent_framework.a2a import A2AExecutor
from pytest import fixture, raises


@fixture
def mock_chat_agent() -> MagicMock:
    """Fixture that provides a mock ChatAgent."""
    agent = MagicMock(spec=ChatAgent)
    agent.get_new_thread = MagicMock(return_value=MagicMock(spec=AgentThread))
    agent.run = AsyncMock()
    return agent


@fixture
def mock_workflow_agent() -> MagicMock:
    """Fixture that provides a mock WorkflowAgent."""
    agent = MagicMock(spec=WorkflowAgent)
    agent.get_new_thread = MagicMock(return_value=MagicMock(spec=AgentThread))
    agent.run = AsyncMock()
    return agent


@fixture
def mock_request_context() -> MagicMock:
    """Fixture that provides a mock RequestContext."""
    request_context = MagicMock()
    request_context.context_id = str(uuid4())
    request_context.get_user_input = MagicMock(return_value="Test query")
    request_context.current_task = None
    request_context.message = None
    return request_context


@fixture
def mock_event_queue() -> MagicMock:
    """Fixture that provides a mock EventQueue."""
    queue = AsyncMock()
    queue.enqueue_event = AsyncMock()
    return queue


@fixture
def mock_agent_thread() -> MagicMock:
    """Fixture that provides a mock AgentThread."""
    return MagicMock(spec=AgentThread)


@fixture
def mock_task() -> Task:
    """Fixture that provides a mock Task."""
    task = MagicMock(spec=Task)
    task.id = str(uuid4())
    task.context_id = str(uuid4())
    task.state = TaskState.completed
    return task


@fixture
def mock_task_updater() -> MagicMock:
    """Fixture that provides a mock TaskUpdater."""
    updater = MagicMock()
    updater.submit = AsyncMock()
    updater.start_work = AsyncMock()
    updater.complete = AsyncMock()
    updater.update_status = AsyncMock()
    updater.new_agent_message = MagicMock()
    return updater


@fixture
def executor(mock_chat_agent: MagicMock) -> A2AExecutor:
    """Fixture that provides an A2AExecutor with ChatAgent."""
    return A2AExecutor(agent=mock_chat_agent)


class TestA2AExecutorInitialization:
    """Tests for A2AExecutor initialization."""

    def test_initialization_with_chat_agent_only(self, mock_chat_agent: MagicMock) -> None:
        """Arrange: Create mock ChatAgent
        Act: Initialize A2AExecutor with only agent
        Assert: Executor is created with default dict-based thread storage
        """
        # Act
        executor = A2AExecutor(agent=mock_chat_agent)

        # Assert
        assert executor._agent is mock_chat_agent
        assert isinstance(executor._agent_thread_storage, dict)
        assert len(executor._agent_thread_storage) == 0

    def test_initialization_with_workflow_agent_only(self, mock_workflow_agent: MagicMock) -> None:
        """Arrange: Create mock WorkflowAgent
        Act: Initialize A2AExecutor with WorkflowAgent
        Assert: Executor accepts WorkflowAgent
        """
        # Act
        executor = A2AExecutor(agent=mock_workflow_agent)

        # Assert
        assert executor._agent is mock_workflow_agent
        assert isinstance(executor._agent_thread_storage, dict)

    def test_initialization_creates_empty_thread_storage(self, mock_chat_agent: MagicMock) -> None:
        """Arrange: Create mock ChatAgent
        Act: Initialize A2AExecutor
        Assert: Executor creates empty dict-based thread storage
        """
        # Act
        executor = A2AExecutor(agent=mock_chat_agent)

        # Assert
        assert isinstance(executor._agent_thread_storage, dict)
        assert len(executor._agent_thread_storage) == 0

    def test_initialization_thread_storage_empty_for_workflow_agent(self, mock_workflow_agent: MagicMock) -> None:
        """Arrange: Create mock WorkflowAgent
        Act: Initialize A2AExecutor with WorkflowAgent
        Assert: Executor creates empty dict-based storage
        """
        # Act
        executor = A2AExecutor(agent=mock_workflow_agent)

        # Assert
        assert isinstance(executor._agent_thread_storage, dict)
        assert len(executor._agent_thread_storage) == 0


class TestA2AExecutorCancel:
    """Tests for the cancel method."""

    async def test_cancel_method_completes(
        self,
        executor: A2AExecutor,
        mock_request_context: MagicMock,
        mock_event_queue: MagicMock,
    ) -> None:
        """Arrange: Create executor with dependencies
        Act: Call cancel method
        Assert: Method completes without raising error
        """
        # Act & Assert (should not raise)
        await executor.cancel(mock_request_context, mock_event_queue)  # type: ignore

    async def test_cancel_handles_different_contexts(
        self,
        executor: A2AExecutor,
        mock_event_queue: MagicMock,
    ) -> None:
        """Arrange: Create executor with multiple request contexts
        Act: Call cancel with different contexts
        Assert: Each cancel completes successfully
        """
        # Arrange
        context1 = MagicMock()
        context2 = MagicMock()

        # Act & Assert
        await executor.cancel(context1, mock_event_queue)  # type: ignore
        await executor.cancel(context2, mock_event_queue)  # type: ignore


class TestA2AExecutorThreadStorage:
    """Tests for save_thread and get_thread methods."""

    async def test_save_thread_stores_thread_with_context_id(
        self,
        executor: A2AExecutor,
        mock_agent_thread: MagicMock,
    ) -> None:
        """Arrange: Create executor with empty storage
        Act: Save thread with context_id
        Assert: Thread is stored in dict
        """
        # Arrange
        context_id = "test-context-123"

        # Act
        await executor.save_thread(context_id, mock_agent_thread)

        # Assert
        assert executor._agent_thread_storage[context_id] is mock_agent_thread

    async def test_get_thread_returns_stored_thread(
        self,
        executor: A2AExecutor,
        mock_agent_thread: MagicMock,
    ) -> None:
        """Arrange: Create executor with stored thread
        Act: Get thread with context_id
        Assert: Stored thread is returned
        """
        # Arrange
        context_id = "test-context-123"
        executor._agent_thread_storage[context_id] = mock_agent_thread

        # Act
        result = await executor.get_thread(context_id)

        # Assert
        assert result is mock_agent_thread

    async def test_get_thread_returns_none_for_missing_thread(
        self,
        executor: A2AExecutor,
    ) -> None:
        """Arrange: Create executor without stored thread
        Act: Get thread with non-existent context_id
        Assert: None is returned
        """
        # Act
        result = await executor.get_thread("non-existent")

        # Assert
        assert result is None

    async def test_save_and_get_thread_roundtrip(
        self,
        executor: A2AExecutor,
        mock_agent_thread: MagicMock,
    ) -> None:
        """Arrange: Create executor
        Act: Save and then get thread
        Assert: Retrieved thread matches saved thread
        """
        # Arrange
        context_id = "roundtrip-test"

        # Act
        await executor.save_thread(context_id, mock_agent_thread)
        result = await executor.get_thread(context_id)

        # Assert
        assert result is mock_agent_thread


class TestA2AExecutorGetAgentThread:
    """Tests for the get_agent_thread method."""

    async def test_get_agent_thread_creates_new_thread_when_not_exists(
        self,
        executor: A2AExecutor,
        mock_task: Task,
    ) -> None:
        """Arrange: Create executor with empty storage
        Act: Call get_agent_thread with context that has no stored thread
        Assert: New thread is created and saved
        """
        # Arrange
        mock_thread = MagicMock(spec=AgentThread)
        executor._agent.get_new_thread = MagicMock(return_value=mock_thread)
        assert len(executor._agent_thread_storage) == 0

        # Act
        result = await executor.get_agent_thread(mock_task)

        # Assert
        assert result is mock_thread
        assert executor._agent_thread_storage[mock_task.context_id] is mock_thread

    async def test_get_agent_thread_returns_existing_thread(
        self,
        executor: A2AExecutor,
        mock_task: Task,
        mock_agent_thread: MagicMock,
    ) -> None:
        """Arrange: Create executor with existing thread in storage
        Act: Call get_agent_thread with context that has stored thread
        Assert: Existing thread is returned without creating new one
        """
        # Arrange
        executor._agent_thread_storage[mock_task.context_id] = mock_agent_thread
        executor._agent.get_new_thread = MagicMock()

        # Act
        result = await executor.get_agent_thread(mock_task)

        # Assert
        assert result is mock_agent_thread
        executor._agent.get_new_thread.assert_not_called()

    async def test_get_agent_thread_with_different_contexts(
        self,
        executor: A2AExecutor,
    ) -> None:
        """Arrange: Create executor with multiple context IDs
        Act: Call get_agent_thread with different context IDs
        Assert: Each context maintains separate threads
        """
        # Arrange
        thread1 = MagicMock(spec=AgentThread)
        thread2 = MagicMock(spec=AgentThread)
        # side_effect with a list yields successive return values — no need
        # for a hand-rolled mutable-counter closure.
        executor._agent.get_new_thread = MagicMock(side_effect=[thread1, thread2])

        task1 = MagicMock(spec=Task)
        task1.context_id = "context-1"
        task2 = MagicMock(spec=Task)
        task2.context_id = "context-2"

        # Act
        result1 = await executor.get_agent_thread(task1)
        result2 = await executor.get_agent_thread(task2)

        # Assert
        assert result1 is thread1
        assert result2 is thread2
        assert executor._agent_thread_storage["context-1"] is thread1
        assert executor._agent_thread_storage["context-2"] is thread2
mock_request_context.context_id = "ctx-123" + mock_request_context.message = MagicMock() + + executor._agent_thread_storage[mock_task.context_id] = mock_agent_thread + + response_message = ChatMessage(role=Role.ASSISTANT, contents=[TextContent(text="Hello back")]) + response = MagicMock(spec=AgentRunResponse) + response.messages = [response_message] + executor._agent.run = AsyncMock(return_value=response) + + with patch("agent_framework_a2a._a2a_executor.TaskUpdater") as mock_updater_class: + mock_updater = MagicMock() + mock_updater.submit = AsyncMock() + mock_updater.start_work = AsyncMock() + mock_updater.complete = AsyncMock() + mock_updater.update_status = AsyncMock() + mock_updater.new_agent_message = MagicMock(return_value="message_obj") + mock_updater_class.return_value = mock_updater + + # Act + await executor.execute(mock_request_context, mock_event_queue) + + # Assert + mock_updater.submit.assert_called_once() + mock_updater.start_work.assert_called_once() + mock_updater.complete.assert_called_once() + + async def test_execute_creates_task_when_not_exists( + self, + executor: A2AExecutor, + mock_request_context: MagicMock, + mock_event_queue: MagicMock, + mock_agent_thread: MagicMock, + ) -> None: + """Arrange: Create executor with request context without task + Act: Call execute method + Assert: New task is created and enqueued + """ + # Arrange + mock_message = MagicMock() + mock_request_context.get_user_input = MagicMock(return_value="Hello") + mock_request_context.current_task = None + mock_request_context.message = mock_message + mock_request_context.context_id = "ctx-123" + + executor._agent_thread_storage["ctx-123"] = mock_agent_thread + + response_message = ChatMessage(role=Role.ASSISTANT, contents=[TextContent(text="Response")]) + response = MagicMock(spec=AgentRunResponse) + response.messages = [response_message] + executor._agent.run = AsyncMock(return_value=response) + executor._agent.get_new_thread = MagicMock(return_value=mock_agent_thread) 
+ + with patch("agent_framework_a2a._a2a_executor.new_task") as mock_new_task: + mock_task = MagicMock(spec=Task) + mock_task.id = "task-new" + mock_task.context_id = "ctx-123" + mock_new_task.return_value = mock_task + + with patch("agent_framework_a2a._a2a_executor.TaskUpdater") as mock_updater_class: + mock_updater = MagicMock() + mock_updater.submit = AsyncMock() + mock_updater.start_work = AsyncMock() + mock_updater.complete = AsyncMock() + mock_updater.update_status = AsyncMock() + mock_updater.new_agent_message = MagicMock(return_value="message_obj") + mock_updater_class.return_value = mock_updater + + # Act + await executor.execute(mock_request_context, mock_event_queue) + + # Assert + mock_new_task.assert_called_once() + mock_event_queue.enqueue_event.assert_called_once() + + async def test_execute_raises_error_when_context_id_missing( + self, + executor: A2AExecutor, + mock_request_context: MagicMock, + mock_event_queue: MagicMock, + ) -> None: + """Arrange: Create context without context_id + Act: Call execute method + Assert: ValueError is raised + """ + # Arrange + mock_request_context.context_id = None + mock_request_context.message = MagicMock() + + # Act & Assert + with raises(ValueError) as excinfo: + await executor.execute(mock_request_context, mock_event_queue) + + # Assert + assert "Context ID" in str(excinfo.value) + + async def test_execute_raises_error_when_message_missing( + self, + executor: A2AExecutor, + mock_request_context: MagicMock, + mock_event_queue: MagicMock, + ) -> None: + """Arrange: Create context without message + Act: Call execute method + Assert: ValueError is raised + """ + # Arrange + mock_request_context.context_id = "ctx-123" + mock_request_context.message = None + + # Act & Assert + with raises(ValueError) as excinfo: + await executor.execute(mock_request_context, mock_event_queue) + + # Assert + assert "Message" in str(excinfo.value) + + async def test_execute_handles_cancelled_error( + self, + executor: A2AExecutor, + 
mock_request_context: MagicMock, + mock_event_queue: MagicMock, + mock_task: Task, + mock_agent_thread: MagicMock, + ) -> None: + """Arrange: Create executor that raises CancelledError + Act: Call execute method + Assert: Error is caught and task is marked as canceled + """ + # Arrange + mock_request_context.get_user_input = MagicMock(return_value="Hello") + mock_request_context.current_task = mock_task + mock_request_context.context_id = "ctx-123" + mock_request_context.message = MagicMock() + + executor._agent_thread_storage[mock_task.context_id] = mock_agent_thread + executor._agent.run = AsyncMock(side_effect=CancelledError()) + + with patch("agent_framework_a2a._a2a_executor.TaskUpdater") as mock_updater_class: + mock_updater = MagicMock() + mock_updater.submit = AsyncMock() + mock_updater.start_work = AsyncMock() + mock_updater.update_status = AsyncMock() + mock_updater_class.return_value = mock_updater + + # Act + await executor.execute(mock_request_context, mock_event_queue) # type: ignore + + # Assert + mock_updater.update_status.assert_called() + call_args_list = mock_updater.update_status.call_args_list + assert any(call[1].get("state") == TaskState.canceled for call in call_args_list) + + async def test_execute_handles_generic_exception( + self, + executor: A2AExecutor, + mock_request_context: MagicMock, + mock_event_queue: MagicMock, + mock_task: Task, + mock_agent_thread: MagicMock, + ) -> None: + """Arrange: Create executor that raises generic exception + Act: Call execute method + Assert: Error is caught and task is marked as failed + """ + # Arrange + mock_request_context.get_user_input = MagicMock(return_value="Hello") + mock_request_context.current_task = mock_task + mock_request_context.context_id = "ctx-123" + mock_request_context.message = MagicMock() + + executor._agent_thread_storage[mock_task.context_id] = mock_agent_thread + executor._agent.run = AsyncMock(side_effect=ValueError("Test error")) + + with 
patch("agent_framework_a2a._a2a_executor.TaskUpdater") as mock_updater_class: + mock_updater = MagicMock() + mock_updater.submit = AsyncMock() + mock_updater.start_work = AsyncMock() + mock_updater.update_status = AsyncMock() + mock_updater.new_agent_message = MagicMock(return_value="error_message") + mock_updater_class.return_value = mock_updater + + # Act + await executor.execute(mock_request_context, mock_event_queue) + + # Assert + call_args_list = mock_updater.update_status.call_args_list + assert any(call[1].get("state") == TaskState.failed for call in call_args_list) + + async def test_execute_processes_multiple_response_messages( + self, + executor: A2AExecutor, + mock_request_context: MagicMock, + mock_event_queue: MagicMock, + mock_task: Task, + mock_agent_thread: MagicMock, + ) -> None: + """Arrange: Create executor that returns multiple response messages + Act: Call execute method + Assert: All messages are processed through handle_events + """ + # Arrange + mock_request_context.get_user_input = MagicMock(return_value="Hello") + mock_request_context.current_task = mock_task + mock_request_context.context_id = "ctx-123" + mock_request_context.message = MagicMock() + + executor._agent_thread_storage[mock_task.context_id] = mock_agent_thread + + response_message1 = ChatMessage(role=Role.ASSISTANT, contents=[TextContent(text="First")]) + response_message2 = ChatMessage(role=Role.ASSISTANT, contents=[TextContent(text="Second")]) + response = MagicMock(spec=AgentRunResponse) + response.messages = [response_message1, response_message2] + executor._agent.run = AsyncMock(return_value=response) + + # Mock handle_events + executor.handle_events = AsyncMock() + + with patch("agent_framework_a2a._a2a_executor.TaskUpdater") as mock_updater_class: + mock_updater = MagicMock() + mock_updater.submit = AsyncMock() + mock_updater.start_work = AsyncMock() + mock_updater.complete = AsyncMock() + mock_updater_class.return_value = mock_updater + + # Act + await 
executor.execute(mock_request_context, mock_event_queue) + + # Assert + assert executor.handle_events.call_count == 2 + + async def test_execute_thread_is_saved_after_completion( + self, + executor: A2AExecutor, + mock_request_context: MagicMock, + mock_event_queue: MagicMock, + mock_task: Task, + mock_agent_thread: MagicMock, + ) -> None: + """Arrange: Create executor with thread + Act: Call execute and complete successfully + Assert: Thread is saved in storage + """ + # Arrange + mock_request_context.get_user_input = MagicMock(return_value="Hello") + mock_request_context.current_task = mock_task + mock_request_context.context_id = "ctx-123" + mock_request_context.message = MagicMock() + + response_message = ChatMessage(role=Role.ASSISTANT, contents=[TextContent(text="Response")]) + response = MagicMock(spec=AgentRunResponse) + response.messages = [response_message] + executor._agent.run = AsyncMock(return_value=response) + executor._agent.get_new_thread = MagicMock(return_value=mock_agent_thread) + + with patch("agent_framework_a2a._a2a_executor.TaskUpdater") as mock_updater_class: + mock_updater = MagicMock() + mock_updater.submit = AsyncMock() + mock_updater.start_work = AsyncMock() + mock_updater.complete = AsyncMock() + mock_updater.update_status = AsyncMock() + mock_updater.new_agent_message = MagicMock(return_value="message_obj") + mock_updater_class.return_value = mock_updater + + # Act + await executor.execute(mock_request_context, mock_event_queue) + + # Assert + assert executor._agent_thread_storage[mock_task.context_id] is mock_agent_thread + + async def test_execute_creates_chat_message_with_user_role( + self, + executor: A2AExecutor, + mock_request_context: MagicMock, + mock_event_queue: MagicMock, + mock_task: Task, + mock_agent_thread: MagicMock, + ) -> None: + """Arrange: Create executor with request + Act: Call execute method + Assert: ChatMessage is created with USER role and query text + """ + # Arrange + query_text = "Hello agent" + 
mock_request_context.get_user_input = MagicMock(return_value=query_text) + mock_request_context.current_task = mock_task + mock_request_context.context_id = "ctx-123" + mock_request_context.message = MagicMock() + + executor._agent_thread_storage[mock_task.context_id] = mock_agent_thread + + response_message = ChatMessage(role=Role.ASSISTANT, contents=[TextContent(text="Response")]) + response = MagicMock(spec=AgentRunResponse) + response.messages = [response_message] + executor._agent.run = AsyncMock(return_value=response) + + with patch("agent_framework_a2a._a2a_executor.TaskUpdater") as mock_updater_class: + mock_updater = MagicMock() + mock_updater.submit = AsyncMock() + mock_updater.start_work = AsyncMock() + mock_updater.complete = AsyncMock() + mock_updater.update_status = AsyncMock() + mock_updater.new_agent_message = MagicMock(return_value="message_obj") + mock_updater_class.return_value = mock_updater + + # Act + await executor.execute(mock_request_context, mock_event_queue) + + # Assert + executor._agent.run.assert_called_once() + call_args = executor._agent.run.call_args + user_message = call_args[0][0] + assert user_message.role == Role.USER + assert user_message.text == query_text + + +class TestA2AExecutorHandleEvents: + """Tests for A2AExecutor.handle_events method.""" + + @fixture + def mock_updater(self) -> MagicMock: + """Create a mock execution context.""" + updater = MagicMock() + updater.update_status = AsyncMock() + updater.new_agent_message = MagicMock(return_value="mock_message") + return updater + + async def test_ignore_user_messages(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test that messages from USER role are ignored.""" + # Arrange + message = ChatMessage( + contents=[TextContent(text="User input")], + role=Role.USER, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_not_called() + + async def test_ignore_messages_with_no_contents(self, 
executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test that messages with no contents are ignored.""" + # Arrange + message = ChatMessage( + contents=[], + role=Role.ASSISTANT, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_not_called() + + async def test_handle_text_content(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test handling messages with text content.""" + # Arrange + text = "Hello, this is a test message" + message = ChatMessage( + contents=[TextContent(text=text)], + role=Role.ASSISTANT, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_called_once() + call_args = mock_updater.update_status.call_args + assert call_args.kwargs["state"] == TaskState.working + assert mock_updater.new_agent_message.called + + async def test_handle_multiple_text_contents(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test handling messages with multiple text contents.""" + # Arrange + message = ChatMessage( + contents=[ + TextContent(text="First message"), + TextContent(text="Second message"), + ], + role=Role.ASSISTANT, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_called_once() + assert mock_updater.new_agent_message.called + + async def test_handle_data_content(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test handling messages with data content.""" + # Arrange + data = b"test file data" + base64_data = base64.b64encode(data).decode("utf-8") + data_uri = f"data:application/octet-stream;base64,{base64_data}" + + message = ChatMessage( + contents=[DataContent(uri=data_uri)], + role=Role.ASSISTANT, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_called_once() + call_args = mock_updater.update_status.call_args + assert 
call_args.kwargs["state"] == TaskState.working + + async def test_handle_uri_content(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test handling messages with URI content.""" + # Arrange + uri = "https://example.com/file.pdf" + message = ChatMessage( + contents=[UriContent(uri=uri, media_type="pdf")], + role=Role.ASSISTANT, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_called_once() + call_args = mock_updater.update_status.call_args + assert call_args.kwargs["state"] == TaskState.working + + async def test_handle_uri_content_with_media_type(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test handling messages with URI content that includes media type.""" + # Arrange + uri = "https://example.com/image.jpg" + message = ChatMessage( + contents=[UriContent(uri=uri, media_type="image/jpeg")], + role=Role.ASSISTANT, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_called_once() + + async def test_handle_mixed_content_types(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test handling messages with mixed content types.""" + # Arrange + data = b"file data" + base64_data = base64.b64encode(data).decode("utf-8") + data_uri = f"data:application/octet-stream;base64,{base64_data}" + + message = ChatMessage( + contents=[ + TextContent(text="Processing file..."), + DataContent(uri=data_uri), + UriContent(uri="https://example.com/reference.pdf", media_type="pdf"), + ], + role=Role.ASSISTANT, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_called_once() + call_args = mock_updater.update_status.call_args + assert call_args.kwargs["state"] == TaskState.working + + async def test_handle_with_additional_properties(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test handling messages with additional 
properties metadata.""" + # Arrange + additional_props = {"custom_field": "custom_value", "priority": "high"} + message = ChatMessage( + contents=[TextContent(text="Test message")], + role=Role.ASSISTANT, + additional_properties=additional_props, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_called_once() + mock_updater.new_agent_message.assert_called_once() + call_args = mock_updater.new_agent_message.call_args + assert call_args.kwargs["metadata"] == additional_props + + async def test_handle_with_no_additional_properties(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test handling messages without additional properties.""" + # Arrange + message = ChatMessage( + contents=[TextContent(text="Test message")], + role=Role.ASSISTANT, + additional_properties=None, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_called_once() + mock_updater.new_agent_message.assert_called_once() + call_args = mock_updater.new_agent_message.call_args + assert call_args.kwargs["metadata"] == {} + + async def test_parts_list_passed_to_new_agent_message(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test that parts list is correctly passed to new_agent_message.""" + # Arrange + message = ChatMessage( + contents=[ + TextContent(text="Message 1"), + TextContent(text="Message 2"), + ], + role=Role.ASSISTANT, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.new_agent_message.assert_called_once() + call_kwargs = mock_updater.new_agent_message.call_args.kwargs + assert "parts" in call_kwargs + parts_list = call_kwargs["parts"] + assert len(parts_list) == 2 + + async def test_unsupported_content_type_skipped(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test that unsupported content types are silently skipped.""" + # Arrange + message = ChatMessage( + 
contents=[ + TextContent(text="Valid text"), + ], + role=Role.ASSISTANT, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert - should still process the valid text content + mock_updater.update_status.assert_called_once() + + async def test_no_update_status_when_no_parts_created(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test that update_status is not called when no parts are created.""" + # Arrange + message = ChatMessage( + contents=[], + role=Role.ASSISTANT, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_not_called() + + async def test_handle_assistant_role(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test handling messages with ASSISTANT role.""" + # Arrange + message = ChatMessage( + contents=[TextContent(text="Assistant response")], + role=Role.ASSISTANT, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_called_once() + + async def test_handle_system_role(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test handling messages with SYSTEM role.""" + # Arrange + message = ChatMessage( + contents=[TextContent(text="System message")], + role=Role.SYSTEM, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_called_once() + + async def test_handle_empty_text_content(self, executor: A2AExecutor, mock_updater: MagicMock) -> None: + """Test handling messages with empty text content.""" + # Arrange + message = ChatMessage( + contents=[TextContent(text="")], + role=Role.ASSISTANT, + ) + + # Act + await executor.handle_events(message, mock_updater) + + # Assert + mock_updater.update_status.assert_called_once() + call_kwargs = mock_updater.new_agent_message.call_args.kwargs + parts_list = call_kwargs["parts"] + assert len(parts_list) == 1 + + async def 
class TestA2AExecutorIntegration:
    """End-to-end tests driving A2AExecutor.execute with mocked collaborators."""

    @staticmethod
    def _mock_updater() -> MagicMock:
        """Build a TaskUpdater stand-in whose lifecycle methods are awaitable."""
        updater = MagicMock()
        updater.submit = AsyncMock()
        updater.start_work = AsyncMock()
        updater.complete = AsyncMock()
        updater.update_status = AsyncMock()
        return updater

    async def test_full_execution_flow_with_responses(
        self,
        executor: A2AExecutor,
        mock_request_context: MagicMock,
        mock_event_queue: MagicMock,
        mock_task: Task,
        mock_agent_thread: MagicMock,
    ) -> None:
        """Drive execute() from request to completion and check the updater lifecycle."""
        # Arrange: a valid request with an existing task and a restorable thread.
        mock_request_context.get_user_input = MagicMock(return_value="Hello agent")
        mock_request_context.current_task = mock_task
        mock_request_context.context_id = "ctx-123"
        mock_request_context.message = MagicMock()

        executor.get_thread = AsyncMock(return_value=mock_agent_thread)

        reply = MagicMock(spec=ChatMessage)
        reply.contents = [TextContent(text="Hello user")]
        reply.role = Role.ASSISTANT
        run_result = MagicMock(spec=AgentRunResponse)
        run_result.messages = [reply]

        async def fake_stream(*_args, **_kwargs):
            return run_result

        # NOTE(review): this assigns to run_stream while handle_events is mocked out —
        # presumably the executor fixture's `run` mock covers the non-streaming path;
        # confirm against the fixture definition.
        executor._agent.run_stream = fake_stream
        executor.handle_events = AsyncMock()

        with patch("agent_framework_a2a._a2a_executor.TaskUpdater") as updater_cls:
            updater = self._mock_updater()
            updater_cls.return_value = updater

            # Act
            await executor.execute(mock_request_context, mock_event_queue)

        # Assert: the full task lifecycle ran exactly once each.
        updater.submit.assert_called_once()
        updater.start_work.assert_called_once()
        executor.handle_events.assert_called_once()
        updater.complete.assert_called_once()

    async def test_executor_with_workflow_agent(
        self,
        mock_workflow_agent: MagicMock,
        mock_request_context: MagicMock,
        mock_event_queue: MagicMock,
        mock_task: Task,
        mock_agent_thread: MagicMock,
    ) -> None:
        """An executor wrapping a WorkflowAgent completes the same lifecycle."""
        # Arrange: build the executor around the workflow agent fixture.
        executor = A2AExecutor(agent=mock_workflow_agent)
        mock_request_context.get_user_input = MagicMock(return_value="Test")
        mock_request_context.current_task = mock_task
        mock_request_context.context_id = "ctx-123"
        mock_request_context.message = MagicMock()

        executor.get_thread = AsyncMock(return_value=mock_agent_thread)

        reply = MagicMock(spec=ChatMessage)
        reply.contents = [TextContent(text="Hello user")]
        reply.role = Role.ASSISTANT
        run_result = MagicMock(spec=AgentRunResponse)
        run_result.messages = [reply]

        async def fake_run(*_args, **_kwargs):
            return run_result

        executor._agent.run = fake_run

        with patch("agent_framework_a2a._a2a_executor.TaskUpdater") as updater_cls:
            updater = self._mock_updater()
            updater.new_agent_message = MagicMock(return_value="mock_message")
            updater_cls.return_value = updater

            # Act
            await executor.execute(mock_request_context, mock_event_queue)

        # Assert
        updater.complete.assert_called_once()
# Copyright (c) Microsoft. All rights reserved.
"""Expose an agent_framework agent as an A2A-compliant server.

Wraps an OpenAI Responses agent in an A2AExecutor and serves it through
A2AStarletteApplication so any A2A client can discover and call it.
"""

import uvicorn
from a2a.server.apps import A2AStarletteApplication
from a2a.server.request_handlers import DefaultRequestHandler
from a2a.server.tasks import InMemoryTaskStore
from a2a.types import AgentCapabilities, AgentCard, AgentSkill
from dotenv import load_dotenv

from agent_framework.a2a import A2AExecutor
from agent_framework.openai import OpenAIResponsesClient


def main() -> None:
    """Build the agent card, wrap the agent, and run the A2A server."""
    # Load OPENAI_* settings from .env before the OpenAI client is constructed.
    load_dotenv()

    skill = AgentSkill(
        id="Food_Agent",
        name="Food Agent",
        description="A simple agent that provides food-related information.",
        tags=["food", "nutrition", "recipes"],
        examples=[],
    )

    # Public-facing agent card served at the A2A well-known discovery endpoint.
    public_agent_card = AgentCard(
        name="Food Agent",
        description="A simple agent that provides food-related information.",
        url="http://localhost:9999/",
        version="1.0.0",
        capabilities=AgentCapabilities(streaming=True),
        skills=[skill],
    )

    agent = OpenAIResponsesClient().create_agent(
        name="Food Agent",
        instructions="A simple agent that provides food-related information.",
    )

    request_handler = DefaultRequestHandler(
        agent_executor=A2AExecutor(agent),
        task_store=InMemoryTaskStore(),
    )

    app = A2AStarletteApplication(
        agent_card=public_agent_card,
        http_handler=request_handler,
    ).build()

    # 0.0.0.0 exposes the sample on all interfaces; narrow the host for production.
    uvicorn.run(app, host="0.0.0.0", port=9999)


if __name__ == "__main__":
    main()
"https://files.pythonhosted.org/packages/a6/84/859df8db21dedab2538ddfbe1d486dda3eb66a98c6ad7ba754a99e25e45e/librt-0.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:45660d26569cc22ed30adf583389d8a0d1b468f8b5e518fcf9bfe2cd298f9dd1", size = 27294, upload-time = "2025-11-29T14:00:35.053Z" }, + { url = "https://files.pythonhosted.org/packages/f7/01/ec3971cf9c4f827f17de6729bdfdbf01a67493147334f4ef8fac68936e3a/librt-0.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:54f3b2177fb892d47f8016f1087d21654b44f7fc4cf6571c1c6b3ea531ab0fcf", size = 27635, upload-time = "2025-11-29T14:00:36.496Z" }, + { url = "https://files.pythonhosted.org/packages/b4/f9/3efe201df84dd26388d2e0afa4c4dc668c8e406a3da7b7319152faf835a1/librt-0.6.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c5b31bed2c2f2fa1fcb4815b75f931121ae210dc89a3d607fb1725f5907f1437", size = 81768, upload-time = "2025-11-29T14:00:37.451Z" }, + { url = "https://files.pythonhosted.org/packages/0a/13/f63e60bc219b17f3d8f3d13423cd4972e597b0321c51cac7bfbdd5e1f7b9/librt-0.6.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8f8ed5053ef9fb08d34f1fd80ff093ccbd1f67f147633a84cf4a7d9b09c0f089", size = 85884, upload-time = "2025-11-29T14:00:38.433Z" }, + { url = "https://files.pythonhosted.org/packages/c2/42/0068f14f39a79d1ce8a19d4988dd07371df1d0a7d3395fbdc8a25b1c9437/librt-0.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3f0e4bd9bcb0ee34fa3dbedb05570da50b285f49e52c07a241da967840432513", size = 85830, upload-time = "2025-11-29T14:00:39.418Z" }, + { url = "https://files.pythonhosted.org/packages/14/1c/87f5af3a9e6564f09e50c72f82fc3057fd42d1facc8b510a707d0438c4ad/librt-0.6.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d8f89c8d20dfa648a3f0a56861946eb00e5b00d6b00eea14bc5532b2fcfa8ef1", size = 88086, upload-time = "2025-11-29T14:00:40.555Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/e5/22153b98b88a913b5b3f266f12e57df50a2a6960b3f8fcb825b1a0cfe40a/librt-0.6.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ecc2c526547eacd20cb9fbba19a5268611dbc70c346499656d6cf30fae328977", size = 86470, upload-time = "2025-11-29T14:00:41.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/3c/ea1edb587799b1edcc22444e0630fa422e32d7aaa5bfb5115b948acc2d1c/librt-0.6.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fbedeb9b48614d662822ee514567d2d49a8012037fc7b4cd63f282642c2f4b7d", size = 89079, upload-time = "2025-11-29T14:00:42.882Z" }, + { url = "https://files.pythonhosted.org/packages/73/ad/50bb4ae6b07c9f3ab19653e0830a210533b30eb9a18d515efb5a2b9d0c7c/librt-0.6.3-cp310-cp310-win32.whl", hash = "sha256:0765b0fe0927d189ee14b087cd595ae636bef04992e03fe6dfdaa383866c8a46", size = 19820, upload-time = "2025-11-29T14:00:44.211Z" }, + { url = "https://files.pythonhosted.org/packages/7a/12/7426ee78f3b1dbe11a90619d54cb241ca924ca3c0ff9ade3992178e9b440/librt-0.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:8c659f9fb8a2f16dc4131b803fa0144c1dadcb3ab24bb7914d01a6da58ae2457", size = 21332, upload-time = "2025-11-29T14:00:45.427Z" }, + { url = "https://files.pythonhosted.org/packages/8b/80/bc60fd16fe24910bf5974fb914778a2e8540cef55385ab2cb04a0dfe42c4/librt-0.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:61348cc488b18d1b1ff9f3e5fcd5ac43ed22d3e13e862489d2267c2337285c08", size = 27285, upload-time = "2025-11-29T14:00:46.626Z" }, + { url = "https://files.pythonhosted.org/packages/88/3c/26335536ed9ba097c79cffcee148393592e55758fe76d99015af3e47a6d0/librt-0.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64645b757d617ad5f98c08e07620bc488d4bced9ced91c6279cec418f16056fa", size = 27629, upload-time = "2025-11-29T14:00:47.863Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/fd/2dcedeacfedee5d2eda23e7a49c1c12ce6221b5d58a13555f053203faafc/librt-0.6.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:26b8026393920320bb9a811b691d73c5981385d537ffc5b6e22e53f7b65d4122", size = 82039, upload-time = "2025-11-29T14:00:49.131Z" }, + { url = "https://files.pythonhosted.org/packages/48/ff/6aa11914b83b0dc2d489f7636942a8e3322650d0dba840db9a1b455f3caa/librt-0.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d998b432ed9ffccc49b820e913c8f327a82026349e9c34fa3690116f6b70770f", size = 86560, upload-time = "2025-11-29T14:00:50.403Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/d25af61958c2c7eb978164aeba0350719f615179ba3f428b682b9a5fdace/librt-0.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e18875e17ef69ba7dfa9623f2f95f3eda6f70b536079ee6d5763ecdfe6cc9040", size = 86494, upload-time = "2025-11-29T14:00:51.383Z" }, + { url = "https://files.pythonhosted.org/packages/7d/4b/40e75d3b258c801908e64b39788f9491635f9554f8717430a491385bd6f2/librt-0.6.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a218f85081fc3f70cddaed694323a1ad7db5ca028c379c214e3a7c11c0850523", size = 88914, upload-time = "2025-11-29T14:00:52.688Z" }, + { url = "https://files.pythonhosted.org/packages/97/6d/0070c81aba8a169224301c75fb5fb6c3c25ca67e6ced086584fc130d5a67/librt-0.6.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1ef42ff4edd369e84433ce9b188a64df0837f4f69e3d34d3b34d4955c599d03f", size = 86944, upload-time = "2025-11-29T14:00:53.768Z" }, + { url = "https://files.pythonhosted.org/packages/a6/94/809f38887941b7726692e0b5a083dbdc87dbb8cf893e3b286550c5f0b129/librt-0.6.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e0f2b79993fec23a685b3e8107ba5f8675eeae286675a216da0b09574fa1e47", size = 89852, upload-time = "2025-11-29T14:00:54.71Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/a3/b0e5b1cda675b91f1111d8ba941da455d8bfaa22f4d2d8963ba96ccb5b12/librt-0.6.3-cp311-cp311-win32.whl", hash = "sha256:fd98cacf4e0fabcd4005c452cb8a31750258a85cab9a59fb3559e8078da408d7", size = 19948, upload-time = "2025-11-29T14:00:55.989Z" }, + { url = "https://files.pythonhosted.org/packages/cc/73/70011c2b37e3be3ece3affd3abc8ebe5cda482b03fd6b3397906321a901e/librt-0.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:e17b5b42c8045867ca9d1f54af00cc2275198d38de18545edaa7833d7e9e4ac8", size = 21406, upload-time = "2025-11-29T14:00:56.874Z" }, + { url = "https://files.pythonhosted.org/packages/91/ee/119aa759290af6ca0729edf513ca390c1afbeae60f3ecae9b9d56f25a8a9/librt-0.6.3-cp311-cp311-win_arm64.whl", hash = "sha256:87597e3d57ec0120a3e1d857a708f80c02c42ea6b00227c728efbc860f067c45", size = 20875, upload-time = "2025-11-29T14:00:57.752Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2c/b59249c566f98fe90e178baf59e83f628d6c38fb8bc78319301fccda0b5e/librt-0.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74418f718083009108dc9a42c21bf2e4802d49638a1249e13677585fcc9ca176", size = 27841, upload-time = "2025-11-29T14:00:58.925Z" }, + { url = "https://files.pythonhosted.org/packages/40/e8/9db01cafcd1a2872b76114c858f81cc29ce7ad606bc102020d6dabf470fb/librt-0.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:514f3f363d1ebc423357d36222c37e5c8e6674b6eae8d7195ac9a64903722057", size = 27844, upload-time = "2025-11-29T14:01:00.2Z" }, + { url = "https://files.pythonhosted.org/packages/59/4d/da449d3a7d83cc853af539dee42adc37b755d7eea4ad3880bacfd84b651d/librt-0.6.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cf1115207a5049d1f4b7b4b72de0e52f228d6c696803d94843907111cbf80610", size = 84091, upload-time = "2025-11-29T14:01:01.118Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/6c/f90306906fb6cc6eaf4725870f0347115de05431e1f96d35114392d31fda/librt-0.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ad8ba80cdcea04bea7b78fcd4925bfbf408961e9d8397d2ee5d3ec121e20c08c", size = 88239, upload-time = "2025-11-29T14:01:02.11Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ae/473ce7b423cfac2cb503851a89d9d2195bf615f534d5912bf86feeebbee7/librt-0.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4018904c83eab49c814e2494b4e22501a93cdb6c9f9425533fe693c3117126f9", size = 88815, upload-time = "2025-11-29T14:01:03.114Z" }, + { url = "https://files.pythonhosted.org/packages/c4/6d/934df738c87fb9617cabefe4891eece585a06abe6def25b4bca3b174429d/librt-0.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8983c5c06ac9c990eac5eb97a9f03fe41dc7e9d7993df74d9e8682a1056f596c", size = 90598, upload-time = "2025-11-29T14:01:04.071Z" }, + { url = "https://files.pythonhosted.org/packages/72/89/eeaa124f5e0f431c2b39119550378ae817a4b1a3c93fd7122f0639336fff/librt-0.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7769c579663a6f8dbf34878969ac71befa42067ce6bf78e6370bf0d1194997c", size = 88603, upload-time = "2025-11-29T14:01:05.02Z" }, + { url = "https://files.pythonhosted.org/packages/4d/ed/c60b3c1cfc27d709bc0288af428ce58543fcb5053cf3eadbc773c24257f5/librt-0.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d3c9a07eafdc70556f8c220da4a538e715668c0c63cabcc436a026e4e89950bf", size = 92112, upload-time = "2025-11-29T14:01:06.304Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ab/f56169be5f716ef4ab0277be70bcb1874b4effc262e655d85b505af4884d/librt-0.6.3-cp312-cp312-win32.whl", hash = "sha256:38320386a48a15033da295df276aea93a92dfa94a862e06893f75ea1d8bbe89d", size = 20127, upload-time = "2025-11-29T14:01:07.283Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/8d/222750ce82bf95125529eaab585ac7e2829df252f3cfc05d68792fb1dd2c/librt-0.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:c0ecf4786ad0404b072196b5df774b1bb23c8aacdcacb6c10b4128bc7b00bd01", size = 21545, upload-time = "2025-11-29T14:01:08.184Z" }, + { url = "https://files.pythonhosted.org/packages/72/c9/f731ddcfb72f446a92a8674c6b8e1e2242773cce43a04f41549bd8b958ff/librt-0.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:9f2a6623057989ebc469cd9cc8fe436c40117a0147627568d03f84aef7854c55", size = 20946, upload-time = "2025-11-29T14:01:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/dd/aa/3055dd440f8b8b3b7e8624539a0749dd8e1913e978993bcca9ce7e306231/librt-0.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9e716f9012148a81f02f46a04fc4c663420c6fbfeacfac0b5e128cf43b4413d3", size = 27874, upload-time = "2025-11-29T14:01:10.615Z" }, + { url = "https://files.pythonhosted.org/packages/ef/93/226d7dd455eaa4c26712b5ccb2dfcca12831baa7f898c8ffd3a831e29fda/librt-0.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:669ff2495728009a96339c5ad2612569c6d8be4474e68f3f3ac85d7c3261f5f5", size = 27852, upload-time = "2025-11-29T14:01:11.535Z" }, + { url = "https://files.pythonhosted.org/packages/4e/8b/db9d51191aef4e4cc06285250affe0bb0ad8b2ed815f7ca77951655e6f02/librt-0.6.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:349b6873ebccfc24c9efd244e49da9f8a5c10f60f07575e248921aae2123fc42", size = 84264, upload-time = "2025-11-29T14:01:12.461Z" }, + { url = "https://files.pythonhosted.org/packages/8d/53/297c96bda3b5a73bdaf748f1e3ae757edd29a0a41a956b9c10379f193417/librt-0.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c74c26736008481c9f6d0adf1aedb5a52aff7361fea98276d1f965c0256ee70", size = 88432, upload-time = "2025-11-29T14:01:13.405Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/3a/c005516071123278e340f22de72fa53d51e259d49215295c212da16c4dc2/librt-0.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:408a36ddc75e91918cb15b03460bdc8a015885025d67e68c6f78f08c3a88f522", size = 89014, upload-time = "2025-11-29T14:01:14.373Z" }, + { url = "https://files.pythonhosted.org/packages/8e/9b/ea715f818d926d17b94c80a12d81a79e95c44f52848e61e8ca1ff29bb9a9/librt-0.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e61ab234624c9ffca0248a707feffe6fac2343758a36725d8eb8a6efef0f8c30", size = 90807, upload-time = "2025-11-29T14:01:15.377Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fc/4e2e4c87e002fa60917a8e474fd13c4bac9a759df82be3778573bb1ab954/librt-0.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:324462fe7e3896d592b967196512491ec60ca6e49c446fe59f40743d08c97917", size = 88890, upload-time = "2025-11-29T14:01:16.633Z" }, + { url = "https://files.pythonhosted.org/packages/70/7f/c7428734fbdfd4db3d5b9237fc3a857880b2ace66492836f6529fef25d92/librt-0.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:36b2ec8c15030002c7f688b4863e7be42820d7c62d9c6eece3db54a2400f0530", size = 92300, upload-time = "2025-11-29T14:01:17.658Z" }, + { url = "https://files.pythonhosted.org/packages/f9/0c/738c4824fdfe74dc0f95d5e90ef9e759d4ecf7fd5ba964d54a7703322251/librt-0.6.3-cp313-cp313-win32.whl", hash = "sha256:25b1b60cb059471c0c0c803e07d0dfdc79e41a0a122f288b819219ed162672a3", size = 20159, upload-time = "2025-11-29T14:01:18.61Z" }, + { url = "https://files.pythonhosted.org/packages/f2/95/93d0e61bc617306ecf4c54636b5cbde4947d872563565c4abdd9d07a39d3/librt-0.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:10a95ad074e2a98c9e4abc7f5b7d40e5ecbfa84c04c6ab8a70fabf59bd429b88", size = 21484, upload-time = "2025-11-29T14:01:19.506Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/23/abd7ace79ab54d1dbee265f13529266f686a7ce2d21ab59a992f989009b6/librt-0.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:17000df14f552e86877d67e4ab7966912224efc9368e998c96a6974a8d609bf9", size = 20935, upload-time = "2025-11-29T14:01:20.415Z" }, + { url = "https://files.pythonhosted.org/packages/83/14/c06cb31152182798ed98be73f54932ab984894f5a8fccf9b73130897a938/librt-0.6.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8e695f25d1a425ad7a272902af8ab8c8d66c1998b177e4b5f5e7b4e215d0c88a", size = 27566, upload-time = "2025-11-29T14:01:21.609Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/ce83ca7b057b06150519152f53a0b302d7c33c8692ce2f01f669b5a819d9/librt-0.6.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3e84a4121a7ae360ca4da436548a9c1ca8ca134a5ced76c893cc5944426164bd", size = 27753, upload-time = "2025-11-29T14:01:22.558Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ec/739a885ef0a2839b6c25f1b01c99149d2cb6a34e933ffc8c051fcd22012e/librt-0.6.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:05f385a414de3f950886ea0aad8f109650d4b712cf9cc14cc17f5f62a9ab240b", size = 83178, upload-time = "2025-11-29T14:01:23.555Z" }, + { url = "https://files.pythonhosted.org/packages/db/bd/dc18bb1489d48c0911b9f4d72eae2d304ea264e215ba80f1e6ba4a9fc41d/librt-0.6.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36a8e337461150b05ca2c7bdedb9e591dfc262c5230422cea398e89d0c746cdc", size = 87266, upload-time = "2025-11-29T14:01:24.532Z" }, + { url = "https://files.pythonhosted.org/packages/94/f3/d0c5431b39eef15e48088b2d739ad84b17c2f1a22c0345c6d4c4a42b135e/librt-0.6.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcbe48f6a03979384f27086484dc2a14959be1613cb173458bd58f714f2c48f3", size = 87623, upload-time = "2025-11-29T14:01:25.798Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/15/9a52e90834e4bd6ee16cdbaf551cb32227cbaad27398391a189c489318bc/librt-0.6.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4bca9e4c260233fba37b15c4ec2f78aa99c1a79fbf902d19dd4a763c5c3fb751", size = 89436, upload-time = "2025-11-29T14:01:26.769Z" }, + { url = "https://files.pythonhosted.org/packages/c3/8a/a7e78e46e8486e023c50f21758930ef4793999115229afd65de69e94c9cc/librt-0.6.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:760c25ed6ac968e24803eb5f7deb17ce026902d39865e83036bacbf5cf242aa8", size = 87540, upload-time = "2025-11-29T14:01:27.756Z" }, + { url = "https://files.pythonhosted.org/packages/49/01/93799044a1cccac31f1074b07c583e181829d240539657e7f305ae63ae2a/librt-0.6.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4aa4a93a353ccff20df6e34fa855ae8fd788832c88f40a9070e3ddd3356a9f0e", size = 90597, upload-time = "2025-11-29T14:01:29.35Z" }, + { url = "https://files.pythonhosted.org/packages/a7/29/00c7f58b8f8eb1bad6529ffb6c9cdcc0890a27dac59ecda04f817ead5277/librt-0.6.3-cp314-cp314-win32.whl", hash = "sha256:cb92741c2b4ea63c09609b064b26f7f5d9032b61ae222558c55832ec3ad0bcaf", size = 18955, upload-time = "2025-11-29T14:01:30.325Z" }, + { url = "https://files.pythonhosted.org/packages/d7/13/2739e6e197a9f751375a37908a6a5b0bff637b81338497a1bcb5817394da/librt-0.6.3-cp314-cp314-win_amd64.whl", hash = "sha256:fdcd095b1b812d756fa5452aca93b962cf620694c0cadb192cec2bb77dcca9a2", size = 20263, upload-time = "2025-11-29T14:01:31.287Z" }, + { url = "https://files.pythonhosted.org/packages/e1/73/393868fc2158705ea003114a24e73bb10b03bda31e9ad7b5c5ec6575338b/librt-0.6.3-cp314-cp314-win_arm64.whl", hash = "sha256:822ca79e28720a76a935c228d37da6579edef048a17cd98d406a2484d10eda78", size = 19575, upload-time = "2025-11-29T14:01:32.229Z" }, + { url = "https://files.pythonhosted.org/packages/48/6d/3c8ff3dec21bf804a205286dd63fd28dcdbe00b8dd7eb7ccf2e21a40a0b0/librt-0.6.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:078cd77064d1640cb7b0650871a772956066174d92c8aeda188a489b58495179", size = 28732, upload-time = "2025-11-29T14:01:33.165Z" }, + { url = "https://files.pythonhosted.org/packages/f4/90/e214b8b4aa34ed3d3f1040719c06c4d22472c40c5ef81a922d5af7876eb4/librt-0.6.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5cc22f7f5c0cc50ed69f4b15b9c51d602aabc4500b433aaa2ddd29e578f452f7", size = 29065, upload-time = "2025-11-29T14:01:34.088Z" }, + { url = "https://files.pythonhosted.org/packages/ab/90/ef61ed51f0a7770cc703422d907a757bbd8811ce820c333d3db2fd13542a/librt-0.6.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:14b345eb7afb61b9fdcdfda6738946bd11b8e0f6be258666b0646af3b9bb5916", size = 93703, upload-time = "2025-11-29T14:01:35.057Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ae/c30bb119c35962cbe9a908a71da99c168056fc3f6e9bbcbc157d0b724d89/librt-0.6.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d46aa46aa29b067f0b8b84f448fd9719aaf5f4c621cc279164d76a9dc9ab3e8", size = 98890, upload-time = "2025-11-29T14:01:36.031Z" }, + { url = "https://files.pythonhosted.org/packages/d1/96/47a4a78d252d36f072b79d592df10600d379a895c3880c8cbd2ac699f0ad/librt-0.6.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1b51ba7d9d5d9001494769eca8c0988adce25d0a970c3ba3f2eb9df9d08036fc", size = 98255, upload-time = "2025-11-29T14:01:37.058Z" }, + { url = "https://files.pythonhosted.org/packages/e5/28/779b5cc3cd9987683884eb5f5672e3251676bebaaae6b7da1cf366eb1da1/librt-0.6.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ced0925a18fddcff289ef54386b2fc230c5af3c83b11558571124bfc485b8c07", size = 100769, upload-time = "2025-11-29T14:01:38.413Z" }, + { url = "https://files.pythonhosted.org/packages/28/d7/771755e57c375cb9d25a4e106f570607fd856e2cb91b02418db1db954796/librt-0.6.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = 
"sha256:6bac97e51f66da2ca012adddbe9fd656b17f7368d439de30898f24b39512f40f", size = 98580, upload-time = "2025-11-29T14:01:39.459Z" }, + { url = "https://files.pythonhosted.org/packages/d0/ec/8b157eb8fbc066339a2f34b0aceb2028097d0ed6150a52e23284a311eafe/librt-0.6.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b2922a0e8fa97395553c304edc3bd36168d8eeec26b92478e292e5d4445c1ef0", size = 101706, upload-time = "2025-11-29T14:01:40.474Z" }, + { url = "https://files.pythonhosted.org/packages/82/a8/4aaead9a06c795a318282aebf7d3e3e578fa889ff396e1b640c3be4c7806/librt-0.6.3-cp314-cp314t-win32.whl", hash = "sha256:f33462b19503ba68d80dac8a1354402675849259fb3ebf53b67de86421735a3a", size = 19465, upload-time = "2025-11-29T14:01:41.77Z" }, + { url = "https://files.pythonhosted.org/packages/3a/61/b7e6a02746c1731670c19ba07d86da90b1ae45d29e405c0b5615abf97cde/librt-0.6.3-cp314-cp314t-win_amd64.whl", hash = "sha256:04f8ce401d4f6380cfc42af0f4e67342bf34c820dae01343f58f472dbac75dcf", size = 21042, upload-time = "2025-11-29T14:01:42.865Z" }, + { url = "https://files.pythonhosted.org/packages/0e/3d/72cc9ec90bb80b5b1a65f0bb74a0f540195837baaf3b98c7fa4a7aa9718e/librt-0.6.3-cp314-cp314t-win_arm64.whl", hash = "sha256:afb39550205cc5e5c935762c6bf6a2bb34f7d21a68eadb25e2db7bf3593fecc0", size = 20246, upload-time = "2025-11-29T14:01:44.13Z" }, +] + [[package]] name = "litellm" version = "1.81.15" @@ -3093,6 +3166,7 @@ dependencies = [ { name = "aiohttp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "fastuuid", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = 
"importlib-metadata", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" },