From 625de278d81fc39b67e8af211276bed00ec96fd0 Mon Sep 17 00:00:00 2001 From: Giles Odigwe Date: Mon, 9 Feb 2026 10:13:24 -0800 Subject: [PATCH 1/2] standardized typevar to use suffix T --- .../ag-ui/agent_framework_ag_ui/_client.py | 18 +- .../ag-ui/agent_framework_ag_ui/_types.py | 6 +- .../agents/ui_generator_agent.py | 4 +- python/packages/ag-ui/tests/ag_ui/conftest.py | 18 +- .../agent_framework_anthropic/_chat_client.py | 18 +- .../_agent_provider.py | 22 +- .../agent_framework_azure_ai/_chat_client.py | 18 +- .../agent_framework_azure_ai/_client.py | 20 +- .../_project_provider.py | 22 +- .../packages/azurefunctions/tests/test_app.py | 36 +-- .../azurefunctions/tests/test_entities.py | 2 +- .../agent_framework_bedrock/_chat_client.py | 16 +- .../claude/agent_framework_claude/_agent.py | 20 +- .../packages/core/agent_framework/_agents.py | 34 +-- .../packages/core/agent_framework/_clients.py | 44 ++-- .../core/agent_framework/_middleware.py | 40 ++-- .../core/agent_framework/_pydantic.py | 4 +- .../core/agent_framework/_serialization.py | 12 +- .../packages/core/agent_framework/_threads.py | 12 +- .../packages/core/agent_framework/_tools.py | 22 +- .../packages/core/agent_framework/_types.py | 220 +++++++++--------- .../_workflows/_model_utils.py | 6 +- .../azure/_assistants_client.py | 6 +- .../agent_framework/azure/_chat_client.py | 22 +- .../azure/_responses_client.py | 14 +- .../core/agent_framework/observability.py | 20 +- .../openai/_assistant_provider.py | 22 +- .../openai/_assistants_client.py | 18 +- .../agent_framework/openai/_chat_client.py | 20 +- .../openai/_responses_client.py | 22 +- python/packages/core/tests/core/conftest.py | 12 +- .../agent_framework_declarative/_models.py | 12 +- python/packages/devui/tests/devui/conftest.py | 4 +- .../tests/test_durable_entities.py | 6 +- .../_foundry_local_client.py | 18 +- .../agent_framework_github_copilot/_agent.py | 20 +- .../agent_framework_ollama/_chat_client.py | 14 +- 
.../agent_framework_purview/_models.py | 4 +- .../chat_client/custom_chat_client.py | 14 +- ...onfigure_otel_providers_with_parameters.py | 12 +- 40 files changed, 440 insertions(+), 434 deletions(-) diff --git a/python/packages/ag-ui/agent_framework_ag_ui/_client.py b/python/packages/ag-ui/agent_framework_ag_ui/_client.py index 8a9755fad9..26f9294837 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui/_client.py +++ b/python/packages/ag-ui/agent_framework_ag_ui/_client.py @@ -57,17 +57,17 @@ def _unwrap_server_function_call_contents(contents: MutableSequence[Content | di contents[idx] = content.function_call # type: ignore[assignment, union-attr] -TBaseChatClient = TypeVar("TBaseChatClient", bound=type[BaseChatClient[Any]]) +BaseChatClientT = TypeVar("BaseChatClientT", bound=type[BaseChatClient[Any]]) -TAGUIChatOptions = TypeVar( - "TAGUIChatOptions", +AGUIChatOptionsT = TypeVar( + "AGUIChatOptionsT", bound=TypedDict, # type: ignore[valid-type] default="AGUIChatOptions", covariant=True, ) -def _apply_server_function_call_unwrap(chat_client: TBaseChatClient) -> TBaseChatClient: +def _apply_server_function_call_unwrap(chat_client: BaseChatClientT) -> BaseChatClientT: """Class decorator that unwraps server-side function calls after tool handling.""" original_get_response = chat_client.get_response @@ -109,11 +109,11 @@ def _map_update(update: ChatResponseUpdate) -> ChatResponseUpdate: @_apply_server_function_call_unwrap class AGUIChatClient( - ChatMiddlewareLayer[TAGUIChatOptions], - FunctionInvocationLayer[TAGUIChatOptions], - ChatTelemetryLayer[TAGUIChatOptions], - BaseChatClient[TAGUIChatOptions], - Generic[TAGUIChatOptions], + ChatMiddlewareLayer[AGUIChatOptionsT], + FunctionInvocationLayer[AGUIChatOptionsT], + ChatTelemetryLayer[AGUIChatOptionsT], + BaseChatClient[AGUIChatOptionsT], + Generic[AGUIChatOptionsT], ): """Chat client for communicating with AG-UI compliant servers. 
diff --git a/python/packages/ag-ui/agent_framework_ag_ui/_types.py b/python/packages/ag-ui/agent_framework_ag_ui/_types.py index 928a755b31..383bf78b5a 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui/_types.py +++ b/python/packages/ag-ui/agent_framework_ag_ui/_types.py @@ -18,8 +18,8 @@ from typing_extensions import TypedDict # type: ignore # pragma: no cover -TAGUIChatOptions = TypeVar("TAGUIChatOptions", bound=TypedDict, default="AGUIChatOptions", covariant=True) # type: ignore[valid-type] -TResponseModel = TypeVar("TResponseModel", bound=BaseModel | None, default=None) +AGUIChatOptionsT = TypeVar("AGUIChatOptionsT", bound=TypedDict, default="AGUIChatOptions", covariant=True) # type: ignore[valid-type] +ResponseModelT = TypeVar("ResponseModelT", bound=BaseModel | None, default=None) class PredictStateConfig(TypedDict): @@ -84,7 +84,7 @@ class AGUIRequest(BaseModel): # region AG-UI Chat Options TypedDict -class AGUIChatOptions(ChatOptions[TResponseModel], Generic[TResponseModel], total=False): +class AGUIChatOptions(ChatOptions[ResponseModelT], Generic[ResponseModelT], total=False): """AG-UI protocol-specific chat options dict. Extends base ChatOptions for the AG-UI (Agent-UI) protocol. diff --git a/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/ui_generator_agent.py b/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/ui_generator_agent.py index 01b333e7f4..1f8d31c58e 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/ui_generator_agent.py +++ b/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/ui_generator_agent.py @@ -163,10 +163,10 @@ For other requests, use the appropriate tool (create_chart, display_timeline, show_comparison_table). 
""" -TOptions = TypeVar("TOptions", bound=TypedDict, default="ChatOptions") # type: ignore[valid-type] +OptionsT = TypeVar("OptionsT", bound=TypedDict, default="ChatOptions") # type: ignore[valid-type] -def ui_generator_agent(chat_client: ChatClientProtocol[TOptions]) -> AgentFrameworkAgent: +def ui_generator_agent(chat_client: ChatClientProtocol[OptionsT]) -> AgentFrameworkAgent: """Create a UI generator agent with custom React component rendering. Args: diff --git a/python/packages/ag-ui/tests/ag_ui/conftest.py b/python/packages/ag-ui/tests/ag_ui/conftest.py index 176f4c031d..4612750f5f 100644 --- a/python/packages/ag-ui/tests/ag_ui/conftest.py +++ b/python/packages/ag-ui/tests/ag_ui/conftest.py @@ -21,7 +21,7 @@ Content, SupportsAgentRun, ) -from agent_framework._clients import TOptions_co +from agent_framework._clients import OptionsCoT from agent_framework._middleware import ChatMiddlewareLayer from agent_framework._tools import FunctionInvocationLayer from agent_framework._types import ResponseStream @@ -37,11 +37,11 @@ class StreamingChatClientStub( - ChatMiddlewareLayer[TOptions_co], - FunctionInvocationLayer[TOptions_co], - ChatTelemetryLayer[TOptions_co], - BaseChatClient[TOptions_co], - Generic[TOptions_co], + ChatMiddlewareLayer[OptionsCoT], + FunctionInvocationLayer[OptionsCoT], + ChatTelemetryLayer[OptionsCoT], + BaseChatClient[OptionsCoT], + Generic[OptionsCoT], ): """Typed streaming stub that satisfies ChatClientProtocol.""" @@ -68,7 +68,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: TOptions_co | ChatOptions[None] | None = ..., + options: OptionsCoT | ChatOptions[None] | None = ..., **kwargs: Any, ) -> Awaitable[ChatResponse[Any]]: ... 
@@ -78,7 +78,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[True], - options: TOptions_co | ChatOptions[Any] | None = ..., + options: OptionsCoT | ChatOptions[Any] | None = ..., **kwargs: Any, ) -> ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: ... @@ -87,7 +87,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: bool = False, - options: TOptions_co | ChatOptions[Any] | None = None, + options: OptionsCoT | ChatOptions[Any] | None = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]] | ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: self.last_thread = kwargs.get("thread") diff --git a/python/packages/anthropic/agent_framework_anthropic/_chat_client.py b/python/packages/anthropic/agent_framework_anthropic/_chat_client.py index 5f3dfa83c5..850c3cea88 100644 --- a/python/packages/anthropic/agent_framework_anthropic/_chat_client.py +++ b/python/packages/anthropic/agent_framework_anthropic/_chat_client.py @@ -76,7 +76,7 @@ BETA_FLAGS: Final[list[str]] = ["mcp-client-2025-04-04", "code-execution-2025-08-25"] STRUCTURED_OUTPUTS_BETA_FLAG: Final[str] = "structured-outputs-2025-11-13" -TResponseModel = TypeVar("TResponseModel", bound=BaseModel | None, default=None) +ResponseModelT = TypeVar("ResponseModelT", bound=BaseModel | None, default=None) # region Anthropic Chat Options TypedDict @@ -100,7 +100,7 @@ class ThinkingConfig(TypedDict, total=False): budget_tokens: int -class AnthropicChatOptions(ChatOptions[TResponseModel], Generic[TResponseModel], total=False): +class AnthropicChatOptions(ChatOptions[ResponseModelT], Generic[ResponseModelT], total=False): """Anthropic-specific chat options. Extends ChatOptions with options specific to Anthropic's Messages API. 
@@ -158,8 +158,8 @@ class AnthropicChatOptions(ChatOptions[TResponseModel], Generic[TResponseModel], conversation_id: None # type: ignore[misc] -TAnthropicOptions = TypeVar( - "TAnthropicOptions", +AnthropicOptionsT = TypeVar( + "AnthropicOptionsT", bound=TypedDict, # type: ignore[valid-type] default="AnthropicChatOptions", covariant=True, @@ -230,11 +230,11 @@ class AnthropicSettings(AFBaseSettings): class AnthropicClient( - ChatMiddlewareLayer[TAnthropicOptions], - FunctionInvocationLayer[TAnthropicOptions], - ChatTelemetryLayer[TAnthropicOptions], - BaseChatClient[TAnthropicOptions], - Generic[TAnthropicOptions], + ChatMiddlewareLayer[AnthropicOptionsT], + FunctionInvocationLayer[AnthropicOptionsT], + ChatTelemetryLayer[AnthropicOptionsT], + BaseChatClient[AnthropicOptionsT], + Generic[AnthropicOptionsT], ): """Anthropic Chat client with middleware, telemetry, and function invocation support.""" diff --git a/python/packages/azure-ai/agent_framework_azure_ai/_agent_provider.py b/python/packages/azure-ai/agent_framework_azure_ai/_agent_provider.py index d30a43910d..73d06ab4a2 100644 --- a/python/packages/azure-ai/agent_framework_azure_ai/_agent_provider.py +++ b/python/packages/azure-ai/agent_framework_azure_ai/_agent_provider.py @@ -38,15 +38,15 @@ # Type variable for options - allows typed ChatAgent[TOptions] returns # Default matches AzureAIAgentClient's default options type -TOptions_co = TypeVar( - "TOptions_co", +OptionsCoT = TypeVar( + "OptionsCoT", bound=TypedDict, # type: ignore[valid-type] default="AzureAIAgentOptions", covariant=True, ) -class AzureAIAgentsProvider(Generic[TOptions_co]): +class AzureAIAgentsProvider(Generic[OptionsCoT]): """Provider for Azure AI Agent Service V1 (Persistent Agents API). 
This provider enables creating, retrieving, and wrapping Azure AI agents as ChatAgent @@ -174,10 +174,10 @@ async def create_agent( | MutableMapping[str, Any] | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Create a new agent on the Azure AI service and return a ChatAgent. This method creates a persistent agent on the Azure AI service with the specified @@ -271,10 +271,10 @@ async def get_agent( | MutableMapping[str, Any] | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Retrieve an existing agent from the service and return a ChatAgent. This method fetches an agent by ID from the Azure AI service @@ -327,10 +327,10 @@ def as_agent( | MutableMapping[str, Any] | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Wrap an existing Agent SDK object as a ChatAgent without making HTTP calls. 
Use this method when you already have an Agent object from a previous @@ -380,10 +380,10 @@ def _to_chat_agent_from_agent( self, agent: Agent, provided_tools: Sequence[ToolProtocol | MutableMapping[str, Any]] | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Create a ChatAgent from an Agent SDK object. Args: diff --git a/python/packages/azure-ai/agent_framework_azure_ai/_chat_client.py b/python/packages/azure-ai/agent_framework_azure_ai/_chat_client.py index d37975e1fb..1ee6952e8a 100644 --- a/python/packages/azure-ai/agent_framework_azure_ai/_chat_client.py +++ b/python/packages/azure-ai/agent_framework_azure_ai/_chat_client.py @@ -191,8 +191,8 @@ class AzureAIAgentOptions(ChatOptions, total=False): } """Maps ChatOptions keys to Azure AI Agents API parameter names.""" -TAzureAIAgentOptions = TypeVar( - "TAzureAIAgentOptions", +AzureAIAgentOptionsT = TypeVar( + "AzureAIAgentOptionsT", bound=TypedDict, # type: ignore[valid-type] default="AzureAIAgentOptions", covariant=True, @@ -203,11 +203,11 @@ class AzureAIAgentOptions(ChatOptions, total=False): class AzureAIAgentClient( - ChatMiddlewareLayer[TAzureAIAgentOptions], - FunctionInvocationLayer[TAzureAIAgentOptions], - ChatTelemetryLayer[TAzureAIAgentOptions], - BaseChatClient[TAzureAIAgentOptions], - Generic[TAzureAIAgentOptions], + ChatMiddlewareLayer[AzureAIAgentOptionsT], + FunctionInvocationLayer[AzureAIAgentOptionsT], + ChatTelemetryLayer[AzureAIAgentOptionsT], + BaseChatClient[AzureAIAgentOptionsT], + Generic[AzureAIAgentOptionsT], ): """Azure AI Agent Chat client with middleware, telemetry, and function invocation support.""" @@ -1296,12 +1296,12 @@ def as_agent( | MutableMapping[str, Any] | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - 
default_options: TAzureAIAgentOptions | Mapping[str, Any] | None = None, + default_options: AzureAIAgentOptionsT | Mapping[str, Any] | None = None, chat_message_store_factory: Callable[[], ChatMessageStoreProtocol] | None = None, context_provider: ContextProvider | None = None, middleware: Sequence[MiddlewareTypes] | None = None, **kwargs: Any, - ) -> ChatAgent[TAzureAIAgentOptions]: + ) -> ChatAgent[AzureAIAgentOptionsT]: """Convert this chat client to a ChatAgent. This method creates a ChatAgent instance with this client pre-configured. diff --git a/python/packages/azure-ai/agent_framework_azure_ai/_client.py b/python/packages/azure-ai/agent_framework_azure_ai/_client.py index 8c0043808e..b60351c3f8 100644 --- a/python/packages/azure-ai/agent_framework_azure_ai/_client.py +++ b/python/packages/azure-ai/agent_framework_azure_ai/_client.py @@ -58,15 +58,15 @@ class AzureAIProjectAgentOptions(OpenAIResponsesOptions, total=False): """Configuration for enabling reasoning capabilities (requires azure.ai.projects.models.Reasoning).""" -TAzureAIClientOptions = TypeVar( - "TAzureAIClientOptions", +AzureAIClientOptionsT = TypeVar( + "AzureAIClientOptionsT", bound=TypedDict, # type: ignore[valid-type] default="AzureAIProjectAgentOptions", covariant=True, ) -class RawAzureAIClient(RawOpenAIResponsesClient[TAzureAIClientOptions], Generic[TAzureAIClientOptions]): +class RawAzureAIClient(RawOpenAIResponsesClient[AzureAIClientOptionsT], Generic[AzureAIClientOptionsT]): """Raw Azure AI client without middleware, telemetry, or function invocation layers. 
Warning: @@ -568,12 +568,12 @@ def as_agent( | MutableMapping[str, Any] | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - default_options: TAzureAIClientOptions | Mapping[str, Any] | None = None, + default_options: AzureAIClientOptionsT | Mapping[str, Any] | None = None, chat_message_store_factory: Callable[[], ChatMessageStoreProtocol] | None = None, context_provider: ContextProvider | None = None, middleware: Sequence[MiddlewareTypes] | None = None, **kwargs: Any, - ) -> ChatAgent[TAzureAIClientOptions]: + ) -> ChatAgent[AzureAIClientOptionsT]: """Convert this chat client to a ChatAgent. This method creates a ChatAgent instance with this client pre-configured. @@ -613,11 +613,11 @@ def as_agent( class AzureAIClient( - ChatMiddlewareLayer[TAzureAIClientOptions], - FunctionInvocationLayer[TAzureAIClientOptions], - ChatTelemetryLayer[TAzureAIClientOptions], - RawAzureAIClient[TAzureAIClientOptions], - Generic[TAzureAIClientOptions], + ChatMiddlewareLayer[AzureAIClientOptionsT], + FunctionInvocationLayer[AzureAIClientOptionsT], + ChatTelemetryLayer[AzureAIClientOptionsT], + RawAzureAIClient[AzureAIClientOptionsT], + Generic[AzureAIClientOptionsT], ): """Azure AI client with middleware, telemetry, and function invocation support. 
diff --git a/python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py b/python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py index 0a5e2f79f6..6b4950aa34 100644 --- a/python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py +++ b/python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py @@ -47,15 +47,15 @@ # Type variable for options - allows typed ChatAgent[TOptions] returns # Default matches AzureAIClient's default options type -TOptions_co = TypeVar( - "TOptions_co", +OptionsCoT = TypeVar( + "OptionsCoT", bound=TypedDict, # type: ignore[valid-type] default="AzureAIProjectAgentOptions", covariant=True, ) -class AzureAIProjectAgentProvider(Generic[TOptions_co]): +class AzureAIProjectAgentProvider(Generic[OptionsCoT]): """Provider for Azure AI Agent Service (Responses API). This provider allows you to create, retrieve, and manage Azure AI agents @@ -165,10 +165,10 @@ async def create_agent( | MutableMapping[str, Any] | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Create a new agent on the Azure AI service and return a local ChatAgent wrapper. Args: @@ -267,10 +267,10 @@ async def get_agent( | MutableMapping[str, Any] | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Retrieve an existing agent from the Azure AI service and return a local ChatAgent wrapper. You must provide either name or reference. 
Use `as_agent()` if you already have @@ -327,10 +327,10 @@ def as_agent( | MutableMapping[str, Any] | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Wrap an SDK agent version object into a ChatAgent without making HTTP calls. Use this when you already have an AgentVersionDetails from a previous API call. @@ -367,10 +367,10 @@ def _to_chat_agent_from_details( self, details: AgentVersionDetails, provided_tools: Sequence[ToolProtocol | MutableMapping[str, Any]] | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Create a ChatAgent from an AgentVersionDetails. 
Args: diff --git a/python/packages/azurefunctions/tests/test_app.py b/python/packages/azurefunctions/tests/test_app.py index f8b414fc34..5a454e6217 100644 --- a/python/packages/azurefunctions/tests/test_app.py +++ b/python/packages/azurefunctions/tests/test_app.py @@ -27,10 +27,10 @@ from agent_framework_azurefunctions import AgentFunctionApp from agent_framework_azurefunctions._entities import create_agent_entity -TFunc = TypeVar("TFunc", bound=Callable[..., Any]) +FuncT = TypeVar("FuncT", bound=Callable[..., Any]) -def _identity_decorator(func: TFunc) -> TFunc: +def _identity_decorator(func: FuncT) -> FuncT: return func @@ -165,8 +165,8 @@ def test_setup_creates_http_trigger(self) -> None: mock_agent = Mock() mock_agent.name = "TestAgent" - def passthrough_decorator(*args: Any, **kwargs: Any) -> Callable[[TFunc], TFunc]: - def decorator(func: TFunc) -> TFunc: + def passthrough_decorator(*args: Any, **kwargs: Any) -> Callable[[FuncT], FuncT]: + def decorator(func: FuncT) -> FuncT: return func return decorator @@ -190,15 +190,15 @@ def test_http_function_name_uses_prefix_format(self) -> None: def capture_function_name( self: AgentFunctionApp, name: str, *args: Any, **kwargs: Any - ) -> Callable[[TFunc], TFunc]: - def decorator(func: TFunc) -> TFunc: + ) -> Callable[[FuncT], FuncT]: + def decorator(func: FuncT) -> FuncT: captured_names.append(name) return func return decorator - def passthrough_decorator(*args: Any, **kwargs: Any) -> Callable[[TFunc], TFunc]: - def decorator(func: TFunc) -> TFunc: + def passthrough_decorator(*args: Any, **kwargs: Any) -> Callable[[FuncT], FuncT]: + def decorator(func: FuncT) -> FuncT: return func return decorator @@ -220,16 +220,16 @@ def test_setup_skips_http_trigger_when_disabled(self) -> None: captured_routes: list[str | None] = [] - def capture_route(*args: Any, **kwargs: Any) -> Callable[[TFunc], TFunc]: - def decorator(func: TFunc) -> TFunc: + def capture_route(*args: Any, **kwargs: Any) -> Callable[[FuncT], FuncT]: + def 
decorator(func: FuncT) -> FuncT: route_key = kwargs.get("route") if kwargs else None captured_routes.append(route_key) return func return decorator - def passthrough_decorator(*args: Any, **kwargs: Any) -> Callable[[TFunc], TFunc]: - def decorator(func: TFunc) -> TFunc: + def passthrough_decorator(*args: Any, **kwargs: Any) -> Callable[[FuncT], FuncT]: + def decorator(func: FuncT) -> FuncT: return func return decorator @@ -738,14 +738,14 @@ class TestHttpRunRoute: def _get_run_handler(agent: Mock) -> Callable[[func.HttpRequest, Any], Awaitable[func.HttpResponse]]: captured_handlers: dict[str | None, Callable[..., Awaitable[func.HttpResponse]]] = {} - def capture_decorator(*args: Any, **kwargs: Any) -> Callable[[TFunc], TFunc]: - def decorator(func: TFunc) -> TFunc: + def capture_decorator(*args: Any, **kwargs: Any) -> Callable[[FuncT], FuncT]: + def decorator(func: FuncT) -> FuncT: return func return decorator - def capture_route(*args: Any, **kwargs: Any) -> Callable[[TFunc], TFunc]: - def decorator(func: TFunc) -> TFunc: + def capture_route(*args: Any, **kwargs: Any) -> Callable[[FuncT], FuncT]: + def decorator(func: FuncT) -> FuncT: route_key = kwargs.get("route") if kwargs else None captured_handlers[route_key] = func return func @@ -1144,8 +1144,8 @@ def test_health_check_includes_mcp_tool_enabled(self) -> None: # Capture the health check handler function captured_handler: Callable[[func.HttpRequest], func.HttpResponse] | None = None - def capture_decorator(*args: Any, **kwargs: Any) -> Callable[[TFunc], TFunc]: - def decorator(func: TFunc) -> TFunc: + def capture_decorator(*args: Any, **kwargs: Any) -> Callable[[FuncT], FuncT]: + def decorator(func: FuncT) -> FuncT: nonlocal captured_handler captured_handler = func return func diff --git a/python/packages/azurefunctions/tests/test_entities.py b/python/packages/azurefunctions/tests/test_entities.py index 2294101164..eb740daddb 100644 --- a/python/packages/azurefunctions/tests/test_entities.py +++ 
b/python/packages/azurefunctions/tests/test_entities.py @@ -14,7 +14,7 @@ from agent_framework_azurefunctions._entities import create_agent_entity -TFunc = TypeVar("TFunc", bound=Callable[..., Any]) +FuncT = TypeVar("FuncT", bound=Callable[..., Any]) def _agent_response(text: str | None) -> AgentResponse: diff --git a/python/packages/bedrock/agent_framework_bedrock/_chat_client.py b/python/packages/bedrock/agent_framework_bedrock/_chat_client.py index 63e779291c..a77195f778 100644 --- a/python/packages/bedrock/agent_framework_bedrock/_chat_client.py +++ b/python/packages/bedrock/agent_framework_bedrock/_chat_client.py @@ -60,7 +60,7 @@ "BedrockSettings", ] -TResponseModel = TypeVar("TResponseModel", bound=BaseModel | None, default=None) +ResponseModelT = TypeVar("ResponseModelT", bound=BaseModel | None, default=None) # region Bedrock Chat Options TypedDict @@ -89,7 +89,7 @@ class BedrockGuardrailConfig(TypedDict, total=False): """How to process guardrails during streaming (sync blocks, async does not).""" -class BedrockChatOptions(ChatOptions[TResponseModel], Generic[TResponseModel], total=False): +class BedrockChatOptions(ChatOptions[ResponseModelT], Generic[ResponseModelT], total=False): """Amazon Bedrock Converse API-specific chat options dict. Extends base ChatOptions with Bedrock-specific parameters. 
@@ -181,7 +181,7 @@ class BedrockChatOptions(ChatOptions[TResponseModel], Generic[TResponseModel], t } """Maps ChatOptions keys to Bedrock Converse API parameter names.""" -TBedrockChatOptions = TypeVar("TBedrockChatOptions", bound=TypedDict, default="BedrockChatOptions", covariant=True) # type: ignore[valid-type] +BedrockChatOptionsT = TypeVar("BedrockChatOptionsT", bound=TypedDict, default="BedrockChatOptions", covariant=True) # type: ignore[valid-type] # endregion @@ -217,11 +217,11 @@ class BedrockSettings(AFBaseSettings): class BedrockChatClient( - ChatMiddlewareLayer[TBedrockChatOptions], - FunctionInvocationLayer[TBedrockChatOptions], - ChatTelemetryLayer[TBedrockChatOptions], - BaseChatClient[TBedrockChatOptions], - Generic[TBedrockChatOptions], + ChatMiddlewareLayer[BedrockChatOptionsT], + FunctionInvocationLayer[BedrockChatOptionsT], + ChatTelemetryLayer[BedrockChatOptionsT], + BaseChatClient[BedrockChatOptionsT], + Generic[BedrockChatOptionsT], ): """Async chat client for Amazon Bedrock's Converse API with middleware, telemetry, and function invocation.""" diff --git a/python/packages/claude/agent_framework_claude/_agent.py b/python/packages/claude/agent_framework_claude/_agent.py index 77893cd165..e081623dd9 100644 --- a/python/packages/claude/agent_framework_claude/_agent.py +++ b/python/packages/claude/agent_framework_claude/_agent.py @@ -137,15 +137,15 @@ class ClaudeAgentOptions(TypedDict, total=False): """Beta features to enable.""" -TOptions = TypeVar( - "TOptions", +OptionsT = TypeVar( + "OptionsT", bound=TypedDict, # type: ignore[valid-type] default="ClaudeAgentOptions", covariant=True, ) -class ClaudeAgent(BaseAgent, Generic[TOptions]): +class ClaudeAgent(BaseAgent, Generic[OptionsT]): """Claude Agent using Claude Code CLI. 
Wraps the Claude Agent SDK to provide agentic capabilities including @@ -221,7 +221,7 @@ def __init__( | str | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any] | str] | None = None, - default_options: TOptions | MutableMapping[str, Any] | None = None, + default_options: OptionsT | MutableMapping[str, Any] | None = None, env_file_path: str | None = None, env_file_encoding: str | None = None, ) -> None: @@ -328,7 +328,7 @@ def _normalize_tools( normalized = normalize_tools(tool) self._custom_tools.extend(normalized) - async def __aenter__(self) -> "ClaudeAgent[TOptions]": + async def __aenter__(self) -> "ClaudeAgent[OptionsT]": """Start the agent when entering async context.""" await self.start() return self @@ -559,7 +559,7 @@ def run( *, stream: Literal[True], thread: AgentThread | None = None, - options: TOptions | MutableMapping[str, Any] | None = None, + options: OptionsT | MutableMapping[str, Any] | None = None, **kwargs: Any, ) -> AsyncIterable[AgentResponseUpdate]: ... @@ -570,7 +570,7 @@ async def run( *, stream: Literal[False] = ..., thread: AgentThread | None = None, - options: TOptions | MutableMapping[str, Any] | None = None, + options: OptionsT | MutableMapping[str, Any] | None = None, **kwargs: Any, ) -> AgentResponse[Any]: ... @@ -580,7 +580,7 @@ def run( *, stream: bool = False, thread: AgentThread | None = None, - options: TOptions | MutableMapping[str, Any] | None = None, + options: OptionsT | MutableMapping[str, Any] | None = None, **kwargs: Any, ) -> AsyncIterable[AgentResponseUpdate] | Awaitable[AgentResponse[Any]]: """Run the agent with the given messages. 
@@ -609,7 +609,7 @@ async def _run_non_streaming( messages: str | ChatMessage | Sequence[str | ChatMessage] | None = None, *, thread: AgentThread | None = None, - options: TOptions | MutableMapping[str, Any] | None = None, + options: OptionsT | MutableMapping[str, Any] | None = None, **kwargs: Any, ) -> AgentResponse[Any]: """Internal non-streaming implementation.""" @@ -623,7 +623,7 @@ async def _run_streaming( messages: str | ChatMessage | Sequence[str | ChatMessage] | None = None, *, thread: AgentThread | None = None, - options: TOptions | MutableMapping[str, Any] | None = None, + options: OptionsT | MutableMapping[str, Any] | None = None, **kwargs: Any, ) -> AsyncIterable[AgentResponseUpdate]: """Internal streaming implementation.""" diff --git a/python/packages/core/agent_framework/_agents.py b/python/packages/core/agent_framework/_agents.py index 9e71738b9b..98a1c13060 100644 --- a/python/packages/core/agent_framework/_agents.py +++ b/python/packages/core/agent_framework/_agents.py @@ -68,15 +68,15 @@ from ._types import ChatOptions -TResponseModel = TypeVar("TResponseModel", bound=BaseModel | None, default=None, covariant=True) -TResponseModelT = TypeVar("TResponseModelT", bound=BaseModel) +ResponseModelT = TypeVar("ResponseModelT", bound=BaseModel | None, default=None, covariant=True) +ResponseModelBoundT = TypeVar("ResponseModelBoundT", bound=BaseModel) logger = get_logger("agent_framework") -TThreadType = TypeVar("TThreadType", bound="AgentThread") -TOptions_co = TypeVar( - "TOptions_co", +ThreadTypeT = TypeVar("ThreadTypeT", bound="AgentThread") +OptionsCoT = TypeVar( + "OptionsCoT", bound=TypedDict, # type: ignore[valid-type] default="ChatOptions[None]", covariant=True, @@ -528,7 +528,7 @@ async def agent_wrapper(**kwargs: Any) -> str: # region ChatAgent -class RawChatAgent(BaseAgent, Generic[TOptions_co]): # type: ignore[misc] +class RawChatAgent(BaseAgent, Generic[OptionsCoT]): # type: ignore[misc] """A Chat Client Agent without middleware or 
telemetry layers. This is the core chat agent implementation. For most use cases, @@ -611,7 +611,7 @@ def get_weather(location: str) -> str: def __init__( self, - chat_client: ChatClientProtocol[TOptions_co], + chat_client: ChatClientProtocol[OptionsCoT], instructions: str | None = None, *, id: str | None = None, @@ -622,7 +622,7 @@ def __init__( | MutableMapping[str, Any] | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, chat_message_store_factory: Callable[[], ChatMessageStoreProtocol] | None = None, context_provider: ContextProvider | None = None, **kwargs: Any, @@ -787,9 +787,9 @@ def run( | MutableMapping[str, Any] | list[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - options: "ChatOptions[TResponseModelT]", + options: "ChatOptions[ResponseModelBoundT]", **kwargs: Any, - ) -> Awaitable[AgentResponse[TResponseModelT]]: ... + ) -> Awaitable[AgentResponse[ResponseModelBoundT]]: ... @overload def run( @@ -803,7 +803,7 @@ def run( | MutableMapping[str, Any] | list[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - options: "TOptions_co | ChatOptions[None] | None" = None, + options: "OptionsCoT | ChatOptions[None] | None" = None, **kwargs: Any, ) -> Awaitable[AgentResponse[Any]]: ... @@ -819,7 +819,7 @@ def run( | MutableMapping[str, Any] | list[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - options: "TOptions_co | ChatOptions[Any] | None" = None, + options: "OptionsCoT | ChatOptions[Any] | None" = None, **kwargs: Any, ) -> ResponseStream[AgentResponseUpdate, AgentResponse[Any]]: ... 
@@ -834,7 +834,7 @@ def run( | MutableMapping[str, Any] | list[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - options: "TOptions_co | ChatOptions[Any] | None" = None, + options: "OptionsCoT | ChatOptions[Any] | None" = None, **kwargs: Any, ) -> Awaitable[AgentResponse[Any]] | ResponseStream[AgentResponseUpdate, AgentResponse[Any]]: """Run the agent with the given messages and options. @@ -1373,8 +1373,8 @@ def _get_agent_name(self) -> str: class ChatAgent( AgentTelemetryLayer, AgentMiddlewareLayer, - RawChatAgent[TOptions_co], - Generic[TOptions_co], + RawChatAgent[OptionsCoT], + Generic[OptionsCoT], ): """A Chat Client Agent with middleware, telemetry, and full layer support. @@ -1387,7 +1387,7 @@ class ChatAgent( def __init__( self, - chat_client: ChatClientProtocol[TOptions_co], + chat_client: ChatClientProtocol[OptionsCoT], instructions: str | None = None, *, id: str | None = None, @@ -1398,7 +1398,7 @@ def __init__( | MutableMapping[str, Any] | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, chat_message_store_factory: Callable[[], ChatMessageStoreProtocol] | None = None, context_provider: ContextProvider | None = None, middleware: Sequence[MiddlewareTypes] | None = None, diff --git a/python/packages/core/agent_framework/_clients.py b/python/packages/core/agent_framework/_clients.py index 5bafb60eb5..245540902f 100644 --- a/python/packages/core/agent_framework/_clients.py +++ b/python/packages/core/agent_framework/_clients.py @@ -56,10 +56,10 @@ from ._types import ChatOptions -TInput = TypeVar("TInput", contravariant=True) +InputT = TypeVar("InputT", contravariant=True) -TEmbedding = TypeVar("TEmbedding") -TBaseChatClient = TypeVar("TBaseChatClient", bound="BaseChatClient") +EmbeddingT = TypeVar("EmbeddingT") +BaseChatClientT = TypeVar("BaseChatClientT", bound="BaseChatClient") logger = get_logger() 
@@ -72,19 +72,19 @@ # region ChatClientProtocol Protocol # Contravariant for the Protocol -TOptions_contra = TypeVar( - "TOptions_contra", +OptionsContraT = TypeVar( + "OptionsContraT", bound=TypedDict, # type: ignore[valid-type] default="ChatOptions[None]", contravariant=True, ) # Used for the overloads that capture the response model type from options -TResponseModelT = TypeVar("TResponseModelT", bound=BaseModel) +ResponseModelBoundT = TypeVar("ResponseModelBoundT", bound=BaseModel) @runtime_checkable -class ChatClientProtocol(Protocol[TOptions_contra]): +class ChatClientProtocol(Protocol[OptionsContraT]): """A protocol for a chat client that can generate responses. This protocol defines the interface that all chat clients must implement, @@ -137,9 +137,9 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: "ChatOptions[TResponseModelT]", + options: "ChatOptions[ResponseModelBoundT]", **kwargs: Any, - ) -> Awaitable[ChatResponse[TResponseModelT]]: ... + ) -> Awaitable[ChatResponse[ResponseModelBoundT]]: ... @overload def get_response( @@ -147,7 +147,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: "TOptions_contra | ChatOptions[None] | None" = None, + options: "OptionsContraT | ChatOptions[None] | None" = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]]: ... @@ -157,7 +157,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[True], - options: "TOptions_contra | ChatOptions[Any] | None" = None, + options: "OptionsContraT | ChatOptions[Any] | None" = None, **kwargs: Any, ) -> ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: ... 
@@ -166,7 +166,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: bool = False, - options: "TOptions_contra | ChatOptions[Any] | None" = None, + options: "OptionsContraT | ChatOptions[Any] | None" = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]] | ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: """Send input and return the response. @@ -193,15 +193,15 @@ def get_response( # region ChatClientBase # Covariant for the BaseChatClient -TOptions_co = TypeVar( - "TOptions_co", +OptionsCoT = TypeVar( + "OptionsCoT", bound=TypedDict, # type: ignore[valid-type] default="ChatOptions[None]", covariant=True, ) -class BaseChatClient(SerializationMixin, ABC, Generic[TOptions_co]): +class BaseChatClient(SerializationMixin, ABC, Generic[OptionsCoT]): """Abstract base class for chat clients without middleware wrapping. This abstract base class provides core functionality for chat client implementations, @@ -366,9 +366,9 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: "ChatOptions[TResponseModelT]", + options: "ChatOptions[ResponseModelBoundT]", **kwargs: Any, - ) -> Awaitable[ChatResponse[TResponseModelT]]: ... + ) -> Awaitable[ChatResponse[ResponseModelBoundT]]: ... @overload def get_response( @@ -376,7 +376,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: "TOptions_co | ChatOptions[None] | None" = None, + options: "OptionsCoT | ChatOptions[None] | None" = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]]: ... @@ -386,7 +386,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[True], - options: "TOptions_co | ChatOptions[Any] | None" = None, + options: "OptionsCoT | ChatOptions[Any] | None" = None, **kwargs: Any, ) -> ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: ... 
@@ -395,7 +395,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: bool = False, - options: "TOptions_co | ChatOptions[Any] | None" = None, + options: "OptionsCoT | ChatOptions[Any] | None" = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]] | ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: """Get a response from a chat client. @@ -440,13 +440,13 @@ def as_agent( | MutableMapping[str, Any] | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - default_options: TOptions_co | Mapping[str, Any] | None = None, + default_options: OptionsCoT | Mapping[str, Any] | None = None, chat_message_store_factory: Callable[[], ChatMessageStoreProtocol] | None = None, context_provider: ContextProvider | None = None, middleware: Sequence["MiddlewareTypes"] | None = None, function_invocation_configuration: FunctionInvocationConfiguration | None = None, **kwargs: Any, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Create a ChatAgent with this client. 
This is a convenience method that creates a ChatAgent instance with this diff --git a/python/packages/core/agent_framework/_middleware.py b/python/packages/core/agent_framework/_middleware.py index eff57cfdcb..c64768218e 100644 --- a/python/packages/core/agent_framework/_middleware.py +++ b/python/packages/core/agent_framework/_middleware.py @@ -40,7 +40,7 @@ from ._tools import FunctionTool from ._types import ChatOptions, ChatResponse, ChatResponseUpdate - TResponseModelT = TypeVar("TResponseModelT", bound=BaseModel) + ResponseModelBoundT = TypeVar("ResponseModelBoundT", bound=BaseModel) __all__ = [ "AgentContext", @@ -65,21 +65,21 @@ ] AgentT = TypeVar("AgentT", bound="SupportsAgentRun") -TContext = TypeVar("TContext") -TUpdate = TypeVar("TUpdate") +ContextT = TypeVar("ContextT") +UpdateT = TypeVar("UpdateT") -class _EmptyAsyncIterator(Generic[TUpdate]): +class _EmptyAsyncIterator(Generic[UpdateT]): """Empty async iterator that yields nothing. Used when middleware terminates without setting a result, and we need to provide an empty stream. """ - def __aiter__(self) -> _EmptyAsyncIterator[TUpdate]: + def __aiter__(self) -> _EmptyAsyncIterator[UpdateT]: return self - async def __anext__(self) -> TUpdate: + async def __anext__(self) -> UpdateT: raise StopAsyncIteration @@ -656,20 +656,20 @@ async def logging_middleware(context: ChatContext, next): return func -class MiddlewareWrapper(Generic[TContext]): +class MiddlewareWrapper(Generic[ContextT]): """Generic wrapper to convert pure functions into middleware protocol objects. This wrapper allows function-based middleware to be used alongside class-based middleware by providing a unified interface. Type Parameters: - TContext: The type of context object this middleware operates on. + ContextT: The type of context object this middleware operates on. 
""" - def __init__(self, func: Callable[[TContext, Callable[[TContext], Awaitable[None]]], Awaitable[None]]) -> None: + def __init__(self, func: Callable[[ContextT, Callable[[ContextT], Awaitable[None]]], Awaitable[None]]) -> None: self.func = func - async def process(self, context: TContext, next: Callable[[TContext], Awaitable[None]]) -> None: + async def process(self, context: ContextT, next: Callable[[ContextT], Awaitable[None]]) -> None: await self.func(context, next) @@ -953,15 +953,15 @@ async def current_handler(c: ChatContext) -> None: # Covariant for chat client options -TOptions_co = TypeVar( - "TOptions_co", +OptionsCoT = TypeVar( + "OptionsCoT", bound=TypedDict, # type: ignore[valid-type] default="ChatOptions[None]", covariant=True, ) -class ChatMiddlewareLayer(Generic[TOptions_co]): +class ChatMiddlewareLayer(Generic[OptionsCoT]): """Layer for chat clients to apply chat middleware around response generation.""" def __init__( @@ -983,9 +983,9 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: ChatOptions[TResponseModelT], + options: ChatOptions[ResponseModelBoundT], **kwargs: Any, - ) -> Awaitable[ChatResponse[TResponseModelT]]: ... + ) -> Awaitable[ChatResponse[ResponseModelBoundT]]: ... @overload def get_response( @@ -993,7 +993,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: TOptions_co | ChatOptions[None] | None = None, + options: OptionsCoT | ChatOptions[None] | None = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]]: ... @@ -1003,7 +1003,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[True], - options: TOptions_co | ChatOptions[Any] | None = None, + options: OptionsCoT | ChatOptions[Any] | None = None, **kwargs: Any, ) -> ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: ... 
@@ -1012,7 +1012,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: bool = False, - options: TOptions_co | ChatOptions[Any] | None = None, + options: OptionsCoT | ChatOptions[Any] | None = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]] | ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: """Execute the chat pipeline if middleware is configured.""" @@ -1102,9 +1102,9 @@ def run( stream: Literal[False] = ..., thread: AgentThread | None = None, middleware: Sequence[MiddlewareTypes] | None = None, - options: ChatOptions[TResponseModelT], + options: ChatOptions[ResponseModelBoundT], **kwargs: Any, - ) -> Awaitable[AgentResponse[TResponseModelT]]: ... + ) -> Awaitable[AgentResponse[ResponseModelBoundT]]: ... @overload def run( diff --git a/python/packages/core/agent_framework/_pydantic.py b/python/packages/core/agent_framework/_pydantic.py index 8aac34e02f..75099a541d 100644 --- a/python/packages/core/agent_framework/_pydantic.py +++ b/python/packages/core/agent_framework/_pydantic.py @@ -12,7 +12,7 @@ __all__ = ["AFBaseSettings", "HTTPsUrl"] -TSettings = TypeVar("TSettings", bound="AFBaseSettings") +SettingsT = TypeVar("SettingsT", bound="AFBaseSettings") class AFBaseSettings(BaseSettings): @@ -48,7 +48,7 @@ def __init__( kwargs = {k: v for k, v in kwargs.items() if v is not None} super().__init__(**kwargs) - def __new__(cls: type["TSettings"], *args: Any, **kwargs: Any) -> "TSettings": + def __new__(cls: type["SettingsT"], *args: Any, **kwargs: Any) -> "SettingsT": """Override the __new__ method to set the env_prefix.""" # for both, if supplied but None, set to default if "env_file_encoding" in kwargs and kwargs["env_file_encoding"] is not None: diff --git a/python/packages/core/agent_framework/_serialization.py b/python/packages/core/agent_framework/_serialization.py index dd6b8f871f..95c53790f0 100644 --- a/python/packages/core/agent_framework/_serialization.py +++ 
b/python/packages/core/agent_framework/_serialization.py @@ -9,8 +9,8 @@ logger = get_logger() -TClass = TypeVar("TClass", bound="SerializationMixin") -TProtocol = TypeVar("TProtocol", bound="SerializationProtocol") +ClassT = TypeVar("ClassT", bound="SerializationMixin") +ProtocolT = TypeVar("ProtocolT", bound="SerializationProtocol") # Regex pattern for converting CamelCase to snake_case _CAMEL_TO_SNAKE_PATTERN = re.compile(r"(? dict[str, Any]: ... @classmethod - def from_dict(cls: type[TProtocol], value: MutableMapping[str, Any], /, **kwargs: Any) -> TProtocol: + def from_dict(cls: type[ProtocolT], value: MutableMapping[str, Any], /, **kwargs: Any) -> ProtocolT: """Create an instance from a dictionary. Args: @@ -390,8 +390,8 @@ def to_json(self, *, exclude: set[str] | None = None, exclude_none: bool = True, @classmethod def from_dict( - cls: type[TClass], value: MutableMapping[str, Any], /, *, dependencies: MutableMapping[str, Any] | None = None - ) -> TClass: + cls: type[ClassT], value: MutableMapping[str, Any], /, *, dependencies: MutableMapping[str, Any] | None = None + ) -> ClassT: """Create an instance from a dictionary with optional dependency injection. This method reconstructs an object from its dictionary representation, automatically @@ -558,7 +558,7 @@ async def get_current_weather(location: Annotated[str, "The city name"]) -> str: return cls(**kwargs) @classmethod - def from_json(cls: type[TClass], value: str, /, *, dependencies: MutableMapping[str, Any] | None = None) -> TClass: + def from_json(cls: type[ClassT], value: str, /, *, dependencies: MutableMapping[str, Any] | None = None) -> ClassT: """Create an instance from a JSON string. 
This is a convenience method that parses the JSON string using ``json.loads()`` diff --git a/python/packages/core/agent_framework/_threads.py b/python/packages/core/agent_framework/_threads.py index 6692bdb3c4..52680b7064 100644 --- a/python/packages/core/agent_framework/_threads.py +++ b/python/packages/core/agent_framework/_threads.py @@ -180,7 +180,7 @@ def __init__( raise TypeError("Could not parse ChatMessageStoreState.") -TChatMessageStore = TypeVar("TChatMessageStore", bound="ChatMessageStore") +ChatMessageStoreT = TypeVar("ChatMessageStoreT", bound="ChatMessageStore") class ChatMessageStore: @@ -241,8 +241,8 @@ async def list_messages(self) -> list[ChatMessage]: @classmethod async def deserialize( - cls: type[TChatMessageStore], serialized_store_state: MutableMapping[str, Any], **kwargs: Any - ) -> TChatMessageStore: + cls: type[ChatMessageStoreT], serialized_store_state: MutableMapping[str, Any], **kwargs: Any + ) -> ChatMessageStoreT: """Create a new ChatMessageStore instance from serialized state data. Args: @@ -287,7 +287,7 @@ async def serialize(self, **kwargs: Any) -> dict[str, Any]: return state.to_dict() -TAgentThread = TypeVar("TAgentThread", bound="AgentThread") +AgentThreadT = TypeVar("AgentThreadT", bound="AgentThread") class AgentThread: @@ -435,12 +435,12 @@ async def serialize(self, **kwargs: Any) -> dict[str, Any]: @classmethod async def deserialize( - cls: type[TAgentThread], + cls: type[AgentThreadT], serialized_thread_state: MutableMapping[str, Any], *, message_store: ChatMessageStoreProtocol | None = None, **kwargs: Any, - ) -> TAgentThread: + ) -> AgentThreadT: """Deserializes the state from a dictionary into a new AgentThread instance. 
Args: diff --git a/python/packages/core/agent_framework/_tools.py b/python/packages/core/agent_framework/_tools.py index 7e22b78827..27cbc67723 100644 --- a/python/packages/core/agent_framework/_tools.py +++ b/python/packages/core/agent_framework/_tools.py @@ -76,7 +76,7 @@ ResponseStream, ) - TResponseModelT = TypeVar("TResponseModelT", bound=BaseModel) + ResponseModelBoundT = TypeVar("ResponseModelBoundT", bound=BaseModel) logger = get_logger() @@ -100,7 +100,7 @@ logger = get_logger() DEFAULT_MAX_ITERATIONS: Final[int] = 40 DEFAULT_MAX_CONSECUTIVE_ERRORS_PER_REQUEST: Final[int] = 3 -TChatClient = TypeVar("TChatClient", bound="ChatClientProtocol[Any]") +ChatClientT = TypeVar("ChatClientT", bound="ChatClientProtocol[Any]") # region Helpers ArgsT = TypeVar("ArgsT", bound=BaseModel, default=BaseModel) @@ -569,7 +569,7 @@ def _default_histogram() -> Histogram: ) -TClass = TypeVar("TClass", bound="SerializationMixin") +ClassT = TypeVar("ClassT", bound="SerializationMixin") class EmptyInputModel(BaseModel): @@ -2043,15 +2043,15 @@ async def _process_function_requests( return result -TOptions_co = TypeVar( - "TOptions_co", +OptionsCoT = TypeVar( + "OptionsCoT", bound=TypedDict, # type: ignore[valid-type] default="ChatOptions[None]", covariant=True, ) -class FunctionInvocationLayer(Generic[TOptions_co]): +class FunctionInvocationLayer(Generic[OptionsCoT]): """Layer for chat clients to apply function invocation around get_response.""" def __init__( @@ -2075,9 +2075,9 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: ChatOptions[TResponseModelT], + options: ChatOptions[ResponseModelBoundT], **kwargs: Any, - ) -> Awaitable[ChatResponse[TResponseModelT]]: ... + ) -> Awaitable[ChatResponse[ResponseModelBoundT]]: ... 
@overload def get_response( @@ -2085,7 +2085,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: TOptions_co | ChatOptions[None] | None = None, + options: OptionsCoT | ChatOptions[None] | None = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]]: ... @@ -2095,7 +2095,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[True], - options: TOptions_co | ChatOptions[Any] | None = None, + options: OptionsCoT | ChatOptions[Any] | None = None, **kwargs: Any, ) -> ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: ... @@ -2104,7 +2104,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: bool = False, - options: TOptions_co | ChatOptions[Any] | None = None, + options: OptionsCoT | ChatOptions[Any] | None = None, function_middleware: Sequence[FunctionMiddlewareTypes] | None = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]] | ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: diff --git a/python/packages/core/agent_framework/_types.py b/python/packages/core/agent_framework/_types.py index b5fc029894..9c28a5a7e1 100644 --- a/python/packages/core/agent_framework/_types.py +++ b/python/packages/core/agent_framework/_types.py @@ -36,17 +36,17 @@ "ChatResponse", "ChatResponseUpdate", "Content", + "FinalT", "FinishReason", "FinishReasonLiteral", + "OuterFinalT", + "OuterUpdateT", "ResponseStream", "Role", "RoleLiteral", - "TFinal", - "TOuterFinal", - "TOuterUpdate", - "TUpdate", "TextSpanRegion", "ToolMode", + "UpdateT", "UsageDetails", "add_usage_details", "detect_media_type_from_base64", @@ -305,12 +305,12 @@ def _serialize_value(value: Any, exclude_none: bool) -> Any: # region Constants and types _T = TypeVar("_T") -TEmbedding = TypeVar("TEmbedding") -TChatResponse = TypeVar("TChatResponse", bound="ChatResponse") -TToolMode = TypeVar("TToolMode", bound="ToolMode") -TAgentRunResponse = 
TypeVar("TAgentRunResponse", bound="AgentResponse") -TResponseModel = TypeVar("TResponseModel", bound=BaseModel | None, default=None, covariant=True) -TResponseModelT = TypeVar("TResponseModelT", bound=BaseModel) +EmbeddingT = TypeVar("EmbeddingT") +ChatResponseT = TypeVar("ChatResponseT", bound="ChatResponse") +ToolModeT = TypeVar("ToolModeT", bound="ToolMode") +AgentResponseT = TypeVar("AgentResponseT", bound="AgentResponse") +ResponseModelT = TypeVar("ResponseModelT", bound=BaseModel | None, default=None, covariant=True) +ResponseModelBoundT = TypeVar("ResponseModelBoundT", bound=BaseModel) CreatedAtT = str # Use a datetimeoffset type? Or a more specific type like datetime.datetime? @@ -389,7 +389,7 @@ class Annotation(TypedDict, total=False): raw_representation: Any -TContent = TypeVar("TContent", bound="Content") +ContentT = TypeVar("ContentT", bound="Content") # endregion @@ -544,13 +544,13 @@ def __init__( @classmethod def from_text( - cls: type[TContent], + cls: type[ContentT], text: str, *, annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create text content.""" return cls( "text", @@ -562,14 +562,14 @@ def from_text( @classmethod def from_text_reasoning( - cls: type[TContent], + cls: type[ContentT], *, text: str | None = None, protected_data: str | None = None, annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create text reasoning content.""" return cls( "text_reasoning", @@ -582,14 +582,14 @@ def from_text_reasoning( @classmethod def from_data( - cls: type[TContent], + cls: type[ContentT], data: bytes, media_type: str, *, annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> 
ContentT: r"""Create data content from raw binary data. Use this to create content from binary data (images, audio, documents, etc.). @@ -658,14 +658,14 @@ def from_data( @classmethod def from_uri( - cls: type[TContent], + cls: type[ContentT], uri: str, *, media_type: str | None = None, annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create content from a URI, can be both data URI or external URI. Use this when you already have a properly formed data URI @@ -720,7 +720,7 @@ def from_uri( @classmethod def from_error( - cls: type[TContent], + cls: type[ContentT], *, message: str | None = None, error_code: str | None = None, @@ -728,7 +728,7 @@ def from_error( annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create error content.""" return cls( "error", @@ -742,7 +742,7 @@ def from_error( @classmethod def from_function_call( - cls: type[TContent], + cls: type[ContentT], call_id: str, name: str, *, @@ -751,7 +751,7 @@ def from_function_call( annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create function call content.""" return cls( "function_call", @@ -766,7 +766,7 @@ def from_function_call( @classmethod def from_function_result( - cls: type[TContent], + cls: type[ContentT], call_id: str, *, result: Any = None, @@ -774,7 +774,7 @@ def from_function_result( annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create function result content.""" return cls( "function_result", @@ -788,13 +788,13 @@ def from_function_result( @classmethod def from_usage( - 
cls: type[TContent], + cls: type[ContentT], usage_details: UsageDetails, *, annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create usage content.""" return cls( "usage", @@ -806,7 +806,7 @@ def from_usage( @classmethod def from_hosted_file( - cls: type[TContent], + cls: type[ContentT], file_id: str, *, media_type: str | None = None, @@ -814,7 +814,7 @@ def from_hosted_file( annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create hosted file content.""" return cls( "hosted_file", @@ -828,13 +828,13 @@ def from_hosted_file( @classmethod def from_hosted_vector_store( - cls: type[TContent], + cls: type[ContentT], vector_store_id: str, *, annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create hosted vector store content.""" return cls( "hosted_vector_store", @@ -846,14 +846,14 @@ def from_hosted_vector_store( @classmethod def from_code_interpreter_tool_call( - cls: type[TContent], + cls: type[ContentT], *, call_id: str | None = None, inputs: Sequence[Content] | None = None, annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create code interpreter tool call content.""" return cls( "code_interpreter_tool_call", @@ -866,14 +866,14 @@ def from_code_interpreter_tool_call( @classmethod def from_code_interpreter_tool_result( - cls: type[TContent], + cls: type[ContentT], *, call_id: str | None = None, outputs: Sequence[Content] | None = None, annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, 
raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create code interpreter tool result content.""" return cls( "code_interpreter_tool_result", @@ -886,13 +886,13 @@ def from_code_interpreter_tool_result( @classmethod def from_image_generation_tool_call( - cls: type[TContent], + cls: type[ContentT], *, image_id: str | None = None, annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create image generation tool call content.""" return cls( "image_generation_tool_call", @@ -904,14 +904,14 @@ def from_image_generation_tool_call( @classmethod def from_image_generation_tool_result( - cls: type[TContent], + cls: type[ContentT], *, image_id: str | None = None, outputs: Any = None, annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create image generation tool result content.""" return cls( "image_generation_tool_result", @@ -924,7 +924,7 @@ def from_image_generation_tool_result( @classmethod def from_mcp_server_tool_call( - cls: type[TContent], + cls: type[ContentT], call_id: str, tool_name: str, *, @@ -933,7 +933,7 @@ def from_mcp_server_tool_call( annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create MCP server tool call content.""" return cls( "mcp_server_tool_call", @@ -948,14 +948,14 @@ def from_mcp_server_tool_call( @classmethod def from_mcp_server_tool_result( - cls: type[TContent], + cls: type[ContentT], call_id: str, *, output: Any = None, annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create MCP server tool result content.""" 
return cls( "mcp_server_tool_result", @@ -968,14 +968,14 @@ def from_mcp_server_tool_result( @classmethod def from_function_approval_request( - cls: type[TContent], + cls: type[ContentT], id: str, function_call: Content, *, annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create function approval request content.""" return cls( "function_approval_request", @@ -989,7 +989,7 @@ def from_function_approval_request( @classmethod def from_function_approval_response( - cls: type[TContent], + cls: type[ContentT], approved: bool, id: str, function_call: Content, @@ -997,7 +997,7 @@ def from_function_approval_response( annotations: Sequence[Annotation] | None = None, additional_properties: MutableMapping[str, Any] | None = None, raw_representation: Any = None, - ) -> TContent: + ) -> ContentT: """Create function approval response content.""" return cls( "function_approval_response", @@ -1091,7 +1091,7 @@ def __str__(self) -> str: return f"Content(type={self.type})" @classmethod - def from_dict(cls: type[TContent], data: Mapping[str, Any]) -> TContent: + def from_dict(cls: type[ContentT], data: Mapping[str, Any]) -> ContentT: """Create a Content instance from a mapping.""" if not (content_type := data.get("type")): raise ValueError("Content mapping requires 'type'") @@ -1796,7 +1796,7 @@ def _finalize_response(response: ChatResponse | AgentResponse) -> None: _coalesce_text_content(msg.contents, "text_reasoning") -class ChatResponse(SerializationMixin, Generic[TResponseModel]): +class ChatResponse(SerializationMixin, Generic[ResponseModelT]): """Represents the response to a chat request. 
Attributes: @@ -1859,7 +1859,7 @@ def __init__( created_at: CreatedAtT | None = None, finish_reason: FinishReasonLiteral | FinishReason | None = None, usage_details: UsageDetails | None = None, - value: TResponseModel | None = None, + value: ResponseModelT | None = None, response_format: type[BaseModel] | None = None, additional_properties: dict[str, Any] | None = None, raw_representation: Any | None = None, @@ -1903,7 +1903,7 @@ def __init__( finish_reason = finish_reason["value"] self.finish_reason = finish_reason self.usage_details = usage_details - self._value: TResponseModel | None = value + self._value: ResponseModelT | None = value self._response_format: type[BaseModel] | None = response_format self._value_parsed: bool = value is not None self.additional_properties = additional_properties or {} @@ -1915,8 +1915,8 @@ def from_updates( cls: type[ChatResponse[Any]], updates: Sequence[ChatResponseUpdate], *, - output_format_type: type[TResponseModelT], - ) -> ChatResponse[TResponseModelT]: ... + output_format_type: type[ResponseModelBoundT], + ) -> ChatResponse[ResponseModelBoundT]: ... @overload @classmethod @@ -1929,11 +1929,11 @@ def from_updates( @classmethod def from_updates( - cls: type[TChatResponse], + cls: type[ChatResponseT], updates: Sequence[ChatResponseUpdate], *, output_format_type: type[BaseModel] | None = None, - ) -> TChatResponse: + ) -> ChatResponseT: """Joins multiple updates into a single ChatResponse. Example: @@ -1970,8 +1970,8 @@ async def from_update_generator( cls: type[ChatResponse[Any]], updates: AsyncIterable[ChatResponseUpdate], *, - output_format_type: type[TResponseModelT], - ) -> ChatResponse[TResponseModelT]: ... + output_format_type: type[ResponseModelBoundT], + ) -> ChatResponse[ResponseModelBoundT]: ... 
@overload @classmethod @@ -1984,11 +1984,11 @@ async def from_update_generator( @classmethod async def from_update_generator( - cls: type[TChatResponse], + cls: type[ChatResponseT], updates: AsyncIterable[ChatResponseUpdate], *, output_format_type: type[BaseModel] | None = None, - ) -> TChatResponse: + ) -> ChatResponseT: """Joins multiple updates into a single ChatResponse. Example: @@ -2021,7 +2021,7 @@ def text(self) -> str: return ("\n".join(message.text for message in self.messages if isinstance(message, ChatMessage))).strip() @property - def value(self) -> TResponseModel | None: + def value(self) -> ResponseModelT | None: """Get the parsed structured output value. If a response_format was provided and parsing hasn't been attempted yet, @@ -2037,7 +2037,7 @@ def value(self) -> TResponseModel | None: and isinstance(self._response_format, type) and issubclass(self._response_format, BaseModel) ): - self._value = cast(TResponseModel, self._response_format.model_validate_json(self.text)) + self._value = cast(ResponseModelT, self._response_format.model_validate_json(self.text)) self._value_parsed = True return self._value @@ -2166,7 +2166,7 @@ def __str__(self) -> str: # region AgentResponse -class AgentResponse(SerializationMixin, Generic[TResponseModel]): +class AgentResponse(SerializationMixin, Generic[ResponseModelT]): """Represents the response to an Agent run request. Provides one or more response messages and metadata about the response. 
@@ -2220,7 +2220,7 @@ def __init__( agent_id: str | None = None, created_at: CreatedAtT | None = None, usage_details: UsageDetails | None = None, - value: TResponseModel | None = None, + value: ResponseModelT | None = None, response_format: type[BaseModel] | None = None, raw_representation: Any | None = None, additional_properties: dict[str, Any] | None = None, @@ -2258,7 +2258,7 @@ def __init__( self.agent_id = agent_id self.created_at = created_at self.usage_details = usage_details - self._value: TResponseModel | None = value + self._value: ResponseModelT | None = value self._response_format: type[BaseModel] | None = response_format self._value_parsed: bool = value is not None self.additional_properties = additional_properties or {} @@ -2270,7 +2270,7 @@ def text(self) -> str: return "".join(msg.text for msg in self.messages) if self.messages else "" @property - def value(self) -> TResponseModel | None: + def value(self) -> ResponseModelT | None: """Get the parsed structured output value. If a response_format was provided and parsing hasn't been attempted yet, @@ -2286,7 +2286,7 @@ def value(self) -> TResponseModel | None: and isinstance(self._response_format, type) and issubclass(self._response_format, BaseModel) ): - self._value = cast(TResponseModel, self._response_format.model_validate_json(self.text)) + self._value = cast(ResponseModelT, self._response_format.model_validate_json(self.text)) self._value_parsed = True return self._value @@ -2306,8 +2306,8 @@ def from_updates( cls: type[AgentResponse[Any]], updates: Sequence[AgentResponseUpdate], *, - output_format_type: type[TResponseModelT], - ) -> AgentResponse[TResponseModelT]: ... + output_format_type: type[ResponseModelBoundT], + ) -> AgentResponse[ResponseModelBoundT]: ... 
@overload @classmethod @@ -2320,11 +2320,11 @@ def from_updates( @classmethod def from_updates( - cls: type[TAgentRunResponse], + cls: type[AgentResponseT], updates: Sequence[AgentResponseUpdate], *, output_format_type: type[BaseModel] | None = None, - ) -> TAgentRunResponse: + ) -> AgentResponseT: """Joins multiple updates into a single AgentResponse. Args: @@ -2345,8 +2345,8 @@ async def from_update_generator( cls: type[AgentResponse[Any]], updates: AsyncIterable[AgentResponseUpdate], *, - output_format_type: type[TResponseModelT], - ) -> AgentResponse[TResponseModelT]: ... + output_format_type: type[ResponseModelBoundT], + ) -> AgentResponse[ResponseModelBoundT]: ... @overload @classmethod @@ -2359,11 +2359,11 @@ async def from_update_generator( @classmethod async def from_update_generator( - cls: type[TAgentRunResponse], + cls: type[AgentResponseT], updates: AsyncIterable[AgentResponseUpdate], *, output_format_type: type[BaseModel] | None = None, - ) -> TAgentRunResponse: + ) -> AgentResponseT: """Joins multiple updates into a single AgentResponse. 
Args: @@ -2520,23 +2520,23 @@ def map_chat_to_agent_update(update: ChatResponseUpdate, agent_name: str | None) # Type variables for ResponseStream -TUpdate = TypeVar("TUpdate") -TFinal = TypeVar("TFinal") -TOuterUpdate = TypeVar("TOuterUpdate") -TOuterFinal = TypeVar("TOuterFinal") +UpdateT = TypeVar("UpdateT") +FinalT = TypeVar("FinalT") +OuterUpdateT = TypeVar("OuterUpdateT") +OuterFinalT = TypeVar("OuterFinalT") -class ResponseStream(AsyncIterable[TUpdate], Generic[TUpdate, TFinal]): +class ResponseStream(AsyncIterable[UpdateT], Generic[UpdateT, FinalT]): """Async stream wrapper that supports iteration and deferred finalization.""" def __init__( self, - stream: AsyncIterable[TUpdate] | Awaitable[AsyncIterable[TUpdate]], + stream: AsyncIterable[UpdateT] | Awaitable[AsyncIterable[UpdateT]], *, - finalizer: Callable[[Sequence[TUpdate]], TFinal | Awaitable[TFinal]] | None = None, - transform_hooks: list[Callable[[TUpdate], TUpdate | Awaitable[TUpdate] | None]] | None = None, + finalizer: Callable[[Sequence[UpdateT]], FinalT | Awaitable[FinalT]] | None = None, + transform_hooks: list[Callable[[UpdateT], UpdateT | Awaitable[UpdateT] | None]] | None = None, cleanup_hooks: list[Callable[[], Awaitable[None] | None]] | None = None, - result_hooks: list[Callable[[TFinal], TFinal | Awaitable[TFinal | None] | None]] | None = None, + result_hooks: list[Callable[[FinalT], FinalT | Awaitable[FinalT | None] | None]] | None = None, ) -> None: """A Async Iterable stream of updates. 
@@ -2552,16 +2552,16 @@ def __init__( """ self._stream_source = stream self._finalizer = finalizer - self._stream: AsyncIterable[TUpdate] | None = None - self._iterator: AsyncIterator[TUpdate] | None = None - self._updates: list[TUpdate] = [] + self._stream: AsyncIterable[UpdateT] | None = None + self._iterator: AsyncIterator[UpdateT] | None = None + self._updates: list[UpdateT] = [] self._consumed: bool = False self._finalized: bool = False - self._final_result: TFinal | None = None - self._transform_hooks: list[Callable[[TUpdate], TUpdate | Awaitable[TUpdate] | None]] = ( + self._final_result: FinalT | None = None + self._transform_hooks: list[Callable[[UpdateT], UpdateT | Awaitable[UpdateT] | None]] = ( transform_hooks if transform_hooks is not None else [] ) - self._result_hooks: list[Callable[[TFinal], TFinal | Awaitable[TFinal | None] | None]] = ( + self._result_hooks: list[Callable[[FinalT], FinalT | Awaitable[FinalT | None] | None]] = ( result_hooks if result_hooks is not None else [] ) self._cleanup_hooks: list[Callable[[], Awaitable[None] | None]] = ( @@ -2575,9 +2575,9 @@ def __init__( def map( self, - transform: Callable[[TUpdate], TOuterUpdate | Awaitable[TOuterUpdate]], - finalizer: Callable[[Sequence[TOuterUpdate]], TOuterFinal | Awaitable[TOuterFinal]], - ) -> ResponseStream[TOuterUpdate, TOuterFinal]: + transform: Callable[[UpdateT], OuterUpdateT | Awaitable[OuterUpdateT]], + finalizer: Callable[[Sequence[OuterUpdateT]], OuterFinalT | Awaitable[OuterFinalT]], + ) -> ResponseStream[OuterUpdateT, OuterFinalT]: """Create a new stream that transforms each update. The returned stream delegates iteration to this stream, ensuring single consumption. 
@@ -2619,8 +2619,8 @@ def map( def with_finalizer( self, - finalizer: Callable[[Sequence[TUpdate]], TOuterFinal | Awaitable[TOuterFinal]], - ) -> ResponseStream[TUpdate, TOuterFinal]: + finalizer: Callable[[Sequence[UpdateT]], OuterFinalT | Awaitable[OuterFinalT]], + ) -> ResponseStream[UpdateT, OuterFinalT]: """Create a new stream with a different finalizer. The returned stream delegates iteration to this stream, ensuring single consumption. @@ -2647,8 +2647,8 @@ def with_finalizer( @classmethod def from_awaitable( cls, - awaitable: Awaitable[ResponseStream[TUpdate, TFinal]], - ) -> ResponseStream[TUpdate, TFinal]: + awaitable: Awaitable[ResponseStream[UpdateT, FinalT]], + ) -> ResponseStream[UpdateT, FinalT]: """Create a ResponseStream from an awaitable that resolves to a ResponseStream. This is useful when you have an async function that returns a ResponseStream @@ -2672,7 +2672,7 @@ def from_awaitable( stream._wrap_inner = True return stream # type: ignore[return-value] - async def _get_stream(self) -> AsyncIterable[TUpdate]: + async def _get_stream(self) -> AsyncIterable[UpdateT]: if self._stream is None: if hasattr(self._stream_source, "__aiter__"): self._stream = self._stream_source # type: ignore[assignment] @@ -2686,10 +2686,10 @@ async def _get_stream(self) -> AsyncIterable[TUpdate]: return self._stream return self._stream # type: ignore[return-value] - def __aiter__(self) -> ResponseStream[TUpdate, TFinal]: + def __aiter__(self) -> ResponseStream[UpdateT, FinalT]: return self - async def __anext__(self) -> TUpdate: + async def __anext__(self) -> UpdateT: if self._iterator is None: stream = await self._get_stream() self._iterator = stream.__aiter__() @@ -2718,19 +2718,19 @@ async def __anext__(self) -> TUpdate: return update def __await__(self) -> Any: - async def _wrap() -> ResponseStream[TUpdate, TFinal]: + async def _wrap() -> ResponseStream[UpdateT, FinalT]: await self._get_stream() return self return _wrap().__await__() - async def 
get_final_response(self) -> TFinal: + async def get_final_response(self) -> FinalT: """Get the final response by applying the finalizer to all collected updates. If a finalizer is configured, it receives the list of updates and returns the final type. Result hooks are then applied in order to transform the result. - If no finalizer is configured, returns the collected updates as Sequence[TUpdate]. + If no finalizer is configured, returns the collected updates as Sequence[UpdateT]. For wrapped streams (created via .map() or .from_awaitable()): - The inner stream's finalizer is called first to produce the inner final result. @@ -2815,16 +2815,16 @@ async def get_final_response(self) -> TFinal: def with_transform_hook( self, - hook: Callable[[TUpdate], TUpdate | Awaitable[TUpdate] | None], - ) -> ResponseStream[TUpdate, TFinal]: + hook: Callable[[UpdateT], UpdateT | Awaitable[UpdateT] | None], + ) -> ResponseStream[UpdateT, FinalT]: """Register a transform hook executed for each update during iteration.""" self._transform_hooks.append(hook) return self def with_result_hook( self, - hook: Callable[[TFinal], TFinal | Awaitable[TFinal | None] | None], - ) -> ResponseStream[TUpdate, TFinal]: + hook: Callable[[FinalT], FinalT | Awaitable[FinalT | None] | None], + ) -> ResponseStream[UpdateT, FinalT]: """Register a result hook executed after finalization.""" self._result_hooks.append(hook) self._finalized = False @@ -2834,7 +2834,7 @@ def with_result_hook( def with_cleanup_hook( self, hook: Callable[[], Awaitable[None] | None], - ) -> ResponseStream[TUpdate, TFinal]: + ) -> ResponseStream[UpdateT, FinalT]: """Register a cleanup hook executed after stream consumption (before finalizer).""" self._cleanup_hooks.append(hook) return self @@ -2849,7 +2849,7 @@ async def _run_cleanup_hooks(self) -> None: await result @property - def updates(self) -> Sequence[TUpdate]: + def updates(self) -> Sequence[UpdateT]: return self._updates @@ -2944,8 +2944,8 @@ class 
_ChatOptionsBase(TypedDict, total=False): if TYPE_CHECKING: - class ChatOptions(_ChatOptionsBase, Generic[TResponseModel], total=False): - response_format: type[TResponseModel] | Mapping[str, Any] | None # type: ignore[misc] + class ChatOptions(_ChatOptionsBase, Generic[ResponseModelT], total=False): + response_format: type[ResponseModelT] | Mapping[str, Any] | None # type: ignore[misc] else: ChatOptions = _ChatOptionsBase diff --git a/python/packages/core/agent_framework/_workflows/_model_utils.py b/python/packages/core/agent_framework/_workflows/_model_utils.py index 72380901c6..0627d716a8 100644 --- a/python/packages/core/agent_framework/_workflows/_model_utils.py +++ b/python/packages/core/agent_framework/_workflows/_model_utils.py @@ -9,7 +9,7 @@ else: from typing_extensions import Self # pragma: no cover -TModel = TypeVar("TModel", bound="DictConvertible") +ModelT = TypeVar("ModelT", bound="DictConvertible") class DictConvertible: @@ -19,7 +19,7 @@ def to_dict(self) -> dict[str, Any]: raise NotImplementedError @classmethod - def from_dict(cls: type[TModel], data: dict[str, Any]) -> TModel: + def from_dict(cls: type[ModelT], data: dict[str, Any]) -> ModelT: return cls(**data) # type: ignore[arg-type] def clone(self, *, deep: bool = True) -> Self: @@ -31,7 +31,7 @@ def to_json(self) -> str: return json.dumps(self.to_dict()) @classmethod - def from_json(cls: type[TModel], raw: str) -> TModel: + def from_json(cls: type[ModelT], raw: str) -> ModelT: import json data = json.loads(raw) diff --git a/python/packages/core/agent_framework/azure/_assistants_client.py b/python/packages/core/agent_framework/azure/_assistants_client.py index 4f1d2190be..13538b95e2 100644 --- a/python/packages/core/agent_framework/azure/_assistants_client.py +++ b/python/packages/core/agent_framework/azure/_assistants_client.py @@ -30,8 +30,8 @@ # region Azure OpenAI Assistants Options TypedDict -TAzureOpenAIAssistantsOptions = TypeVar( - "TAzureOpenAIAssistantsOptions", 
+AzureOpenAIAssistantsOptionsT = TypeVar( + "AzureOpenAIAssistantsOptionsT", bound=TypedDict, # type: ignore[valid-type] default="OpenAIAssistantsOptions", covariant=True, @@ -42,7 +42,7 @@ class AzureOpenAIAssistantsClient( - OpenAIAssistantsClient[TAzureOpenAIAssistantsOptions], Generic[TAzureOpenAIAssistantsOptions] + OpenAIAssistantsClient[AzureOpenAIAssistantsOptionsT], Generic[AzureOpenAIAssistantsOptionsT] ): """Azure OpenAI Assistants client.""" diff --git a/python/packages/core/agent_framework/azure/_chat_client.py b/python/packages/core/agent_framework/azure/_chat_client.py index 4aa85e6d7e..acce7d4105 100644 --- a/python/packages/core/agent_framework/azure/_chat_client.py +++ b/python/packages/core/agent_framework/azure/_chat_client.py @@ -51,7 +51,7 @@ __all__ = ["AzureOpenAIChatClient", "AzureOpenAIChatOptions", "AzureUserSecurityContext"] -TResponseModel = TypeVar("TResponseModel", bound=BaseModel | None, default=None) +ResponseModelT = TypeVar("ResponseModelT", bound=BaseModel | None, default=None) # region Azure OpenAI Chat Options TypedDict @@ -79,7 +79,7 @@ class AzureUserSecurityContext(TypedDict, total=False): """The original client's IP address.""" -class AzureOpenAIChatOptions(OpenAIChatOptions[TResponseModel], Generic[TResponseModel], total=False): +class AzureOpenAIChatOptions(OpenAIChatOptions[ResponseModelT], Generic[ResponseModelT], total=False): """Azure OpenAI-specific chat options dict. Extends OpenAIChatOptions with Azure-specific options including @@ -134,8 +134,8 @@ class AzureOpenAIChatOptions(OpenAIChatOptions[TResponseModel], Generic[TRespons Note: You will be charged based on tokens across all choices. 
Keep n=1 to minimize costs.""" -TAzureOpenAIChatOptions = TypeVar( - "TAzureOpenAIChatOptions", +AzureOpenAIChatOptionsT = TypeVar( + "AzureOpenAIChatOptionsT", bound=TypedDict, # type: ignore[valid-type] default="AzureOpenAIChatOptions", covariant=True, @@ -144,17 +144,17 @@ class AzureOpenAIChatOptions(OpenAIChatOptions[TResponseModel], Generic[TRespons # endregion -TChatResponse = TypeVar("TChatResponse", ChatResponse, ChatResponseUpdate) -TAzureOpenAIChatClient = TypeVar("TAzureOpenAIChatClient", bound="AzureOpenAIChatClient") +ChatResponseT = TypeVar("ChatResponseT", ChatResponse, ChatResponseUpdate) +AzureOpenAIChatClientT = TypeVar("AzureOpenAIChatClientT", bound="AzureOpenAIChatClient") class AzureOpenAIChatClient( # type: ignore[misc] AzureOpenAIConfigMixin, - ChatMiddlewareLayer[TAzureOpenAIChatOptions], - FunctionInvocationLayer[TAzureOpenAIChatOptions], - ChatTelemetryLayer[TAzureOpenAIChatOptions], - RawOpenAIChatClient[TAzureOpenAIChatOptions], - Generic[TAzureOpenAIChatOptions], + ChatMiddlewareLayer[AzureOpenAIChatOptionsT], + FunctionInvocationLayer[AzureOpenAIChatOptionsT], + ChatTelemetryLayer[AzureOpenAIChatOptionsT], + RawOpenAIChatClient[AzureOpenAIChatOptionsT], + Generic[AzureOpenAIChatOptionsT], ): """Azure OpenAI Chat completion class with middleware, telemetry, and function invocation support.""" diff --git a/python/packages/core/agent_framework/azure/_responses_client.py b/python/packages/core/agent_framework/azure/_responses_client.py index 8f67b726a8..cde07d8695 100644 --- a/python/packages/core/agent_framework/azure/_responses_client.py +++ b/python/packages/core/agent_framework/azure/_responses_client.py @@ -39,8 +39,8 @@ __all__ = ["AzureOpenAIResponsesClient"] -TAzureOpenAIResponsesOptions = TypeVar( - "TAzureOpenAIResponsesOptions", +AzureOpenAIResponsesOptionsT = TypeVar( + "AzureOpenAIResponsesOptionsT", bound=TypedDict, # type: ignore[valid-type] default="OpenAIResponsesOptions", covariant=True, @@ -49,11 +49,11 @@ class 
AzureOpenAIResponsesClient( # type: ignore[misc] AzureOpenAIConfigMixin, - ChatMiddlewareLayer[TAzureOpenAIResponsesOptions], - FunctionInvocationLayer[TAzureOpenAIResponsesOptions], - ChatTelemetryLayer[TAzureOpenAIResponsesOptions], - RawOpenAIResponsesClient[TAzureOpenAIResponsesOptions], - Generic[TAzureOpenAIResponsesOptions], + ChatMiddlewareLayer[AzureOpenAIResponsesOptionsT], + FunctionInvocationLayer[AzureOpenAIResponsesOptionsT], + ChatTelemetryLayer[AzureOpenAIResponsesOptionsT], + RawOpenAIResponsesClient[AzureOpenAIResponsesOptionsT], + Generic[AzureOpenAIResponsesOptionsT], ): """Azure Responses completion class with middleware, telemetry, and function invocation support.""" diff --git a/python/packages/core/agent_framework/observability.py b/python/packages/core/agent_framework/observability.py index 9a839bb566..34c58b3b1a 100644 --- a/python/packages/core/agent_framework/observability.py +++ b/python/packages/core/agent_framework/observability.py @@ -54,7 +54,7 @@ ResponseStream, ) - TResponseModelT = TypeVar("TResponseModelT", bound=BaseModel) + ResponseModelBoundT = TypeVar("ResponseModelBoundT", bound=BaseModel) __all__ = [ "OBSERVABILITY_SETTINGS", @@ -71,7 +71,7 @@ AgentT = TypeVar("AgentT", bound="SupportsAgentRun") -TChatClient = TypeVar("TChatClient", bound="ChatClientProtocol[Any]") +ChatClientT = TypeVar("ChatClientT", bound="ChatClientProtocol[Any]") logger = get_logger() @@ -1049,15 +1049,15 @@ def _get_token_usage_histogram() -> metrics.Histogram: ) -TOptions_co = TypeVar( - "TOptions_co", +OptionsCoT = TypeVar( + "OptionsCoT", bound=TypedDict, # type: ignore[valid-type] default="ChatOptions[None]", covariant=True, ) -class ChatTelemetryLayer(Generic[TOptions_co]): +class ChatTelemetryLayer(Generic[OptionsCoT]): """Layer that wraps chat client get_response with OpenTelemetry tracing.""" def __init__(self, *args: Any, otel_provider_name: str | None = None, **kwargs: Any) -> None: @@ -1073,9 +1073,9 @@ def get_response( messages: str | 
ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: ChatOptions[TResponseModelT], + options: ChatOptions[ResponseModelBoundT], **kwargs: Any, - ) -> Awaitable[ChatResponse[TResponseModelT]]: ... + ) -> Awaitable[ChatResponse[ResponseModelBoundT]]: ... @overload def get_response( @@ -1083,7 +1083,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: TOptions_co | ChatOptions[None] | None = None, + options: OptionsCoT | ChatOptions[None] | None = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]]: ... @@ -1093,7 +1093,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[True], - options: TOptions_co | ChatOptions[Any] | None = None, + options: OptionsCoT | ChatOptions[Any] | None = None, **kwargs: Any, ) -> ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: ... @@ -1102,7 +1102,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: bool = False, - options: TOptions_co | ChatOptions[Any] | None = None, + options: OptionsCoT | ChatOptions[Any] | None = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]] | ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: """Trace chat responses with OpenTelemetry spans and metrics.""" diff --git a/python/packages/core/agent_framework/openai/_assistant_provider.py b/python/packages/core/agent_framework/openai/_assistant_provider.py index 103b23e716..6656de9de2 100644 --- a/python/packages/core/agent_framework/openai/_assistant_provider.py +++ b/python/packages/core/agent_framework/openai/_assistant_provider.py @@ -33,8 +33,8 @@ # Type variable for options - allows typed ChatAgent[TOptions] returns # Default matches OpenAIAssistantsClient's default options type -TOptions_co = TypeVar( - "TOptions_co", +OptionsCoT = TypeVar( + "OptionsCoT", bound=TypedDict, # type: ignore[valid-type] default="OpenAIAssistantsOptions", covariant=True, 
@@ -48,7 +48,7 @@ ) -class OpenAIAssistantProvider(Generic[TOptions_co]): +class OpenAIAssistantProvider(Generic[OptionsCoT]): """Provider for creating ChatAgent instances from OpenAI Assistants API. This provider allows you to create, retrieve, and wrap OpenAI Assistants @@ -203,10 +203,10 @@ async def create_agent( description: str | None = None, tools: _ToolsType | None = None, metadata: dict[str, str] | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Create a new assistant on OpenAI and return a ChatAgent. This method creates a new assistant on the OpenAI service and wraps it @@ -311,10 +311,10 @@ async def get_agent( *, tools: _ToolsType | None = None, instructions: str | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Retrieve an existing assistant by ID and return a ChatAgent. This method fetches an existing assistant from OpenAI by its ID @@ -377,10 +377,10 @@ def as_agent( *, tools: _ToolsType | None = None, instructions: str | None = None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Wrap an existing SDK Assistant object as a ChatAgent. This method does NOT make any HTTP calls. 
It simply wraps an already- @@ -522,9 +522,9 @@ def _create_chat_agent_from_assistant( instructions: str | None, middleware: Sequence[MiddlewareTypes] | None, context_provider: ContextProvider | None, - default_options: TOptions_co | None = None, + default_options: OptionsCoT | None = None, **kwargs: Any, - ) -> "ChatAgent[TOptions_co]": + ) -> "ChatAgent[OptionsCoT]": """Create a ChatAgent from an Assistant. Args: diff --git a/python/packages/core/agent_framework/openai/_assistants_client.py b/python/packages/core/agent_framework/openai/_assistants_client.py index 559b180e02..4335724f48 100644 --- a/python/packages/core/agent_framework/openai/_assistants_client.py +++ b/python/packages/core/agent_framework/openai/_assistants_client.py @@ -77,7 +77,7 @@ # region OpenAI Assistants Options TypedDict -TResponseModel = TypeVar("TResponseModel", bound=BaseModel | None, default=None) +ResponseModelT = TypeVar("ResponseModelT", bound=BaseModel | None, default=None) class VectorStoreToolResource(TypedDict, total=False): @@ -107,7 +107,7 @@ class AssistantToolResources(TypedDict, total=False): """Resources for file search tool, including vector store IDs.""" -class OpenAIAssistantsOptions(ChatOptions[TResponseModel], Generic[TResponseModel], total=False): +class OpenAIAssistantsOptions(ChatOptions[ResponseModelT], Generic[ResponseModelT], total=False): """OpenAI Assistants API-specific options dict. 
Extends base ChatOptions with Assistants API-specific parameters @@ -191,8 +191,8 @@ class OpenAIAssistantsOptions(ChatOptions[TResponseModel], Generic[TResponseMode } """Maps ChatOptions keys to OpenAI Assistants API parameter names.""" -TOpenAIAssistantsOptions = TypeVar( - "TOpenAIAssistantsOptions", +OpenAIAssistantsOptionsT = TypeVar( + "OpenAIAssistantsOptionsT", bound=TypedDict, # type: ignore[valid-type] default="OpenAIAssistantsOptions", covariant=True, @@ -204,11 +204,11 @@ class OpenAIAssistantsOptions(ChatOptions[TResponseModel], Generic[TResponseMode class OpenAIAssistantsClient( # type: ignore[misc] OpenAIConfigMixin, - ChatMiddlewareLayer[TOpenAIAssistantsOptions], - FunctionInvocationLayer[TOpenAIAssistantsOptions], - ChatTelemetryLayer[TOpenAIAssistantsOptions], - BaseChatClient[TOpenAIAssistantsOptions], - Generic[TOpenAIAssistantsOptions], + ChatMiddlewareLayer[OpenAIAssistantsOptionsT], + FunctionInvocationLayer[OpenAIAssistantsOptionsT], + ChatTelemetryLayer[OpenAIAssistantsOptionsT], + BaseChatClient[OpenAIAssistantsOptionsT], + Generic[OpenAIAssistantsOptionsT], ): """OpenAI Assistants client with middleware, telemetry, and function invocation support.""" diff --git a/python/packages/core/agent_framework/openai/_chat_client.py b/python/packages/core/agent_framework/openai/_chat_client.py index 9ec10644e8..7e5444b82e 100644 --- a/python/packages/core/agent_framework/openai/_chat_client.py +++ b/python/packages/core/agent_framework/openai/_chat_client.py @@ -63,7 +63,7 @@ logger = get_logger("agent_framework.openai") -TResponseModel = TypeVar("TResponseModel", bound=BaseModel | None, default=None) +ResponseModelT = TypeVar("ResponseModelT", bound=BaseModel | None, default=None) # region OpenAI Chat Options TypedDict @@ -83,7 +83,7 @@ class Prediction(TypedDict, total=False): content: str | list[PredictionTextContent] -class OpenAIChatOptions(ChatOptions[TResponseModel], Generic[TResponseModel], total=False): +class 
OpenAIChatOptions(ChatOptions[ResponseModelT], Generic[ResponseModelT], total=False): """OpenAI-specific chat options dict. Extends ChatOptions with options specific to OpenAI's Chat Completions API. @@ -122,7 +122,7 @@ class OpenAIChatOptions(ChatOptions[TResponseModel], Generic[TResponseModel], to prediction: Prediction -TOpenAIChatOptions = TypeVar("TOpenAIChatOptions", bound=TypedDict, default="OpenAIChatOptions", covariant=True) # type: ignore[valid-type] +OpenAIChatOptionsT = TypeVar("OpenAIChatOptionsT", bound=TypedDict, default="OpenAIChatOptions", covariant=True) # type: ignore[valid-type] OPTION_TRANSLATIONS: dict[str, str] = { "model_id": "model", @@ -134,8 +134,8 @@ class OpenAIChatOptions(ChatOptions[TResponseModel], Generic[TResponseModel], to # region Base Client class RawOpenAIChatClient( # type: ignore[misc] OpenAIBase, - BaseChatClient[TOpenAIChatOptions], - Generic[TOpenAIChatOptions], + BaseChatClient[OpenAIChatOptionsT], + Generic[OpenAIChatOptionsT], ): """Raw OpenAI Chat completion class without middleware, telemetry, or function invocation. 
@@ -591,11 +591,11 @@ def service_url(self) -> str: class OpenAIChatClient( # type: ignore[misc] OpenAIConfigMixin, - ChatMiddlewareLayer[TOpenAIChatOptions], - FunctionInvocationLayer[TOpenAIChatOptions], - ChatTelemetryLayer[TOpenAIChatOptions], - RawOpenAIChatClient[TOpenAIChatOptions], - Generic[TOpenAIChatOptions], + ChatMiddlewareLayer[OpenAIChatOptionsT], + FunctionInvocationLayer[OpenAIChatOptionsT], + ChatTelemetryLayer[OpenAIChatOptionsT], + RawOpenAIChatClient[OpenAIChatOptionsT], + Generic[OpenAIChatOptionsT], ): """OpenAI Chat completion class with middleware, telemetry, and function invocation support.""" diff --git a/python/packages/core/agent_framework/openai/_responses_client.py b/python/packages/core/agent_framework/openai/_responses_client.py index a2e7162f70..693b593f46 100644 --- a/python/packages/core/agent_framework/openai/_responses_client.py +++ b/python/packages/core/agent_framework/openai/_responses_client.py @@ -122,10 +122,10 @@ class StreamOptions(TypedDict, total=False): """Whether to include usage statistics in stream events.""" -TResponseFormat = TypeVar("TResponseFormat", bound=BaseModel | None, default=None) +ResponseFormatT = TypeVar("ResponseFormatT", bound=BaseModel | None, default=None) -class OpenAIResponsesOptions(ChatOptions[TResponseFormat], Generic[TResponseFormat], total=False): +class OpenAIResponsesOptions(ChatOptions[ResponseFormatT], Generic[ResponseFormatT], total=False): """OpenAI Responses API-specific chat options. Extends ChatOptions with options specific to OpenAI's Responses API. 
@@ -189,8 +189,8 @@ class OpenAIResponsesOptions(ChatOptions[TResponseFormat], Generic[TResponseForm - 'disabled': Fail with 400 error if exceeds context""" -TOpenAIResponsesOptions = TypeVar( - "TOpenAIResponsesOptions", +OpenAIResponsesOptionsT = TypeVar( + "OpenAIResponsesOptionsT", bound=TypedDict, # type: ignore[valid-type] default="OpenAIResponsesOptions", covariant=True, @@ -205,8 +205,8 @@ class OpenAIResponsesOptions(ChatOptions[TResponseFormat], Generic[TResponseForm class RawOpenAIResponsesClient( # type: ignore[misc] OpenAIBase, - BaseChatClient[TOpenAIResponsesOptions], - Generic[TOpenAIResponsesOptions], + BaseChatClient[OpenAIResponsesOptionsT], + Generic[OpenAIResponsesOptionsT], ): """Raw OpenAI Responses client without middleware, telemetry, or function invocation. @@ -1435,11 +1435,11 @@ def _get_metadata_from_response(self, output: Any) -> dict[str, Any]: class OpenAIResponsesClient( # type: ignore[misc] OpenAIConfigMixin, - ChatMiddlewareLayer[TOpenAIResponsesOptions], - FunctionInvocationLayer[TOpenAIResponsesOptions], - ChatTelemetryLayer[TOpenAIResponsesOptions], - RawOpenAIResponsesClient[TOpenAIResponsesOptions], - Generic[TOpenAIResponsesOptions], + ChatMiddlewareLayer[OpenAIResponsesOptionsT], + FunctionInvocationLayer[OpenAIResponsesOptionsT], + ChatTelemetryLayer[OpenAIResponsesOptionsT], + RawOpenAIResponsesClient[OpenAIResponsesOptionsT], + Generic[OpenAIResponsesOptionsT], ): """OpenAI Responses client class with middleware, telemetry, and function invocation support.""" diff --git a/python/packages/core/tests/core/conftest.py b/python/packages/core/tests/core/conftest.py index 7f987ca226..7cb5e63549 100644 --- a/python/packages/core/tests/core/conftest.py +++ b/python/packages/core/tests/core/conftest.py @@ -27,7 +27,7 @@ ToolProtocol, tool, ) -from agent_framework._clients import TOptions_co +from agent_framework._clients import OptionsCoT from agent_framework.observability import ChatTelemetryLayer if sys.version_info >= (3, 12): 
@@ -135,11 +135,11 @@ def _finalize(updates: Sequence[ChatResponseUpdate]) -> ChatResponse: class MockBaseChatClient( - ChatMiddlewareLayer[TOptions_co], - FunctionInvocationLayer[TOptions_co], - ChatTelemetryLayer[TOptions_co], - BaseChatClient[TOptions_co], - Generic[TOptions_co], + ChatMiddlewareLayer[OptionsCoT], + FunctionInvocationLayer[OptionsCoT], + ChatTelemetryLayer[OptionsCoT], + BaseChatClient[OptionsCoT], + Generic[OptionsCoT], ): """Mock implementation of a full-featured ChatClient.""" diff --git a/python/packages/declarative/agent_framework_declarative/_models.py b/python/packages/declarative/agent_framework_declarative/_models.py index 3066848927..3a3968df52 100644 --- a/python/packages/declarative/agent_framework_declarative/_models.py +++ b/python/packages/declarative/agent_framework_declarative/_models.py @@ -230,7 +230,7 @@ def to_json_schema(self) -> dict[str, Any]: return json_schema -TConnection = TypeVar("TConnection", bound="Connection") +ConnectionT = TypeVar("ConnectionT", bound="Connection") class Connection(SerializationMixin): @@ -248,12 +248,12 @@ def __init__( @classmethod def from_dict( - cls: type[TConnection], + cls: type[ConnectionT], value: MutableMapping[str, Any], /, *, dependencies: MutableMapping[str, Any] | None = None, - ) -> TConnection: + ) -> ConnectionT: """Create a Connection instance from a dictionary, dispatching to the appropriate subclass.""" # Only dispatch if we're being called on the base Connection class if cls is not Connection: @@ -505,7 +505,7 @@ def from_dict( return SerializationMixin.from_dict.__func__(cls, value, dependencies=dependencies) # type: ignore[attr-defined, no-any-return] -TTool = TypeVar("TTool", bound="Tool") +ToolT = TypeVar("ToolT", bound="Tool") class Tool(SerializationMixin): @@ -536,8 +536,8 @@ def __init__( @classmethod def from_dict( - cls: type[TTool], value: MutableMapping[str, Any], /, *, dependencies: MutableMapping[str, Any] | None = None - ) -> "TTool": + cls: type[ToolT], 
value: MutableMapping[str, Any], /, *, dependencies: MutableMapping[str, Any] | None = None + ) -> "ToolT": """Create a Tool instance from a dictionary, dispatching to the appropriate subclass.""" # Only dispatch if we're being called on the base Tool class if cls is not Tool: diff --git a/python/packages/devui/tests/devui/conftest.py b/python/packages/devui/tests/devui/conftest.py index b229b0e9e6..4d6f818795 100644 --- a/python/packages/devui/tests/devui/conftest.py +++ b/python/packages/devui/tests/devui/conftest.py @@ -29,7 +29,7 @@ Content, ResponseStream, ) -from agent_framework._clients import TOptions_co +from agent_framework._clients import OptionsCoT from agent_framework._workflows._agent_executor import AgentExecutorResponse from agent_framework._workflows._events import ( WorkflowErrorDetails, @@ -88,7 +88,7 @@ async def get_streaming_response( yield ChatResponseUpdate(contents=[Content.from_text(text="test streaming response")], role="assistant") -class MockBaseChatClient(BaseChatClient[TOptions_co], Generic[TOptions_co]): +class MockBaseChatClient(BaseChatClient[OptionsCoT], Generic[OptionsCoT]): """Full BaseChatClient mock with middleware support. Use this when testing features that require the full BaseChatClient interface. 
diff --git a/python/packages/durabletask/tests/test_durable_entities.py b/python/packages/durabletask/tests/test_durable_entities.py index e4516f1ce3..03e26784cc 100644 --- a/python/packages/durabletask/tests/test_durable_entities.py +++ b/python/packages/durabletask/tests/test_durable_entities.py @@ -26,7 +26,7 @@ ) from agent_framework_durabletask._entities import DurableTaskEntityStateProvider -TState = TypeVar("TState") +StateT = TypeVar("StateT") class MockEntityContext: @@ -37,8 +37,8 @@ def __init__(self, initial_state: Any = None) -> None: def get_state( self, - intended_type: type[TState] | None = None, - default: TState | None = None, + intended_type: type[StateT] | None = None, + default: StateT | None = None, ) -> Any: del intended_type if self._state is None: diff --git a/python/packages/foundry_local/agent_framework_foundry_local/_foundry_local_client.py b/python/packages/foundry_local/agent_framework_foundry_local/_foundry_local_client.py index 0ee6ce4ab0..5cf9e8c85d 100644 --- a/python/packages/foundry_local/agent_framework_foundry_local/_foundry_local_client.py +++ b/python/packages/foundry_local/agent_framework_foundry_local/_foundry_local_client.py @@ -38,13 +38,13 @@ "FoundryLocalSettings", ] -TResponseModel = TypeVar("TResponseModel", bound=BaseModel | None, default=None) +ResponseModelT = TypeVar("ResponseModelT", bound=BaseModel | None, default=None) # region Foundry Local Chat Options TypedDict -class FoundryLocalChatOptions(ChatOptions[TResponseModel], Generic[TResponseModel], total=False): +class FoundryLocalChatOptions(ChatOptions[ResponseModelT], Generic[ResponseModelT], total=False): """Azure Foundry Local (local model deployment) chat options dict. Extends base ChatOptions for local model inference via Foundry Local. 
@@ -104,8 +104,8 @@ class FoundryLocalChatOptions(ChatOptions[TResponseModel], Generic[TResponseMode } """Maps ChatOptions keys to OpenAI API parameter names (for compatibility).""" -TFoundryLocalChatOptions = TypeVar( - "TFoundryLocalChatOptions", +FoundryLocalChatOptionsT = TypeVar( + "FoundryLocalChatOptionsT", bound=TypedDict, # type: ignore[valid-type] default="FoundryLocalChatOptions", covariant=True, @@ -137,11 +137,11 @@ class FoundryLocalSettings(AFBaseSettings): class FoundryLocalClient( - ChatMiddlewareLayer[TFoundryLocalChatOptions], - FunctionInvocationLayer[TFoundryLocalChatOptions], - ChatTelemetryLayer[TFoundryLocalChatOptions], - RawOpenAIChatClient[TFoundryLocalChatOptions], - Generic[TFoundryLocalChatOptions], + ChatMiddlewareLayer[FoundryLocalChatOptionsT], + FunctionInvocationLayer[FoundryLocalChatOptionsT], + ChatTelemetryLayer[FoundryLocalChatOptionsT], + RawOpenAIChatClient[FoundryLocalChatOptionsT], + Generic[FoundryLocalChatOptionsT], ): """Foundry Local Chat completion class with middleware, telemetry, and function invocation support.""" diff --git a/python/packages/github_copilot/agent_framework_github_copilot/_agent.py b/python/packages/github_copilot/agent_framework_github_copilot/_agent.py index 8fa7e3c6a2..5d00caf71e 100644 --- a/python/packages/github_copilot/agent_framework_github_copilot/_agent.py +++ b/python/packages/github_copilot/agent_framework_github_copilot/_agent.py @@ -89,15 +89,15 @@ class GitHubCopilotOptions(TypedDict, total=False): """ -TOptions = TypeVar( - "TOptions", +OptionsT = TypeVar( + "OptionsT", bound=TypedDict, # type: ignore[valid-type] default="GitHubCopilotOptions", covariant=True, ) -class GitHubCopilotAgent(BaseAgent, Generic[TOptions]): +class GitHubCopilotAgent(BaseAgent, Generic[OptionsT]): """A GitHub Copilot Agent. 
This agent wraps the GitHub Copilot SDK to provide Copilot agentic capabilities @@ -154,7 +154,7 @@ def __init__( | MutableMapping[str, Any] | Sequence[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - default_options: TOptions | None = None, + default_options: OptionsT | None = None, env_file_path: str | None = None, env_file_encoding: str | None = None, ) -> None: @@ -223,7 +223,7 @@ def __init__( self._default_options = opts self._started = False - async def __aenter__(self) -> "GitHubCopilotAgent[TOptions]": + async def __aenter__(self) -> "GitHubCopilotAgent[OptionsT]": """Start the agent when entering async context.""" await self.start() return self @@ -280,7 +280,7 @@ def run( *, stream: Literal[False] = False, thread: AgentThread | None = None, - options: TOptions | None = None, + options: OptionsT | None = None, **kwargs: Any, ) -> Awaitable[AgentResponse]: ... @@ -291,7 +291,7 @@ def run( *, stream: Literal[True], thread: AgentThread | None = None, - options: TOptions | None = None, + options: OptionsT | None = None, **kwargs: Any, ) -> ResponseStream[AgentResponseUpdate, AgentResponse]: ... @@ -301,7 +301,7 @@ def run( *, stream: bool = False, thread: AgentThread | None = None, - options: TOptions | None = None, + options: OptionsT | None = None, **kwargs: Any, ) -> Awaitable[AgentResponse] | ResponseStream[AgentResponseUpdate, AgentResponse]: """Get a response from the agent. 
@@ -342,7 +342,7 @@ async def _run_impl( messages: str | ChatMessage | Sequence[str | ChatMessage] | None = None, *, thread: AgentThread | None = None, - options: TOptions | None = None, + options: OptionsT | None = None, **kwargs: Any, ) -> AgentResponse: """Non-streaming implementation of run.""" @@ -390,7 +390,7 @@ async def _stream_updates( messages: str | ChatMessage | Sequence[str | ChatMessage] | None = None, *, thread: AgentThread | None = None, - options: TOptions | None = None, + options: OptionsT | None = None, **kwargs: Any, ) -> AsyncIterable[AgentResponseUpdate]: """Internal method to stream updates from GitHub Copilot. diff --git a/python/packages/ollama/agent_framework_ollama/_chat_client.py b/python/packages/ollama/agent_framework_ollama/_chat_client.py index 6b4b55faac..4cc84c2ae9 100644 --- a/python/packages/ollama/agent_framework_ollama/_chat_client.py +++ b/python/packages/ollama/agent_framework_ollama/_chat_client.py @@ -63,13 +63,13 @@ __all__ = ["OllamaChatClient", "OllamaChatOptions"] -TResponseModel = TypeVar("TResponseModel", bound=BaseModel | None, default=None) +ResponseModelT = TypeVar("ResponseModelT", bound=BaseModel | None, default=None) # region Ollama Chat Options TypedDict -class OllamaChatOptions(ChatOptions[TResponseModel], Generic[TResponseModel], total=False): +class OllamaChatOptions(ChatOptions[ResponseModelT], Generic[ResponseModelT], total=False): """Ollama-specific chat options dict. Extends base ChatOptions with Ollama-specific parameters. 
@@ -270,7 +270,7 @@ class OllamaChatOptions(ChatOptions[TResponseModel], Generic[TResponseModel], to } """Maps ChatOptions keys to Ollama model option parameter names.""" -TOllamaChatOptions = TypeVar("TOllamaChatOptions", bound=TypedDict, default="OllamaChatOptions", covariant=True) # type: ignore[valid-type] +OllamaChatOptionsT = TypeVar("OllamaChatOptionsT", bound=TypedDict, default="OllamaChatOptions", covariant=True) # type: ignore[valid-type] # endregion @@ -289,10 +289,10 @@ class OllamaSettings(AFBaseSettings): class OllamaChatClient( - ChatMiddlewareLayer[TOllamaChatOptions], - FunctionInvocationLayer[TOllamaChatOptions], - ChatTelemetryLayer[TOllamaChatOptions], - BaseChatClient[TOllamaChatOptions], + ChatMiddlewareLayer[OllamaChatOptionsT], + FunctionInvocationLayer[OllamaChatOptionsT], + ChatTelemetryLayer[OllamaChatOptionsT], + BaseChatClient[OllamaChatOptionsT], ): """Ollama Chat completion class with middleware, telemetry, and function invocation support.""" diff --git a/python/packages/purview/agent_framework_purview/_models.py b/python/packages/purview/agent_framework_purview/_models.py index e4c27496a9..f98e05105f 100644 --- a/python/packages/purview/agent_framework_purview/_models.py +++ b/python/packages/purview/agent_framework_purview/_models.py @@ -175,7 +175,7 @@ def translate_activity(activity: Activity) -> ProtectionScopeActivities: # Simple value models # -------------------------------------------------------------------------------------- -TAliasSerializable = TypeVar("TAliasSerializable", bound="_AliasSerializable") +AliasSerializableT = TypeVar("AliasSerializableT", bound="_AliasSerializable") class _AliasSerializable(SerializationMixin): @@ -230,7 +230,7 @@ def model_dump_json(self, *, by_alias: bool = True, exclude_none: bool = True, * return json.dumps(self.model_dump(by_alias=by_alias, exclude_none=exclude_none, **kwargs)) @classmethod - def model_validate(cls: type[TAliasSerializable], value: MutableMapping[str, Any]) -> 
TAliasSerializable: # type: ignore[name-defined] + def model_validate(cls: type[AliasSerializableT], value: MutableMapping[str, Any]) -> AliasSerializableT: # type: ignore[name-defined] return cls(**value) # ------------------------------------------------------------------ diff --git a/python/samples/getting_started/chat_client/custom_chat_client.py b/python/samples/getting_started/chat_client/custom_chat_client.py index af56e5456f..149b7230e1 100644 --- a/python/samples/getting_started/chat_client/custom_chat_client.py +++ b/python/samples/getting_started/chat_client/custom_chat_client.py @@ -17,7 +17,7 @@ ResponseStream, Role, ) -from agent_framework._clients import TOptions_co +from agent_framework._clients import OptionsCoT from agent_framework.observability import ChatTelemetryLayer if sys.version_info >= (3, 13): @@ -38,7 +38,7 @@ """ -class EchoingChatClient(BaseChatClient[TOptions_co], Generic[TOptions_co]): +class EchoingChatClient(BaseChatClient[OptionsCoT], Generic[OptionsCoT]): """A custom chat client that echoes messages back with modifications. 
This demonstrates how to implement a custom chat client by extending BaseChatClient @@ -112,11 +112,11 @@ async def _stream() -> AsyncIterable[ChatResponseUpdate]: class EchoingChatClientWithLayers( # type: ignore[misc,type-var] - ChatMiddlewareLayer[TOptions_co], - ChatTelemetryLayer[TOptions_co], - FunctionInvocationLayer[TOptions_co], - EchoingChatClient[TOptions_co], - Generic[TOptions_co], + ChatMiddlewareLayer[OptionsCoT], + ChatTelemetryLayer[OptionsCoT], + FunctionInvocationLayer[OptionsCoT], + EchoingChatClient[OptionsCoT], + Generic[OptionsCoT], ): """Echoing chat client that explicitly composes middleware, telemetry, and function layers.""" diff --git a/python/samples/getting_started/observability/configure_otel_providers_with_parameters.py b/python/samples/getting_started/observability/configure_otel_providers_with_parameters.py index a5b0b3d7a8..e82cbdb2be 100644 --- a/python/samples/getting_started/observability/configure_otel_providers_with_parameters.py +++ b/python/samples/getting_started/observability/configure_otel_providers_with_parameters.py @@ -106,9 +106,15 @@ async def main(scenario: Literal["chat_client", "chat_client_stream", "tool", "a # Create custom OTLP exporters with specific configuration # Note: You need to install opentelemetry-exporter-otlp-proto-grpc or -http separately try: - from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter - from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter - from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter + from opentelemetry.exporter.otlp.proto.grpc._log_exporter import ( # pyright: ignore[reportMissingImports] + OTLPLogExporter, + ) + from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import ( # pyright: ignore[reportMissingImports] + OTLPMetricExporter, + ) + from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import ( # pyright: ignore[reportMissingImports] + OTLPSpanExporter, + ) # Create 
exporters with custom configuration # These will be added to any exporters configured via environment variables From ff56ead36e131fb94d6994cbd13ddabf6021d2df Mon Sep 17 00:00:00 2001 From: Giles Odigwe Date: Mon, 9 Feb 2026 12:11:25 -0800 Subject: [PATCH 2/2] addressed copilot comments --- .../azure-ai/agent_framework_azure_ai/_agent_provider.py | 2 +- .../azure-ai/agent_framework_azure_ai/_project_provider.py | 2 +- .../packages/core/agent_framework/openai/_assistant_provider.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/python/packages/azure-ai/agent_framework_azure_ai/_agent_provider.py b/python/packages/azure-ai/agent_framework_azure_ai/_agent_provider.py index bda50e9a47..dcc0e9db29 100644 --- a/python/packages/azure-ai/agent_framework_azure_ai/_agent_provider.py +++ b/python/packages/azure-ai/agent_framework_azure_ai/_agent_provider.py @@ -38,7 +38,7 @@ from typing_extensions import TypedDict # type: ignore # pragma: no cover -# Type variable for options - allows typed ChatAgent[TOptions] returns +# Type variable for options - allows typed ChatAgent[OptionsCoT] returns # Default matches AzureAIAgentClient's default options type OptionsCoT = TypeVar( "OptionsCoT", diff --git a/python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py b/python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py index 53b94670a9..e486a14560 100644 --- a/python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py +++ b/python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py @@ -47,7 +47,7 @@ logger = get_logger("agent_framework.azure") -# Type variable for options - allows typed ChatAgent[TOptions] returns +# Type variable for options - allows typed ChatAgent[OptionsCoT] returns # Default matches AzureAIClient's default options type OptionsCoT = TypeVar( "OptionsCoT", diff --git a/python/packages/core/agent_framework/openai/_assistant_provider.py
b/python/packages/core/agent_framework/openai/_assistant_provider.py index 16a4259216..7b662e4c2a 100644 --- a/python/packages/core/agent_framework/openai/_assistant_provider.py +++ b/python/packages/core/agent_framework/openai/_assistant_provider.py @@ -33,7 +33,7 @@ __all__ = ["OpenAIAssistantProvider"] -# Type variable for options - allows typed ChatAgent[TOptions] returns +# Type variable for options - allows typed OpenAIAssistantProvider[OptionsCoT] returns # Default matches OpenAIAssistantsClient's default options type OptionsCoT = TypeVar( "OptionsCoT",