Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
157 changes: 150 additions & 7 deletions src/strands/experimental/agent_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,17 +9,46 @@
agent = config_to_agent("config.json")
# Add tools that need code-based instantiation
agent.tool_registry.process_tools([ToolWithConfigArg(HttpsConnection("localhost"))])

The ``model`` field supports two formats:

**String format (backward compatible — defaults to Bedrock):**
{"model": "us.anthropic.claude-sonnet-4-20250514-v1:0"}

**Object format (supports all providers):**
{
"model": {
"provider": "anthropic",
"model_id": "claude-sonnet-4-20250514",
"max_tokens": 10000,
"client_args": {"api_key": "$ANTHROPIC_API_KEY"}
}
}

Environment variable references (``$VAR`` or ``${VAR}``) in model config values are resolved
automatically before provider instantiation.

Note: The following constructor parameters cannot be specified from JSON because they require
code-based instantiation: ``boto_session`` (Bedrock, SageMaker), ``client`` (OpenAI, Gemini),
``gemini_tools`` (Gemini). Use ``region_name`` / ``client_args`` as JSON-friendly alternatives.
"""

from __future__ import annotations

import json
import os
import re
from pathlib import Path
from typing import Any
from typing import TYPE_CHECKING, Any

import jsonschema
from jsonschema import ValidationError

if TYPE_CHECKING:
from ..models.model import Model

# JSON Schema for agent configuration.
# The "model" property accepts either a plain string (treated as a Bedrock
# model_id, for backward compatibility), null, or an object that must at least
# name its "provider". Provider-specific keys pass through via
# "additionalProperties": True and are validated by the model class itself.
AGENT_CONFIG_SCHEMA = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Agent Configuration",
    "description": "Configuration schema for creating agents",
    "type": "object",
    "properties": {
        "name": {"description": "Name of the agent", "type": ["string", "null"], "default": None},
        "model": {
            "description": (
                "The model to use for this agent. Can be a string (Bedrock model_id) "
                "or an object with a 'provider' field for any supported provider."
            ),
            "oneOf": [
                {"type": "string"},
                {"type": "null"},
                {
                    "type": "object",
                    "properties": {
                        "provider": {
                            "description": "The model provider name",
                            "type": "string",
                        }
                    },
                    "required": ["provider"],
                    "additionalProperties": True,
                },
            ],
            "default": None,
        },
        "prompt": {
            "description": "The system prompt for the agent. Provides high level context to the agent.",
            "type": ["string", "null"],
            "default": None,
        },
        "tools": {
            "description": (
                "List of tools the agent can use. Can be file paths, Python module names, "
                "or @tool annotated functions in files."
            ),
            "type": "array",
            "items": {"type": "string"},
            "default": [],
        },
    },
    "additionalProperties": False,
}
# Pre-compile validator for better performance
_VALIDATOR = jsonschema.Draft7Validator(AGENT_CONFIG_SCHEMA)

# Only full-string env var references are resolved (no inline interpolation).
# "prefix-$VAR" is NOT resolved; construct values programmatically instead.
# Matches exactly "${NAME}" (any non-brace chars) or "$NAME" (identifier chars).
_ENV_VAR_PATTERN = re.compile(r"^\$\{([^}]+)\}$|^\$([A-Za-z_][A-Za-z0-9_]*)$")

# Provider name -> model class name. Class names are resolved at runtime via
# getattr(strands.models, class_name), which leverages the package's lazy
# __getattr__ so optional provider dependencies are imported only when used.
PROVIDER_MAP: dict[str, str] = {
    "bedrock": "BedrockModel",
    "anthropic": "AnthropicModel",
    "openai": "OpenAIModel",
    "gemini": "GeminiModel",
    "ollama": "OllamaModel",
    "litellm": "LiteLLMModel",
    "mistral": "MistralModel",
    "llamaapi": "LlamaAPIModel",
    "llamacpp": "LlamaCppModel",
    "sagemaker": "SageMakerAIModel",
    "writer": "WriterModel",
    "openai_responses": "OpenAIResponsesModel",
}


def _resolve_env_vars(value: Any) -> Any:
    """Recursively resolve environment variable references in config values.

    A string that is exactly ``$VAR_NAME`` or ``${VAR_NAME}`` is replaced by the
    value of that environment variable. Dicts and lists are walked recursively;
    every other type passes through untouched.

    Args:
        value: The value to resolve. Can be a string, dict, list, or any other type.

    Returns:
        The resolved value with environment variable references replaced.

    Raises:
        ValueError: If a referenced environment variable is not set.
    """
    # Recurse into containers first; only leaf strings can hold references.
    if isinstance(value, dict):
        return {key: _resolve_env_vars(item) for key, item in value.items()}
    if isinstance(value, list):
        return [_resolve_env_vars(item) for item in value]
    if not isinstance(value, str):
        return value

    match = _ENV_VAR_PATTERN.match(value)
    if match is None:
        return value

    # Exactly one of the two alternation groups captured the variable name.
    var_name = match.group(1) or match.group(2)
    env_value = os.environ.get(var_name)
    if env_value is None:
        raise ValueError(f"Environment variable '{var_name}' is not set")
    return env_value


def _create_model_from_dict(model_config: dict[str, Any]) -> "Model":
    """Create a Model instance from a provider config dict.

    Looks up the model class named by the ``provider`` field and hands the
    remaining keys to that class's ``from_dict`` constructor. The models package
    is imported lazily so optional provider dependencies are only loaded on use.

    Args:
        model_config: Dict containing at least a ``provider`` key and provider-specific params.

    Returns:
        A configured Model instance for the specified provider.

    Raises:
        ValueError: If the provider name is not recognized.
        ImportError: If the provider's optional dependencies are not installed.
    """
    # Work on a copy so the caller's dict is never mutated.
    remaining = dict(model_config)
    provider = remaining.pop("provider")

    class_name = PROVIDER_MAP.get(provider)
    if class_name is None:
        supported = ", ".join(sorted(PROVIDER_MAP))
        raise ValueError(f"Unknown model provider: '{provider}'. Supported providers: {supported}")

    from .. import models

    # getattr goes through the models package's lazy __getattr__.
    model_cls = getattr(models, class_name)
    return model_cls.from_dict(remaining)


def config_to_agent(config: str | dict[str, Any], **kwargs: Any) -> Any:
"""Create an Agent from a configuration file or dictionary.

This function supports tools that can be loaded declaratively (file paths, module names,
Expand Down Expand Up @@ -83,6 +211,12 @@
Create agent from dictionary:
>>> config = {"model": "anthropic.claude-3-5-sonnet-20241022-v2:0", "tools": ["calculator"]}
>>> agent = config_to_agent(config)

Create agent with object model config:
>>> config = {
... "model": {"provider": "openai", "model_id": "gpt-4o", "client_args": {"api_key": "$OPENAI_API_KEY"}}
... }
>>> agent = config_to_agent(config)
"""
# Parse configuration
if isinstance(config, str):
Expand Down Expand Up @@ -114,11 +248,20 @@
raise ValueError(f"Configuration validation error at {error_path}: {e.message}") from e

# Prepare Agent constructor arguments
agent_kwargs = {}
agent_kwargs: dict[str, Any] = {}

# Handle model field — string vs object format
model_value = config_dict.get("model")
if isinstance(model_value, dict):
# Object format: resolve env vars and create Model instance via factory
resolved_config = _resolve_env_vars(model_value)
agent_kwargs["model"] = _create_model_from_dict(resolved_config)
elif model_value is not None:
# String format (backward compat): pass directly as model_id to Agent
agent_kwargs["model"] = model_value

# Map configuration keys to Agent constructor parameters
# Map remaining configuration keys to Agent constructor parameters
config_mapping = {
"model": "model",
"prompt": "system_prompt",
"tools": "tools",
"name": "name",
Expand Down
28 changes: 28 additions & 0 deletions src/strands/models/bedrock.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,34 @@ class BedrockConfig(TypedDict, total=False):
temperature: float | None
top_p: float | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "BedrockModel":
    """Create a BedrockModel from a configuration dictionary.

    Handles extraction of ``region_name``, ``endpoint_url``, and conversion of
    ``boto_client_config`` from a plain dict to ``botocore.config.Config``.

    Args:
        config: Model configuration dictionary. A copy is made internally;
            the caller's dict is not modified.

    Returns:
        A configured BedrockModel instance.
    """
    remaining = dict(config)
    kwargs: dict[str, Any] = {}

    # These keys are constructor parameters, not model-config entries.
    for key in ("region_name", "endpoint_url"):
        if key in remaining:
            kwargs[key] = remaining.pop(key)

    if "boto_client_config" in remaining:
        raw = remaining.pop("boto_client_config")
        # A plain dict (e.g. from JSON) is promoted to a botocore Config.
        if isinstance(raw, dict):
            raw = BotocoreConfig(**raw)
        kwargs["boto_client_config"] = raw

    kwargs.update(remaining)
    return cls(**kwargs)

def __init__(
self,
*,
Expand Down
22 changes: 22 additions & 0 deletions src/strands/models/llamacpp.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,28 @@ class LlamaCppConfig(TypedDict, total=False):
model_id: str
params: dict[str, Any] | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "LlamaCppModel":
    """Create a LlamaCppModel from a configuration dictionary.

    Handles extraction of ``base_url`` and ``timeout`` as separate constructor parameters.

    Args:
        config: Model configuration dictionary. A copy is made internally;
            the caller's dict is not modified.

    Returns:
        A configured LlamaCppModel instance.
    """
    remaining = dict(config)
    kwargs: dict[str, Any] = {}
    # base_url and timeout are constructor parameters, not model-config keys.
    for key in ("base_url", "timeout"):
        if key in remaining:
            kwargs[key] = remaining.pop(key)
    kwargs.update(remaining)
    return cls(**kwargs)

def __init__(
self,
base_url: str = "http://localhost:8080",
Expand Down
24 changes: 24 additions & 0 deletions src/strands/models/mistral.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,30 @@ class MistralConfig(TypedDict, total=False):
top_p: float | None
stream: bool | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "MistralModel":
    """Create a MistralModel from a configuration dictionary.

    Handles extraction of ``api_key`` and ``client_args`` as separate constructor parameters.

    Args:
        config: Model configuration dictionary. A copy is made internally;
            the caller's dict is not modified.

    Returns:
        A configured MistralModel instance.
    """
    remaining = dict(config)
    kwargs: dict[str, Any] = {}
    # Forward these only when present so constructor defaults still apply.
    for key in ("api_key", "client_args"):
        extracted = remaining.pop(key, None)
        if extracted is not None:
            kwargs[key] = extracted
    kwargs.update(remaining)
    return cls(**kwargs)

def __init__(
self,
api_key: str | None = None,
Expand Down
25 changes: 25 additions & 0 deletions src/strands/models/model.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
"""Abstract base class for Agent model providers."""

from __future__ import annotations

import abc
import logging
from collections.abc import AsyncGenerator, AsyncIterable
Expand Down Expand Up @@ -51,6 +53,29 @@ def stateful(self) -> bool:
"""
return False

@classmethod
def from_dict(cls, config: dict[str, Any]) -> Model:
    """Create a Model instance from a configuration dictionary.

    The default implementation extracts ``client_args`` (if present) and passes
    all remaining keys as keyword arguments to the constructor. Subclasses with
    non-standard constructor signatures should override this method.

    Args:
        config: Provider-specific configuration dictionary. A copy is made internally;
            the caller's dict is not modified.

    Returns:
        A configured Model instance.
    """
    remaining = dict(config)
    client_args = remaining.pop("client_args", None)
    # Only forward client_args when provided so the constructor default applies.
    kwargs: dict[str, Any] = {} if client_args is None else {"client_args": client_args}
    kwargs.update(remaining)
    return cls(**kwargs)

@abc.abstractmethod
# pragma: no cover
def update_config(self, **model_config: Any) -> None:
Expand Down
23 changes: 23 additions & 0 deletions src/strands/models/ollama.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,29 @@ class OllamaConfig(TypedDict, total=False):
temperature: float | None
top_p: float | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "OllamaModel":
    """Create an OllamaModel from a configuration dictionary.

    Handles extraction of ``host`` as a positional argument and mapping of
    ``client_args`` to the ``ollama_client_args`` constructor parameter.

    Args:
        config: Model configuration dictionary. A copy is made internally;
            the caller's dict is not modified.

    Returns:
        A configured OllamaModel instance.
    """
    remaining = dict(config)
    host = remaining.pop("host", None)
    client_args = remaining.pop("client_args", None)
    # Rename the generic JSON key to the Ollama-specific constructor parameter.
    extracted = {"ollama_client_args": client_args} if client_args is not None else {}
    return cls(host, **{**extracted, **remaining})

def __init__(
self,
host: str | None,
Expand Down
26 changes: 26 additions & 0 deletions src/strands/models/sagemaker.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,32 @@ class SageMakerAIEndpointConfig(TypedDict, total=False):
target_variant: str | None | None
additional_args: dict[str, Any] | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "SageMakerAIModel":
    """Create a SageMakerAIModel from a configuration dictionary.

    Handles extraction of ``endpoint_config``, ``payload_config``, and conversion of
    ``boto_client_config`` from a plain dict to ``botocore.config.Config``.

    Args:
        config: Model configuration dictionary. A copy is made internally;
            the caller's dict is not modified.

    Returns:
        A configured SageMakerAIModel instance.

    Raises:
        ValueError: If the config contains keys other than ``endpoint_config``,
            ``payload_config``, and ``boto_client_config``.
    """
    remaining = dict(config)
    kwargs: dict[str, Any] = {
        "endpoint_config": remaining.pop("endpoint_config", {}),
        "payload_config": remaining.pop("payload_config", {}),
    }
    if "boto_client_config" in remaining:
        raw = remaining.pop("boto_client_config")
        # A plain dict (e.g. from JSON) is promoted to a botocore Config.
        kwargs["boto_client_config"] = BotocoreConfig(**raw) if isinstance(raw, dict) else raw
    # Unlike other providers, SageMaker takes no free-form keyword arguments,
    # so reject anything unrecognized instead of passing it to the constructor.
    if remaining:
        unexpected = ", ".join(sorted(remaining.keys()))
        raise ValueError(f"Unsupported SageMaker config keys: {unexpected}")
    return cls(**kwargs)

def __init__(
self,
endpoint_config: SageMakerAIEndpointConfig,
Expand Down
Loading
Loading