Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 15 additions & 10 deletions examples/model_providers/README.md
Original file line number Diff line number Diff line change
@@ -1,19 +1,24 @@
# Custom LLM providers
# Model provider examples

The examples in this directory demonstrate how you might use a non-OpenAI LLM provider. To run them, first set a base URL, API key and model.
The examples in this directory show how to route models through adapter layers such as LiteLLM and
any-llm. The default examples all use OpenRouter so you only need one API key:

```bash
export EXAMPLE_BASE_URL="..."
export EXAMPLE_API_KEY="..."
export EXAMPLE_MODEL_NAME="..."
export OPENROUTER_API_KEY="..."
```

Then run the examples, e.g.:
Run one of the adapter examples:

```bash
uv run examples/model_providers/any_llm_provider.py
uv run examples/model_providers/any_llm_auto.py
uv run examples/model_providers/litellm_provider.py
uv run examples/model_providers/litellm_auto.py
```
Direct-model examples let you override the target model:
Direct-model examples let you override the target model:

```bash
uv run examples/model_providers/any_llm_provider.py --model openrouter/openai/gpt-5.4-mini
uv run examples/model_providers/litellm_provider.py --model openrouter/openai/gpt-5.4-mini
```
50 changes: 50 additions & 0 deletions examples/model_providers/any_llm_auto.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
from __future__ import annotations

import asyncio

from pydantic import BaseModel

from agents import Agent, ModelSettings, Runner, function_tool, set_tracing_disabled

"""This example uses the built-in any-llm routing through OpenRouter.
Set OPENROUTER_API_KEY before running it.
"""

set_tracing_disabled(disabled=True)


@function_tool
def get_weather(city: str):
    # Demo tool: log the lookup, then hand back a canned forecast.
    print(f"[debug] getting weather for {city}")
    report = f"The weather in {city} is sunny."
    return report


class Result(BaseModel):
    # Structured output schema the agent must produce (passed as `output_type` below).
    output_text: str  # the model's textual reply
    tool_results: list[str]  # values returned by tool calls — presumably get_weather strings; verify at runtime


async def main():
    """Run a haiku-only assistant that must call its weather tool and emit a `Result`."""
    # The "any-llm/" model prefix routes the request through the built-in
    # any-llm adapter; tool_choice="required" forces at least one tool call.
    assistant = Agent(
        name="Assistant",
        instructions="You only respond in haikus.",
        model="any-llm/openrouter/openai/gpt-5.4-mini",
        tools=[get_weather],
        model_settings=ModelSettings(tool_choice="required"),
        output_type=Result,
    )

    run_result = await Runner.run(assistant, "What's the weather in Tokyo?")
    print(run_result.final_output)


if __name__ == "__main__":
    import os

    # Fail fast with a clear message when the required key is absent.
    # (An empty-string value is deliberately accepted, matching getenv's None check.)
    api_key = os.getenv("OPENROUTER_API_KEY")
    if api_key is None:
        raise ValueError(
            "OPENROUTER_API_KEY is not set. Please set the environment variable and try again."
        )

    asyncio.run(main())
58 changes: 58 additions & 0 deletions examples/model_providers/any_llm_provider.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
from __future__ import annotations

import asyncio
import os

from agents import Agent, Runner, function_tool, set_tracing_disabled
from agents.extensions.models.any_llm_model import AnyLLMModel

"""This example uses the AnyLLMModel directly.
You can run it like this:
uv run examples/model_providers/any_llm_provider.py --model openrouter/openai/gpt-5.4-mini
or
uv run examples/model_providers/any_llm_provider.py --model openrouter/anthropic/claude-4.5-sonnet
"""

set_tracing_disabled(disabled=True)


@function_tool
def get_weather(city: str):
    # Demo tool: emit a debug trace, then return a fixed forecast string.
    debug_line = f"[debug] getting weather for {city}"
    print(debug_line)
    return f"The weather in {city} is sunny."


async def main(model: str, api_key: str):
    """Run the haiku assistant against *model* via AnyLLMModel with *api_key*."""
    # A "dummy" key means the caller supplied no real credentials; bail out politely.
    if api_key == "dummy":
        print("Skipping run because no valid OPENROUTER_API_KEY was provided.")
        return

    haiku_agent = Agent(
        name="Assistant",
        instructions="You only respond in haikus.",
        model=AnyLLMModel(model=model, api_key=api_key),
        tools=[get_weather],
    )

    outcome = await Runner.run(haiku_agent, "What's the weather in Tokyo?")
    print(outcome.final_output)


if __name__ == "__main__":
    import argparse

    # Both flags are optional; environment variables (then hard defaults) fill the gaps.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--model", type=str, required=False)
    arg_parser.add_argument("--api-key", type=str, required=False)
    cli = arg_parser.parse_args()

    chosen_model = cli.model or os.environ.get(
        "ANY_LLM_MODEL", "openrouter/openai/gpt-5.4-mini"
    )
    chosen_key = cli.api_key or os.environ.get("OPENROUTER_API_KEY", "dummy")

    if not cli.model:
        print(f"Using default model: {chosen_model}")
    if not cli.api_key:
        print("Using OPENROUTER_API_KEY from environment (or dummy placeholder).")

    asyncio.run(main(chosen_model, chosen_key))
11 changes: 6 additions & 5 deletions examples/model_providers/litellm_auto.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,9 @@

from agents import Agent, ModelSettings, Runner, function_tool, set_tracing_disabled

"""This example uses the built-in support for LiteLLM. To use this, ensure you have the
ANTHROPIC_API_KEY environment variable set.
"""This example uses the built-in support for LiteLLM through OpenRouter.
Set OPENROUTER_API_KEY before running it.
"""

set_tracing_disabled(disabled=True)
Expand All @@ -32,7 +33,7 @@ async def main():
name="Assistant",
instructions="You only respond in haikus.",
# We prefix with litellm/ to tell the Runner to use the LitellmModel
model="litellm/anthropic/claude-sonnet-4-5-20250929",
model="litellm/openrouter/openai/gpt-5.4-mini",
tools=[get_weather],
model_settings=ModelSettings(tool_choice="required"),
output_type=Result,
Expand All @@ -45,9 +46,9 @@ async def main():
if __name__ == "__main__":
import os

if os.getenv("ANTHROPIC_API_KEY") is None:
if os.getenv("OPENROUTER_API_KEY") is None:
raise ValueError(
"ANTHROPIC_API_KEY is not set. Please set it the environment variable and try again."
"OPENROUTER_API_KEY is not set. Please set the environment variable and try again."
)

asyncio.run(main())
12 changes: 6 additions & 6 deletions examples/model_providers/litellm_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@

"""This example uses the LitellmModel directly, to hit any model provider.
You can run it like this:
uv run examples/model_providers/litellm_provider.py --model anthropic/claude-3-5-sonnet-20240620
uv run examples/model_providers/litellm_provider.py --model openrouter/openai/gpt-5.4-mini
or
uv run examples/model_providers/litellm_provider.py --model gemini/gemini-2.0-flash
uv run examples/model_providers/litellm_provider.py --model openrouter/anthropic/claude-4.5-sonnet

Find more providers here: https://docs.litellm.ai/docs/providers
"""
Expand All @@ -26,7 +26,7 @@ def get_weather(city: str):

async def main(model: str, api_key: str):
if api_key == "dummy":
print("Skipping run because no valid LITELLM_API_KEY was provided.")
print("Skipping run because no valid OPENROUTER_API_KEY was provided.")
return
agent = Agent(
name="Assistant",
Expand All @@ -48,12 +48,12 @@ async def main(model: str, api_key: str):
parser.add_argument("--api-key", type=str, required=False)
args = parser.parse_args()

model = args.model or os.environ.get("LITELLM_MODEL", "openai/gpt-4o-mini")
api_key = args.api_key or os.environ.get("LITELLM_API_KEY", "dummy")
model = args.model or os.environ.get("LITELLM_MODEL", "openrouter/openai/gpt-5.4-mini")
api_key = args.api_key or os.environ.get("OPENROUTER_API_KEY", "dummy")

if not args.model:
print(f"Using default model: {model}")
if not args.api_key:
print("Using LITELLM_API_KEY from environment (or dummy placeholder).")
print("Using OPENROUTER_API_KEY from environment (or dummy placeholder).")

asyncio.run(main(model, api_key))
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ Repository = "https://github.com/openai/openai-agents-python"
voice = ["numpy>=2.2.0, <3; python_version>='3.10'", "websockets>=15.0, <16"]
viz = ["graphviz>=0.17"]
litellm = ["litellm>=1.81.0, <2"]
any-llm = ["any-llm-sdk>=1.11.0, <2; python_version >= '3.11'"]
realtime = ["websockets>=15.0, <16"]
sqlalchemy = ["SQLAlchemy>=2.0", "asyncpg>=0.29.0"]
encrypt = ["cryptography>=45.0, <46"]
Expand Down
Loading
Loading