From 921321765c067bb6afe0ad9ef43b06530a2ed8e3 Mon Sep 17 00:00:00 2001 From: Cecile FU Date: Mon, 9 Feb 2026 17:21:46 +0800 Subject: [PATCH] Fix: streaming Response Example in ollama_with_openai_chat_client --- .../agents/ollama/ollama_with_openai_chat_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/samples/getting_started/agents/ollama/ollama_with_openai_chat_client.py b/python/samples/getting_started/agents/ollama/ollama_with_openai_chat_client.py index da2468cb22..169db82e1a 100644 --- a/python/samples/getting_started/agents/ollama/ollama_with_openai_chat_client.py +++ b/python/samples/getting_started/agents/ollama/ollama_with_openai_chat_client.py @@ -68,7 +68,7 @@ async def streaming_example() -> None: query = "What's the weather like in Portland?" print(f"User: {query}") print("Agent: ", end="", flush=True) - async for chunk in agent.run(query, stream=True): + async for chunk in agent.run_stream(query): if chunk.text: print(chunk.text, end="", flush=True) print("\n")