Skip to content

Commit bbaf033

Browse files
committed
feat: use create_agent
Signed-off-by: Tyler Slaton <tyler@copilotkit.ai>
1 parent f32e1dc commit bbaf033

File tree

6 files changed

+271
-321
lines changed

6 files changed

+271
-321
lines changed

agent/main.py

Lines changed: 12 additions & 85 deletions
Original file line numberDiff line numberDiff line change
@@ -5,20 +5,9 @@
55

66
from typing import List
77

8-
from copilotkit import CopilotKitState
98
from langchain.tools import tool
10-
from langchain_core.messages import BaseMessage, SystemMessage
11-
from langchain_core.runnables import RunnableConfig
12-
from langchain_openai import ChatOpenAI
13-
from langgraph.graph import END, StateGraph
14-
from langgraph.prebuilt import ToolNode
15-
from langgraph.types import Command
16-
17-
18-
class AgentState(CopilotKitState):
19-
proverbs: List[str]
20-
# your_custom_agent_state: str = ""
21-
9+
from langchain.agents import create_agent
10+
from copilotkit import CopilotKitMiddleware, CopilotKitState
2211

2312
@tool
def get_weather(location: str):
    """
    Return a weather report for *location*.

    Stub implementation: always reports 70 degrees regardless of the
    location, so the agent has a predictable backend tool to call.
    """
    return f"The weather for {location} is 70 degrees."
2918

19+
class AgentState(CopilotKitState):
    """State shared between the agent and the CopilotKit frontend.

    Extends the base CopilotKit state with a list of proverbs.
    """

    # Proverbs accumulated in the shared state.
    proverbs: List[str]
3021

31-
# Tools executed on the backend (as opposed to CopilotKit frontend actions).
# Their names are extracted once so routing can do quick membership checks.
backend_tools = [get_weather]
backend_tool_names = [t.name for t in backend_tools]
34-
35-
36-
async def chat_node(state: AgentState, config: RunnableConfig) -> Command[str]:
    """Run one model turn; hand off to the tool node when a backend tool is called.

    Binds both the CopilotKit frontend actions (from shared state) and the
    backend tools to the model, invokes it with a system message exposing the
    current proverbs, and routes the resulting AI message accordingly.
    """
    llm = ChatOpenAI(model="gpt-4.1-mini")

    # Frontend actions arrive through the shared CopilotKit state; combine
    # them with our backend tools. Parallel tool calls are disabled so each
    # call is handled one at a time.
    frontend_actions = state.get("copilotkit", {}).get("actions", [])
    llm_with_tools = llm.bind_tools(
        [*frontend_actions, *backend_tools],
        parallel_tool_calls=False,
    )

    # Surface the current proverbs to the model via the system prompt.
    prompt = SystemMessage(
        content=f"You are a helpful assistant. The current proverbs are {state.get('proverbs', [])}."
    )

    ai_message = await llm_with_tools.ainvoke(
        [prompt, *state["messages"]],
        config,
    )

    # Only backend tool calls are executed here; frontend actions are
    # returned to the client, so in that case the graph ends instead.
    if route_to_tool_node(ai_message):
        print("routing to tool node")
        next_node = "tool_node"
    else:
        next_node = END

    return Command(
        goto=next_node,
        update={
            "messages": [ai_message],
        },
    )
80-
81-
82-
def route_to_tool_node(response: BaseMessage):
    """
    Route to tool node if any tool call in the response matches a backend tool name.
    """
    calls = getattr(response, "tool_calls", None) or []
    # True only when at least one requested tool is one of ours.
    return any(call.get("name") in backend_tool_names for call in calls)
94-
95-
96-
# Define the workflow graph: the chat node is the entry point; backend tool
# calls are executed by the tool node, which always hands control back to the
# chat node so the model can observe tool results and respond.
workflow = StateGraph(AgentState)
workflow.add_node("chat_node", chat_node)
workflow.add_node("tool_node", ToolNode(tools=backend_tools))
# Loop tool output back into the chat node for the next model turn.
workflow.add_edge("tool_node", "chat_node")
workflow.set_entry_point("chat_node")
22+
# Build the agent with LangChain's prebuilt create_agent factory.
# NOTE(review): CopilotKitMiddleware presumably injects frontend actions and
# shared state into the agent loop — confirm against the CopilotKit docs.
agent = create_agent(
    model="gpt-4.1-mini",
    system_prompt="You are a helpful research assistant.",
    tools=[get_weather],
    state_schema=AgentState,
    middleware=[CopilotKitMiddleware()],
)
10229

103-
# Compile the workflow into an executable graph.
# NOTE(review): presumably `graph` is the module-level name the host runtime
# imports — confirm against the deployment config.
graph = workflow.compile()
30+
# Alias the prebuilt agent under the `graph` name.
# NOTE(review): presumably kept for backward compatibility with consumers
# that import `graph` — confirm against the deployment config.
graph = agent

0 commit comments

Comments
 (0)