33import asyncio
44import json
55import os
6+ from collections .abc import AsyncIterable
67from dataclasses import dataclass , field
78from typing import Annotated
89
1213 AgentExecutorRequest ,
1314 AgentExecutorResponse ,
1415 AgentResponse ,
15- AgentResponseUpdate ,
1616 Executor ,
1717 Message ,
1818 WorkflowBuilder ,
@@ -246,6 +246,31 @@ def display_agent_run_update(event: WorkflowEvent, last_executor: str | None) ->
246246 print (update , end = "" , flush = True )
247247
248248
async def consume_stream(stream: AsyncIterable[WorkflowEvent]) -> dict[str, str] | None:
    """Consume a workflow event stream and collect human feedback for draft requests.

    Args:
        stream: Async iterable of workflow events produced by ``workflow.run``.

    Returns:
        A mapping of ``request_id`` -> human answer for each ``DraftFeedbackRequest``
        seen in the stream, or ``None`` when the stream contained no feedback
        requests (i.e. the workflow has finished).

    Raises:
        SystemExit: When the human types ``exit`` at the feedback prompt.
    """
    # NOTE(review): final "output" events are not printed here — confirm the
    # workflow's final text is surfaced elsewhere, otherwise it is silently dropped.
    feedback_requests: list[WorkflowEvent] = []
    async for event in stream:
        if event.type == "request_info" and isinstance(event.data, DraftFeedbackRequest):
            # Stash the request so we can prompt the human after the stream completes.
            feedback_requests.append(event)

    if not feedback_requests:
        return None

    pending_responses: dict[str, str] = {}
    for request in feedback_requests:
        print("\n----- Writer draft -----")
        print(request.data.draft_text.strip())
        print("\nProvide guidance for the editor (or 'approve' to accept the draft).")
        answer = input("Human feedback: ").strip()  # noqa: ASYNC250
        if answer.lower() == "exit":
            print("Exiting...")
            # Raise SystemExit rather than calling the site-provided exit()
            # builtin, which is not guaranteed to exist in all environments.
            raise SystemExit(0)
        pending_responses[request.request_id] = answer

    return pending_responses
272+
273+
249274async def main () -> None :
250275 """Run the workflow and bridge human feedback between two agents."""
251276
@@ -267,66 +292,23 @@ async def main() -> None:
267292 .build ()
268293 )
269294
270- # Switch to turn on agent run update display.
271- # By default this is off to reduce clutter during human input.
272- display_agent_run_update_switch = False
273-
274295 print (
275296 "Interactive mode. When prompted, provide a short feedback note for the editor." ,
276297 flush = True ,
277298 )
278299
279- pending_responses : dict [str , str ] | None = None
280- completed = False
281- initial_run = True
300+ # Initiate the first run of the workflow.
301+ # Runs are not isolated; state is preserved across multiple calls to run.
302+ stream = workflow .run (
303+ "Create a short launch blurb for the LumenX desk lamp. Emphasize adjustability and warm lighting." ,
304+ stream = True ,
305+ )
306+ pending_responses = await consume_stream (stream )
282307
283- while not completed :
284- last_executor : str | None = None
285- if initial_run :
286- stream = workflow .run (
287- "Create a short launch blurb for the LumenX desk lamp. Emphasize adjustability and warm lighting." ,
288- stream = True ,
289- )
290- initial_run = False
291- elif pending_responses is not None :
292- stream = workflow .run (stream = True , responses = pending_responses )
293- pending_responses = None
294- else :
295- break
296-
297- requests : list [tuple [str , DraftFeedbackRequest ]] = []
298-
299- async for event in stream :
300- if (
301- event .type == "output"
302- and isinstance (event .data , AgentResponseUpdate )
303- and display_agent_run_update_switch
304- ):
305- display_agent_run_update (event , last_executor )
306- if event .type == "request_info" and isinstance (event .data , DraftFeedbackRequest ):
307- # Stash the request so we can prompt the human after the stream completes.
308- requests .append ((event .request_id , event .data ))
309- last_executor = None
310- elif event .type == "output" and not isinstance (event .data , AgentResponseUpdate ):
311- # Only mark as completed for final outputs, not streaming updates
312- last_executor = None
313- response = event .data
314- final_text = getattr (response , "text" , str (response ))
315- print (final_text , flush = True , end = "" )
316- completed = True
317-
318- if requests and not completed :
319- responses : dict [str , str ] = {}
320- for request_id , request in requests :
321- print ("\n ----- Writer draft -----" )
322- print (request .draft_text .strip ())
323- print ("\n Provide guidance for the editor (or 'approve' to accept the draft)." )
324- answer = input ("Human feedback: " ).strip () # noqa: ASYNC250
325- if answer .lower () == "exit" :
326- print ("Exiting..." )
327- return
328- responses [request_id ] = answer
329- pending_responses = responses
308+ # Run until there are no more requests
309+ while pending_responses is not None :
310+ stream = workflow .run (stream = True , responses = pending_responses )
311+ pending_responses = await consume_stream (stream )
330312
331313 print ("Workflow complete." )
332314
0 commit comments