From 2375651ad9946d4a40e69dc0b89260c5461aec9a Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Fri, 6 Feb 2026 15:41:20 -0800 Subject: [PATCH 01/16] WIP: Checkpoint refactor: encode/decode, checkpoint format, etc --- python/.cspell.json | 8 +- .../agent_framework/_workflows/__init__.py | 3 - .../_workflows/_agent_executor.py | 20 +- .../agent_framework/_workflows/_checkpoint.py | 52 ++- .../_workflows/_checkpoint_encoding.py | 383 +++++++----------- .../_workflows/_checkpoint_summary.py | 49 --- .../_workflows/_conversation_state.py | 75 ---- .../agent_framework/_workflows/_events.py | 5 +- .../agent_framework/_workflows/_runner.py | 149 ++----- .../_workflows/_runner_context.py | 62 +-- .../agent_framework/_workflows/_workflow.py | 13 +- .../_workflows/_workflow_executor.py | 5 +- .../tests/workflow/test_agent_executor.py | 8 +- .../tests/workflow/test_checkpoint_decode.py | 218 +++++----- .../tests/workflow/test_checkpoint_encode.py | 267 ++++-------- .../test_request_info_and_response.py | 10 +- .../test_request_info_event_rehydrate.py | 124 +----- .../core/tests/workflow/test_runner.py | 12 +- .../core/tests/workflow/test_workflow.py | 2 +- .../workflow/test_workflow_observability.py | 4 +- .../_group_chat.py | 5 +- .../_orchestration_state.py | 12 +- .../checkpoint_with_human_in_the_loop.py | 28 -- 23 files changed, 451 insertions(+), 1063 deletions(-) delete mode 100644 python/packages/core/agent_framework/_workflows/_checkpoint_summary.py delete mode 100644 python/packages/core/agent_framework/_workflows/_conversation_state.py diff --git a/python/.cspell.json b/python/.cspell.json index db575845e8..a26cc7fed7 100644 --- a/python/.cspell.json +++ b/python/.cspell.json @@ -24,8 +24,8 @@ ], "words": [ "aeiou", - "aiplatform", "agui", + "aiplatform", "azuredocindex", "azuredocs", "azurefunctions", @@ -57,20 +57,22 @@ "nopep", "NOSQL", "ollama", - "otlp", "Onnx", "onyourdatatest", "OPENAI", "opentelemetry", "OTEL", + "otlp", "powerfx", "protos", "pydantic", 
"pytestmark", "qdrant", "retrywrites", - "streamable", "serde", + "streamable", + "superstep", + "supersteps", "templating", "uninstrument", "vectordb", diff --git a/python/packages/core/agent_framework/_workflows/__init__.py b/python/packages/core/agent_framework/_workflows/__init__.py index c5666f7b26..f998786eb0 100644 --- a/python/packages/core/agent_framework/_workflows/__init__.py +++ b/python/packages/core/agent_framework/_workflows/__init__.py @@ -13,7 +13,6 @@ InMemoryCheckpointStorage, WorkflowCheckpoint, ) -from ._checkpoint_summary import WorkflowCheckpointSummary, get_checkpoint_summary from ._const import ( DEFAULT_MAX_ITERATIONS, ) @@ -108,7 +107,6 @@ "WorkflowBuilder", "WorkflowCheckpoint", "WorkflowCheckpointException", - "WorkflowCheckpointSummary", "WorkflowContext", "WorkflowConvergenceException", "WorkflowErrorDetails", @@ -124,7 +122,6 @@ "WorkflowViz", "create_edge_runner", "executor", - "get_checkpoint_summary", "handler", "resolve_agent_id", "response_handler", diff --git a/python/packages/core/agent_framework/_workflows/_agent_executor.py b/python/packages/core/agent_framework/_workflows/_agent_executor.py index a7e2bd79b9..35e2cbd9c5 100644 --- a/python/packages/core/agent_framework/_workflows/_agent_executor.py +++ b/python/packages/core/agent_framework/_workflows/_agent_executor.py @@ -13,9 +13,7 @@ from .._threads import AgentThread from .._types import AgentResponse, AgentResponseUpdate, ChatMessage from ._agent_utils import resolve_agent_id -from ._checkpoint_encoding import decode_checkpoint_value, encode_checkpoint_value from ._const import WORKFLOW_RUN_KWARGS_KEY -from ._conversation_state import encode_chat_messages from ._executor import Executor, handler from ._message_utils import normalize_messages_input from ._request_info_mixin import response_handler @@ -229,11 +227,11 @@ async def on_checkpoint_save(self) -> dict[str, Any]: serialized_thread = await self._agent_thread.serialize() return { - "cache": 
encode_chat_messages(self._cache), - "full_conversation": encode_chat_messages(self._full_conversation), + "cache": self._cache, + "full_conversation": self._full_conversation, "agent_thread": serialized_thread, - "pending_agent_requests": encode_checkpoint_value(self._pending_agent_requests), - "pending_responses_to_agent": encode_checkpoint_value(self._pending_responses_to_agent), + "pending_agent_requests": self._pending_agent_requests, + "pending_responses_to_agent": self._pending_responses_to_agent, } @override @@ -243,12 +241,10 @@ async def on_checkpoint_restore(self, state: dict[str, Any]) -> None: Args: state: Checkpoint data dict """ - from ._conversation_state import decode_chat_messages - cache_payload = state.get("cache") if cache_payload: try: - self._cache = decode_chat_messages(cache_payload) + self._cache = cache_payload except Exception as exc: logger.warning("Failed to restore cache: %s", exc) self._cache = [] @@ -258,7 +254,7 @@ async def on_checkpoint_restore(self, state: dict[str, Any]) -> None: full_conversation_payload = state.get("full_conversation") if full_conversation_payload: try: - self._full_conversation = decode_chat_messages(full_conversation_payload) + self._full_conversation = full_conversation_payload except Exception as exc: logger.warning("Failed to restore full conversation: %s", exc) self._full_conversation = [] @@ -279,11 +275,11 @@ async def on_checkpoint_restore(self, state: dict[str, Any]) -> None: pending_requests_payload = state.get("pending_agent_requests") if pending_requests_payload: - self._pending_agent_requests = decode_checkpoint_value(pending_requests_payload) + self._pending_agent_requests = pending_requests_payload pending_responses_payload = state.get("pending_responses_to_agent") if pending_responses_payload: - self._pending_responses_to_agent = decode_checkpoint_value(pending_responses_payload) + self._pending_responses_to_agent = pending_responses_payload def reset(self) -> None: """Reset the internal 
cache of the executor.""" diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint.py b/python/packages/core/agent_framework/_workflows/_checkpoint.py index 0334ee3893..0b8a319736 100644 --- a/python/packages/core/agent_framework/_workflows/_checkpoint.py +++ b/python/packages/core/agent_framework/_workflows/_checkpoint.py @@ -11,10 +11,14 @@ from dataclasses import asdict, dataclass, field from datetime import datetime, timezone from pathlib import Path -from typing import Any, Protocol +from typing import TYPE_CHECKING, Any, Protocol logger = logging.getLogger(__name__) +if TYPE_CHECKING: + from ._events import WorkflowEvent + from ._runner_context import Message + @dataclass(slots=True) class WorkflowCheckpoint: @@ -29,9 +33,12 @@ class WorkflowCheckpoint: timestamp: ISO 8601 timestamp when checkpoint was created messages: Messages exchanged between executors state: Committed workflow state including user data and executor states. - This contains only committed state; pending state changes are not - included in checkpoints. Executor states are stored under the - reserved key '_executor_state'. + This contains only committed state; pending state changes are not + included in checkpoints. Executor states are stored under the + reserved key '_executor_state'. + pending_request_info_events: Any pending request info events that have not + yet been processed at the time of checkpointing. This allows the workflow + to resume with the correct pending events after a restore. 
iteration_count: Current iteration number when checkpoint was created metadata: Additional metadata (e.g., superstep info, graph signature) version: Checkpoint format version @@ -46,9 +53,9 @@ class WorkflowCheckpoint: timestamp: str = field(default_factory=lambda: datetime.now(timezone.utc).isoformat()) # Core workflow state - messages: dict[str, list[dict[str, Any]]] = field(default_factory=dict) # type: ignore[misc] + messages: dict[str, list[Message]] = field(default_factory=dict) # type: ignore[misc] state: dict[str, Any] = field(default_factory=dict) # type: ignore[misc] - pending_request_info_events: dict[str, dict[str, Any]] = field(default_factory=dict) # type: ignore[misc] + pending_request_info_events: dict[str, WorkflowEvent[Any]] = field(default_factory=dict) # type: ignore[misc] # Runtime state iteration_count: int = 0 @@ -131,7 +138,16 @@ async def delete_checkpoint(self, checkpoint_id: str) -> bool: class FileCheckpointStorage: - """File-based checkpoint storage for persistence.""" + """File-based checkpoint storage for persistence. + + This storage implements a hybrid approach where the checkpoint metadata and structure are + stored in JSON format, while the actual state data (which may contain complex Python objects) + is serialized using pickle and embedded as base64-encoded strings within the JSON. This allows + for human-readable checkpoint files while preserving the ability to store complex Python objects. + + SECURITY WARNING: Checkpoints use pickle for data serialization. Only load checkpoints + from trusted sources. Loading a malicious checkpoint file can execute arbitrary code. 
+ """ def __init__(self, storage_path: str | Path): """Initialize the file storage.""" @@ -141,13 +157,16 @@ def __init__(self, storage_path: str | Path): async def save_checkpoint(self, checkpoint: WorkflowCheckpoint) -> str: """Save a checkpoint and return its ID.""" + from ._checkpoint_encoding import encode_checkpoint_value + file_path = self.storage_path / f"{checkpoint.checkpoint_id}.json" checkpoint_dict = asdict(checkpoint) + encoded_checkpoint = encode_checkpoint_value(checkpoint_dict) def _write_atomic() -> None: tmp_path = file_path.with_suffix(".json.tmp") with open(tmp_path, "w") as f: - json.dump(checkpoint_dict, f, indent=2, ensure_ascii=False) + json.dump(encoded_checkpoint, f, indent=2, ensure_ascii=False) os.replace(tmp_path, file_path) await asyncio.to_thread(_write_atomic) @@ -166,9 +185,12 @@ def _read() -> dict[str, Any]: with open(file_path) as f: return json.load(f) # type: ignore[no-any-return] - checkpoint_dict = await asyncio.to_thread(_read) + encoded_checkpoint = await asyncio.to_thread(_read) - checkpoint = WorkflowCheckpoint(**checkpoint_dict) + from ._checkpoint_encoding import decode_checkpoint_value + + decoded_checkpoint_dict = decode_checkpoint_value(encoded_checkpoint) + checkpoint = WorkflowCheckpoint.from_dict(decoded_checkpoint_dict) logger.info(f"Loaded checkpoint {checkpoint_id} from {file_path}") return checkpoint @@ -197,9 +219,13 @@ def _list_checkpoints() -> list[WorkflowCheckpoint]: for file_path in self.storage_path.glob("*.json"): try: with open(file_path) as f: - data = json.load(f) - if workflow_id is None or data.get("workflow_id") == workflow_id: - checkpoints.append(WorkflowCheckpoint.from_dict(data)) + encoded_checkpoint = json.load(f) + from ._checkpoint_encoding import decode_checkpoint_value + + decoded_checkpoint_dict = decode_checkpoint_value(encoded_checkpoint) + checkpoint = WorkflowCheckpoint.from_dict(decoded_checkpoint_dict) + if workflow_id is None or checkpoint.workflow_id == workflow_id: + 
checkpoints.append(checkpoint) except Exception as e: logger.warning(f"Failed to read checkpoint file {file_path}: {e}") return checkpoints diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py b/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py index 516a4547a0..ede7b1c28d 100644 --- a/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py +++ b/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py @@ -1,268 +1,157 @@ # Copyright (c) Microsoft. All rights reserved. -import contextlib -import importlib -import logging -import sys -from dataclasses import fields, is_dataclass -from typing import Any, cast +"""Checkpoint encoding using JSON structure with pickle+base64 for arbitrary data. -# Checkpoint serialization helpers -MODEL_MARKER = "__af_model__" -DATACLASS_MARKER = "__af_dataclass__" +This hybrid approach provides: +- Human-readable JSON structure for debugging and inspection of primitives and collections +- Full Python object fidelity via pickle for data values (non-JSON-native types) +- Base64 encoding to embed binary pickle data in JSON strings -# Guards to prevent runaway recursion while encoding arbitrary user data -_MAX_ENCODE_DEPTH = 100 -_CYCLE_SENTINEL = "" +SECURITY WARNING: Checkpoints use pickle for data serialization. Only load checkpoints +from trusted sources. Loading a malicious checkpoint file can execute arbitrary code. 
+""" -logger = logging.getLogger(__name__) +from __future__ import annotations + +import base64 +import pickle # noqa: S403 +from typing import Any + +from agent_framework import get_logger + +logger = get_logger(__name__) + +# Marker to identify pickled values in serialized JSON +_PICKLE_MARKER = "__pickled__" +_TYPE_MARKER = "__type__" + +# Types that are natively JSON-serializable and don't need pickling +_JSON_NATIVE_TYPES = (str, int, float, bool, type(None)) + + +class CheckpointDecodingError(Exception): + """Raised when checkpoint decoding fails due to type mismatch or corruption.""" def encode_checkpoint_value(value: Any) -> Any: - """Recursively encode values into JSON-serializable structures. + """Encode a Python value for checkpoint storage. - - Objects exposing to_dict/to_json -> { MODEL_MARKER: "module:Class", value: encoded } - - dataclass instances -> { DATACLASS_MARKER: "module:Class", value: {field: encoded} } - - dict -> encode keys as str and values recursively - - list/tuple/set -> list of encoded items - - other -> returned as-is if already JSON-serializable + JSON-native types (str, int, float, bool, None) pass through unchanged. + Collections (dict, list) are recursed with their values encoded. + All other types (dataclasses, custom objects, datetime, etc.) are pickled + and stored as base64-encoded strings. - Includes cycle and depth protection to avoid infinite recursion. - """ + Args: + value: Any Python value to encode. 
- def _enc(v: Any, stack: set[int], depth: int) -> Any: - # Depth guard - if depth > _MAX_ENCODE_DEPTH: - logger.debug(f"Max encode depth reached at depth={depth} for type={type(v)}") - return "" - - # Structured model handling (objects exposing to_dict/to_json) - if _supports_model_protocol(v): - cls = cast(type[Any], type(v)) # type: ignore - try: - if hasattr(v, "to_dict") and callable(getattr(v, "to_dict", None)): - raw = v.to_dict() # type: ignore[attr-defined] - strategy = "to_dict" - elif hasattr(v, "to_json") and callable(getattr(v, "to_json", None)): - serialized = v.to_json() # type: ignore[attr-defined] - if isinstance(serialized, (bytes, bytearray)): - try: - serialized = serialized.decode() - except Exception: - serialized = serialized.decode(errors="replace") - raw = serialized - strategy = "to_json" - else: - raise AttributeError("Structured model lacks serialization hooks") - return { - MODEL_MARKER: f"{cls.__module__}:{cls.__name__}", - "strategy": strategy, - "value": _enc(raw, stack, depth + 1), - } - except Exception as exc: # best-effort fallback - logger.debug(f"Structured model serialization failed for {cls}: {exc}") - return str(v) - - # Dataclasses (instances only) - if is_dataclass(v) and not isinstance(v, type): - oid = id(v) - if oid in stack: - logger.debug("Cycle detected while encoding dataclass instance") - return _CYCLE_SENTINEL - stack.add(oid) - try: - # type(v) already narrows sufficiently; cast was redundant - dc_cls: type[Any] = type(v) - field_values: dict[str, Any] = {} - for f in fields(v): - field_values[f.name] = _enc(getattr(v, f.name), stack, depth + 1) - return { - DATACLASS_MARKER: f"{dc_cls.__module__}:{dc_cls.__name__}", - "value": field_values, - } - finally: - stack.remove(oid) - - # Collections - if isinstance(v, dict): - v_dict = cast("dict[object, object]", v) - oid = id(v_dict) - if oid in stack: - logger.debug("Cycle detected while encoding dict") - return _CYCLE_SENTINEL - stack.add(oid) - try: - json_dict: 
dict[str, Any] = {} - for k_any, val_any in v_dict.items(): # type: ignore[assignment] - k_str: str = str(k_any) - json_dict[k_str] = _enc(val_any, stack, depth + 1) - return json_dict - finally: - stack.remove(oid) - - if isinstance(v, (list, tuple, set)): - iterable_v = cast("list[object] | tuple[object, ...] | set[object]", v) - oid = id(iterable_v) - if oid in stack: - logger.debug("Cycle detected while encoding iterable") - return _CYCLE_SENTINEL - stack.add(oid) - try: - seq: list[object] = list(iterable_v) - encoded_list: list[Any] = [] - for item in seq: - encoded_list.append(_enc(item, stack, depth + 1)) - return encoded_list - finally: - stack.remove(oid) - - # Primitives (or unknown objects): ensure JSON-serializable - if isinstance(v, (str, int, float, bool)) or v is None: - return v - # Fallback: stringify unknown objects to avoid JSON serialization errors - try: - return str(v) - except Exception: - return f"<{type(v).__name__}>" - - return _enc(value, set(), 0) + Returns: + A JSON-serializable representation of the value. + """ + return _encode(value) def decode_checkpoint_value(value: Any) -> Any: - """Recursively decode values previously encoded by encode_checkpoint_value.""" + """Decode a value from checkpoint storage. + + Reverses the encoding performed by encode_checkpoint_value. + Pickled values (identified by _PICKLE_MARKER) are decoded and unpickled. + + WARNING: Only call this with trusted data. Pickle can execute + arbitrary code during deserialization. + + Args: + value: A JSON-deserialized value from checkpoint storage. + + Returns: + The original Python value. + + Raises: + CheckpointDecodingError: If the unpickled object's type doesn't match + the recorded type, indicating corruption or tampering. 
+ """ + return _decode(value) + + +def _encode(value: Any) -> Any: + """Recursively encode a value for JSON storage.""" + # JSON-native types pass through + if isinstance(value, _JSON_NATIVE_TYPES): + return value + + # Recursively encode dict values (keys become strings) if isinstance(value, dict): - value_dict = cast(dict[str, Any], value) # encoded form always uses string keys - # Structured model marker handling - if MODEL_MARKER in value_dict and "value" in value_dict: - type_key: str | None = value_dict.get(MODEL_MARKER) # type: ignore[assignment] - strategy: str | None = value_dict.get("strategy") # type: ignore[assignment] - raw_encoded: Any = value_dict.get("value") - decoded_payload = decode_checkpoint_value(raw_encoded) - if isinstance(type_key, str): - try: - cls = _import_qualified_name(type_key) - except Exception as exc: - logger.debug(f"Failed to import structured model {type_key}: {exc}") - cls = None - - if cls is not None: - # Verify the class actually supports the model protocol - if not _class_supports_model_protocol(cls): - logger.debug(f"Class {type_key} does not support model protocol; returning raw value") - return decoded_payload - if strategy == "to_dict" and hasattr(cls, "from_dict"): - with contextlib.suppress(Exception): - return cls.from_dict(decoded_payload) - if strategy == "to_json" and hasattr(cls, "from_json"): - if isinstance(decoded_payload, (str, bytes, bytearray)): - with contextlib.suppress(Exception): - return cls.from_json(decoded_payload) - if isinstance(decoded_payload, dict) and hasattr(cls, "from_dict"): - with contextlib.suppress(Exception): - return cls.from_dict(decoded_payload) - return decoded_payload - # Dataclass marker handling - if DATACLASS_MARKER in value_dict and "value" in value_dict: - type_key_dc: str | None = value_dict.get(DATACLASS_MARKER) # type: ignore[assignment] - raw_dc: Any = value_dict.get("value") - decoded_raw = decode_checkpoint_value(raw_dc) - if isinstance(type_key_dc, str): - try: - 
module_name, class_name = type_key_dc.split(":", 1) - module = sys.modules.get(module_name) - if module is None: - module = importlib.import_module(module_name) - cls_dc: Any = getattr(module, class_name) - # Verify the class is actually a dataclass type (not an instance) - if not isinstance(cls_dc, type) or not is_dataclass(cls_dc): - logger.debug(f"Class {type_key_dc} is not a dataclass type; returning raw value") - return decoded_raw - constructed = _instantiate_checkpoint_dataclass(cls_dc, decoded_raw) - if constructed is not None: - return constructed - except Exception as exc: - logger.debug(f"Failed to decode dataclass {type_key_dc}: {exc}; returning raw value") - return decoded_raw - - # Regular dict: decode recursively - decoded: dict[str, Any] = {} - for k_any, v_any in value_dict.items(): - decoded[k_any] = decode_checkpoint_value(v_any) - return decoded + return {str(k): _encode(v) for k, v in value.items()} # type: ignore + + # Recursively encode list/tuple/set items + if isinstance(value, (list, tuple, set)): + return [_encode(item) for item in value] # type: ignore + + # Everything else: pickle and base64 encode + return { + _PICKLE_MARKER: _pickle_to_base64(value), + _TYPE_MARKER: _type_to_key(type(value)), # type: ignore + } + + +def _decode(value: Any) -> Any: + """Recursively decode a value from JSON storage.""" + # JSON-native types pass through + if isinstance(value, _JSON_NATIVE_TYPES): + return value + + # Handle encoded dicts + if isinstance(value, dict): + # Pickled value: decode, unpickle, and verify type + if _PICKLE_MARKER in value and _TYPE_MARKER in value: + obj = _base64_to_unpickle(value[_PICKLE_MARKER]) # type: ignore + _verify_type(obj, value.get(_TYPE_MARKER)) # type: ignore + return obj + + # Regular dict: decode values recursively + return {k: _decode(v) for k, v in value.items()} # type: ignore + + # Handle encoded lists if isinstance(value, list): - # After isinstance check, treat value as list[Any] for decoding - value_list: 
list[Any] = value # type: ignore[assignment] - return [decode_checkpoint_value(v_any) for v_any in value_list] + return [_decode(item) for item in value] # type: ignore + return value -def _class_supports_model_protocol(cls: type[Any]) -> bool: - """Check if a class type supports the model serialization protocol. +def _verify_type(obj: Any, expected_type_key: str | None) -> None: + """Verify that an unpickled object matches its recorded type. - Checks for pairs of serialization/deserialization methods: - - to_dict/from_dict - - to_json/from_json + Args: + obj: The unpickled object. + expected_type_key: The recorded type key (module:qualname format). + + Raises: + CheckpointDecodingError: If the types don't match. """ - has_to_dict = hasattr(cls, "to_dict") and callable(getattr(cls, "to_dict", None)) - has_from_dict = hasattr(cls, "from_dict") and callable(getattr(cls, "from_dict", None)) - - has_to_json = hasattr(cls, "to_json") and callable(getattr(cls, "to_json", None)) - has_from_json = hasattr(cls, "from_json") and callable(getattr(cls, "from_json", None)) - - return (has_to_dict and has_from_dict) or (has_to_json and has_from_json) - - -def _supports_model_protocol(obj: object) -> bool: - """Detect objects that expose dictionary serialization hooks.""" - try: - obj_type: type[Any] = type(obj) - except Exception: - return False - - return _class_supports_model_protocol(obj_type) - - -def _import_qualified_name(qualname: str) -> type[Any] | None: - if ":" not in qualname: - return None - module_name, class_name = qualname.split(":", 1) - module = sys.modules.get(module_name) - if module is None: - module = importlib.import_module(module_name) - attr: Any = module - for part in class_name.split("."): - attr = getattr(attr, part) - return attr if isinstance(attr, type) else None - - -def _instantiate_checkpoint_dataclass(cls: type[Any], payload: Any) -> Any | None: - if not isinstance(cls, type): - logger.debug(f"Checkpoint decoder received non-type dataclass 
reference: {cls!r}") - return None - - if isinstance(payload, dict): - try: - return cls(**payload) # type: ignore[arg-type] - except TypeError as exc: - logger.debug(f"Checkpoint decoder could not call {cls.__name__}(**payload): {exc}") - except Exception as exc: - logger.warning(f"Checkpoint decoder encountered unexpected error calling {cls.__name__}(**payload): {exc}") - try: - instance = object.__new__(cls) - except Exception as exc: - logger.debug(f"Checkpoint decoder could not allocate {cls.__name__} without __init__: {exc}") - return None - for key, val in payload.items(): # type: ignore[attr-defined] - try: - setattr(instance, key, val) # type: ignore[arg-type] - except Exception as exc: - logger.debug(f"Checkpoint decoder could not set attribute {key} on {cls.__name__}: {exc}") - return instance - - try: - return cls(payload) # type: ignore[call-arg] - except TypeError as exc: - logger.debug(f"Checkpoint decoder could not call {cls.__name__}({payload!r}): {exc}") - except Exception as exc: - logger.warning(f"Checkpoint decoder encountered unexpected error calling {cls.__name__}({payload!r}): {exc}") - return None + if expected_type_key is None: + # No type recorded (legacy checkpoint), skip verification + return + + actual_type_key = _type_to_key(type(obj)) # type: ignore + if actual_type_key != expected_type_key: + raise CheckpointDecodingError( + f"Type mismatch during checkpoint decoding: " + f"expected '{expected_type_key}', got '{actual_type_key}'. " + f"The checkpoint may be corrupted or tampered with." 
+ ) + + +def _pickle_to_base64(value: Any) -> str: + """Pickle a value and encode as base64 string.""" + pickled = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL) + return base64.b64encode(pickled).decode("ascii") + + +def _base64_to_unpickle(encoded: str) -> Any: + """Decode base64 string and unpickle.""" + pickled = base64.b64decode(encoded.encode("ascii")) + return pickle.loads(pickled) # noqa: S301 + + +def _type_to_key(t: type) -> str: + """Convert a type to a module:qualname string.""" + return f"{t.__module__}:{t.__qualname__}" diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint_summary.py b/python/packages/core/agent_framework/_workflows/_checkpoint_summary.py deleted file mode 100644 index fe00c1a287..0000000000 --- a/python/packages/core/agent_framework/_workflows/_checkpoint_summary.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import logging -from dataclasses import dataclass - -from ._checkpoint import WorkflowCheckpoint -from ._const import EXECUTOR_STATE_KEY -from ._events import WorkflowEvent - -logger = logging.getLogger(__name__) - - -@dataclass -class WorkflowCheckpointSummary: - """Human-readable summary of a workflow checkpoint.""" - - checkpoint_id: str - timestamp: str - iteration_count: int - targets: list[str] - executor_ids: list[str] - status: str - pending_request_info_events: list[WorkflowEvent] - - -def get_checkpoint_summary(checkpoint: WorkflowCheckpoint) -> WorkflowCheckpointSummary: - targets = sorted(checkpoint.messages.keys()) - executor_ids = sorted(checkpoint.state.get(EXECUTOR_STATE_KEY, {}).keys()) - pending_request_info_events = [ - WorkflowEvent.from_dict(request) for request in checkpoint.pending_request_info_events.values() - ] - - status = "idle" - if pending_request_info_events: - status = "awaiting request response" - elif not checkpoint.messages and "finalise" in executor_ids: - status = "completed" - elif checkpoint.messages: - status = "awaiting next 
superstep" - - return WorkflowCheckpointSummary( - checkpoint_id=checkpoint.checkpoint_id, - timestamp=checkpoint.timestamp, - iteration_count=checkpoint.iteration_count, - targets=targets, - executor_ids=executor_ids, - status=status, - pending_request_info_events=pending_request_info_events, - ) diff --git a/python/packages/core/agent_framework/_workflows/_conversation_state.py b/python/packages/core/agent_framework/_workflows/_conversation_state.py deleted file mode 100644 index 22433e6775..0000000000 --- a/python/packages/core/agent_framework/_workflows/_conversation_state.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from collections.abc import Iterable -from typing import Any, cast - -from agent_framework import ChatMessage - -from ._checkpoint_encoding import decode_checkpoint_value, encode_checkpoint_value - -"""Utilities for serializing and deserializing chat conversations for persistence. - -These helpers convert rich `ChatMessage` instances to checkpoint-friendly payloads -using the same encoding primitives as the workflow runner. This preserves -`additional_properties` and other metadata without relying on unsafe mechanisms -such as pickling. 
-""" - - -def encode_chat_messages(messages: Iterable[ChatMessage]) -> list[dict[str, Any]]: - """Serialize chat messages into checkpoint-safe payloads.""" - encoded: list[dict[str, Any]] = [] - for message in messages: - encoded.append({ - "role": encode_checkpoint_value(message.role), - "contents": [encode_checkpoint_value(content) for content in message.contents], - "author_name": message.author_name, - "message_id": message.message_id, - "additional_properties": { - key: encode_checkpoint_value(value) for key, value in message.additional_properties.items() - }, - }) - return encoded - - -def decode_chat_messages(payload: Iterable[dict[str, Any]]) -> list[ChatMessage]: - """Restore chat messages from checkpoint-safe payloads.""" - restored: list[ChatMessage] = [] - for item in payload: - if not isinstance(item, dict): - continue - - role_value = decode_checkpoint_value(item.get("role")) - if isinstance(role_value, str): - role = role_value - elif isinstance(role_value, dict) and "value" in role_value: - # Handle legacy serialization format - role = role_value["value"] - else: - role = "assistant" - - contents_field = item.get("contents", []) - contents: list[Any] = [] - if isinstance(contents_field, list): - contents_iter: list[Any] = contents_field # type: ignore[assignment] - for entry in contents_iter: - decoded_entry: Any = decode_checkpoint_value(entry) - contents.append(decoded_entry) - - additional_field = item.get("additional_properties", {}) - additional: dict[str, Any] = {} - if isinstance(additional_field, dict): - additional_dict = cast(dict[str, Any], additional_field) - for key, value in additional_dict.items(): - additional[key] = decode_checkpoint_value(value) - - restored.append( - ChatMessage( # type: ignore[call-overload] - role=role, - contents=contents, - author_name=item.get("author_name"), - message_id=item.get("message_id"), - additional_properties=additional, - ) - ) - return restored diff --git 
a/python/packages/core/agent_framework/_workflows/_events.py b/python/packages/core/agent_framework/_workflows/_events.py index 18e974e3e7..c4694bf31b 100644 --- a/python/packages/core/agent_framework/_workflows/_events.py +++ b/python/packages/core/agent_framework/_workflows/_events.py @@ -12,7 +12,6 @@ from enum import Enum from typing import Any, Generic, Literal, cast -from ._checkpoint_encoding import decode_checkpoint_value, encode_checkpoint_value from ._typing_utils import deserialize_type, serialize_type if sys.version_info >= (3, 13): @@ -396,7 +395,7 @@ def to_dict(self) -> dict[str, Any]: raise ValueError(f"to_dict() only supported for 'request_info' events, got '{self.type}'") return { "type": self.type, - "data": encode_checkpoint_value(self.data), + "data": self.data, "request_id": self._request_id, "source_executor_id": self._source_executor_id, "request_type": serialize_type(self._request_type) if self._request_type else None, @@ -410,7 +409,7 @@ def from_dict(cls, data: dict[str, Any]) -> WorkflowEvent[Any]: if prop not in data: raise KeyError(f"Missing '{prop}' field in WorkflowEvent dictionary.") - request_data = decode_checkpoint_value(data["data"]) + request_data = data["data"] request_type = deserialize_type(data["request_type"]) if request_type is not type(request_data): diff --git a/python/packages/core/agent_framework/_workflows/_runner.py b/python/packages/core/agent_framework/_workflows/_runner.py index f3a475e034..0de6db1d75 100644 --- a/python/packages/core/agent_framework/_workflows/_runner.py +++ b/python/packages/core/agent_framework/_workflows/_runner.py @@ -8,11 +8,6 @@ from typing import Any from ._checkpoint import CheckpointStorage, WorkflowCheckpoint -from ._checkpoint_encoding import ( - DATACLASS_MARKER, - MODEL_MARKER, - decode_checkpoint_value, -) from ._const import EXECUTOR_STATE_KEY from ._edge import EdgeGroup from ._edge_runner import EdgeRunner, create_edge_runner @@ -41,6 +36,7 @@ def __init__( executors: dict[str, 
Executor], state: State, ctx: RunnerContext, + graph_signature_hash: str, max_iterations: int = 100, workflow_id: str | None = None, ) -> None: @@ -51,20 +47,24 @@ def __init__( executors: Map of executor IDs to executor instances. state: The state for the workflow. ctx: The runner context for the workflow. + graph_signature_hash: A hash representing the workflow graph topology for checkpoint validation. max_iterations: The maximum number of iterations to run. workflow_id: The workflow ID for checkpointing. """ + # Workflow instance related attributes self._executors = executors self._edge_runners = [create_edge_runner(group, executors) for group in edge_groups] self._edge_runner_map = self._parse_edge_runners(self._edge_runners) self._ctx = ctx + self._graph_signature_hash = graph_signature_hash + self._workflow_id = workflow_id + + # Runner state related attributes self._iteration = 0 self._max_iterations = max_iterations self._state = state - self._workflow_id = workflow_id self._running = False self._resumed_from_checkpoint = False # Track whether we resumed - self.graph_signature_hash: str | None = None # Set workflow ID in context if provided if workflow_id: @@ -92,13 +92,12 @@ async def run_until_convergence(self) -> AsyncGenerator[WorkflowEvent, None]: for event in await self._ctx.drain_events(): yield event - # Create first checkpoint if there are messages from initial execution - if await self._ctx.has_messages() and self._ctx.has_checkpointing(): - if not self._resumed_from_checkpoint: - logger.info("Creating checkpoint after initial execution") - await self._create_checkpoint_if_enabled("after_initial_execution") - else: - logger.info("Skipping 'after_initial_execution' checkpoint because we resumed from a checkpoint") + # Create the first checkpoint. 
Checkpoints are usually considered to be created at the end of an iteration, + # we can think of the first checkpoint as being created at the end of a "superstep 0" which captures the + # initial state before any iterations have run. This is only needed if it's not a resume from checkpoint + # scenario, since if we are resuming, the caller should have already created a checkpoint to resume from. + if await self._ctx.has_messages() and not self._resumed_from_checkpoint: + await self._create_checkpoint_if_enabled() while self._iteration < self._max_iterations: logger.info(f"Starting superstep {self._iteration + 1}") @@ -145,7 +144,7 @@ async def run_until_convergence(self) -> AsyncGenerator[WorkflowEvent, None]: self._state.commit() # Create checkpoint after each superstep iteration - await self._create_checkpoint_if_enabled(f"superstep_{self._iteration}") + await self._create_checkpoint_if_enabled() yield WorkflowEvent.superstep_completed(iteration=self._iteration) @@ -169,19 +168,6 @@ async def _deliver_message_inner(edge_runner: EdgeRunner, message: Message) -> b """Inner loop to deliver a single message through an edge runner.""" return await edge_runner.send_message(message, self._state, self._ctx) - def _normalize_message_payload(message: Message) -> None: - data = message.data - if not isinstance(data, dict): - return - if MODEL_MARKER not in data and DATACLASS_MARKER not in data: - return - try: - decoded = decode_checkpoint_value(data) - except Exception as exc: # pragma: no cover - defensive - logger.debug("Failed to decode checkpoint payload during delivery: %s", exc) - return - message.data = decoded - # Route all messages through normal workflow edges associated_edge_runners = self._edge_runner_map.get(source_executor_id, []) if not associated_edge_runners: @@ -190,7 +176,6 @@ def _normalize_message_payload(message: Message) -> None: return for message in messages: - _normalize_message_payload(message) # Deliver a message through all edge runners 
associated with the source executor concurrently. tasks = [_deliver_message_inner(edge_runner, message) for edge_runner in associated_edge_runners] await asyncio.gather(*tasks) @@ -199,7 +184,7 @@ def _normalize_message_payload(message: Message) -> None: tasks = [_deliver_messages(source_executor_id, messages) for source_executor_id, messages in messages.items()] await asyncio.gather(*tasks) - async def _create_checkpoint_if_enabled(self, checkpoint_type: str) -> str | None: + async def _create_checkpoint_if_enabled(self) -> str | None: """Create a checkpoint if checkpointing is enabled and attach a label and metadata.""" if not self._ctx.has_checkpointing(): return None @@ -207,22 +192,16 @@ async def _create_checkpoint_if_enabled(self, checkpoint_type: str) -> str | Non try: # Snapshot executor states await self._save_executor_states() - checkpoint_category = "initial" if checkpoint_type == "after_initial_execution" else "superstep" - metadata = { - "superstep": self._iteration, - "checkpoint_type": checkpoint_category, - } - if self.graph_signature_hash: - metadata["graph_signature"] = self.graph_signature_hash checkpoint_id = await self._ctx.create_checkpoint( self._state, self._iteration, - metadata=metadata, + metadata={"graph_signature": self._graph_signature_hash}, ) - logger.info(f"Created {checkpoint_type} checkpoint: {checkpoint_id}") + + logger.info(f"Created checkpoint: {checkpoint_id}") return checkpoint_id except Exception as e: - logger.warning(f"Failed to create {checkpoint_type} checkpoint: {e}") + logger.warning(f"Failed to create checkpoint: {e}") return None async def restore_from_checkpoint( @@ -260,14 +239,13 @@ async def restore_from_checkpoint( raise WorkflowCheckpointException(f"Checkpoint {checkpoint_id} not found") # Validate the loaded checkpoint against the workflow - graph_hash = getattr(self, "graph_signature_hash", None) checkpoint_hash = (checkpoint.metadata or {}).get("graph_signature") - if graph_hash and checkpoint_hash and 
graph_hash != checkpoint_hash: + if checkpoint_hash and self._graph_signature_hash != checkpoint_hash: raise WorkflowCheckpointException( "Workflow graph has changed since the checkpoint was created. " "Please rebuild the original workflow before resuming." ) - if graph_hash and not checkpoint_hash: + if self._graph_signature_hash and not checkpoint_hash: logger.warning( "Checkpoint %s does not include graph signature metadata; skipping topology validation.", checkpoint_id, @@ -275,7 +253,7 @@ async def restore_from_checkpoint( self._workflow_id = checkpoint.workflow_id # Restore state - self._state.import_state(decode_checkpoint_value(checkpoint.state)) + self._state.import_state(checkpoint.state) # Restore executor states using the restored state await self._restore_executor_states() # Apply the checkpoint to the context @@ -291,64 +269,19 @@ async def restore_from_checkpoint( raise WorkflowCheckpointException(f"Failed to restore from checkpoint {checkpoint_id}") from e async def _save_executor_states(self) -> None: - """Populate executor state by calling checkpoint hooks on executors. - - Backward compatibility behavior: - - If an executor defines an async or sync method `snapshot_state(self) -> dict`, use it. - - Else if it has a plain attribute `state` that is a dict, use that. - - Updated behavior: - - Executors should implement `on_checkpoint_save(self) -> dict` to provide state. - - This method will try the backward compatibility behavior first; if that does not yield state, - it falls back to the updated behavior. - - Only JSON-serializable dicts should be provided by executors. 
- """ + """Populate executor state by calling checkpoint hooks on executors.""" for exec_id, executor in self._executors.items(): - state_dict: dict[str, Any] | None = None - # Try backward compatibility behavior first - # TODO(@taochen): Remove backward compatibility - snapshot = getattr(executor, "snapshot_state", None) - try: - if callable(snapshot): - maybe = snapshot() - if asyncio.iscoroutine(maybe): # type: ignore[arg-type] - maybe = await maybe # type: ignore[assignment] - if isinstance(maybe, dict): - state_dict = maybe # type: ignore[assignment] - else: - state_attr = getattr(executor, "state", None) - if isinstance(state_attr, dict): - state_dict = state_attr # type: ignore[assignment] - except Exception as ex: # pragma: no cover - logger.debug(f"Executor {exec_id} snapshot_state failed: {ex}") - - if state_dict is None: - # Try the updated behavior only if backward compatibility did not yield state - try: - state_dict = await executor.on_checkpoint_save() - except Exception as ex: # pragma: no cover - raise WorkflowCheckpointException(f"Executor {exec_id} on_checkpoint_save failed") from ex - + # Try the updated behavior only if backward compatibility did not yield state try: + state_dict = await executor.on_checkpoint_save() await self._set_executor_state(exec_id, state_dict) + except WorkflowCheckpointException: + raise except Exception as ex: # pragma: no cover - logger.debug(f"Failed to persist state for executor {exec_id}: {ex}") + raise WorkflowCheckpointException(f"Executor {exec_id} on_checkpoint_save failed") from ex async def _restore_executor_states(self) -> None: - """Restore executor state by calling restore hooks on executors. - - Backward compatibility behavior: - - If an executor defines an async or sync method `restore_state(self, state: dict)`, use it. - - Else, skip restoration for that executor. - - Updated behavior: - - Executors should implement `on_checkpoint_restore(self, state: dict)` to restore state. 
- - This method will try the backward compatibility behavior first; if that does not restore state, - it falls back to the updated behavior. - """ + """Restore executor state by calling restore hooks on executors.""" has_executor_states = self._state.has(EXECUTOR_STATE_KEY) if not has_executor_states: return @@ -369,29 +302,11 @@ async def _restore_executor_states(self) -> None: if not executor: raise WorkflowCheckpointException(f"Executor {executor_id} not found during state restoration.") - # Try backward compatibility behavior first - # TODO(@taochen): Remove backward compatibility - restored = False - restore_method = getattr(executor, "restore_state", None) + # Try the updated behavior only if backward compatibility did not restore try: - if callable(restore_method): - maybe = restore_method(state) - if asyncio.iscoroutine(maybe): # type: ignore[arg-type] - await maybe # type: ignore[arg-type] - restored = True + await executor.on_checkpoint_restore(state) # pyright: ignore[reportUnknownArgumentType] except Exception as ex: # pragma: no cover - defensive - raise WorkflowCheckpointException(f"Executor {executor_id} restore_state failed") from ex - - if not restored: - # Try the updated behavior only if backward compatibility did not restore - try: - await executor.on_checkpoint_restore(state) # pyright: ignore[reportUnknownArgumentType] - restored = True - except Exception as ex: # pragma: no cover - defensive - raise WorkflowCheckpointException(f"Executor {executor_id} on_checkpoint_restore failed") from ex - - if not restored: - logger.debug(f"Executor {executor_id} does not support state restoration; skipping.") + raise WorkflowCheckpointException(f"Executor {executor_id} on_checkpoint_restore failed") from ex def _parse_edge_runners(self, edge_runners: list[EdgeRunner]) -> dict[str, list[EdgeRunner]]: """Parse the edge runners of the workflow into a mapping where each source executor ID maps to its edge runners. 
diff --git a/python/packages/core/agent_framework/_workflows/_runner_context.py b/python/packages/core/agent_framework/_workflows/_runner_context.py index ed81026245..885fe65119 100644 --- a/python/packages/core/agent_framework/_workflows/_runner_context.py +++ b/python/packages/core/agent_framework/_workflows/_runner_context.py @@ -4,7 +4,6 @@ import asyncio import logging -import sys import uuid from copy import copy from dataclasses import dataclass @@ -12,17 +11,11 @@ from typing import Any, Protocol, TypeVar, runtime_checkable from ._checkpoint import CheckpointStorage, WorkflowCheckpoint -from ._checkpoint_encoding import decode_checkpoint_value, encode_checkpoint_value from ._const import INTERNAL_SOURCE_ID from ._events import WorkflowEvent from ._state import State from ._typing_utils import is_instance_of -if sys.version_info >= (3, 11): - from typing import TypedDict # type: ignore # pragma: no cover -else: - from typing_extensions import TypedDict # type: ignore # pragma: no cover - logger = logging.getLogger(__name__) T = TypeVar("T") @@ -69,13 +62,13 @@ def source_span_id(self) -> str | None: def to_dict(self) -> dict[str, Any]: """Convert the Message to a dictionary for serialization.""" return { - "data": encode_checkpoint_value(self.data), + "data": self.data, "source_id": self.source_id, "target_id": self.target_id, "type": self.type.value, "trace_contexts": self.trace_contexts, "source_span_ids": self.source_span_ids, - "original_request_info_event": encode_checkpoint_value(self.original_request_info_event), + "original_request_info_event": self.original_request_info_event, } @staticmethod @@ -89,28 +82,16 @@ def from_dict(data: dict[str, Any]) -> Message: raise KeyError("Missing 'source_id' field in Message dictionary.") return Message( - data=decode_checkpoint_value(data["data"]), + data=data["data"], source_id=data["source_id"], target_id=data.get("target_id"), type=MessageType(data.get("type", "standard")), 
trace_contexts=data.get("trace_contexts"), source_span_ids=data.get("source_span_ids"), - original_request_info_event=decode_checkpoint_value(data.get("original_request_info_event")), + original_request_info_event=data.get("original_request_info_event"), ) -class _WorkflowState(TypedDict): - """TypedDict representing the serializable state of a workflow execution. - - This includes all state data needed for checkpointing and restoration. - """ - - messages: dict[str, list[dict[str, Any]]] - state: dict[str, Any] - iteration_count: int - pending_request_info_events: dict[str, dict[str, Any]] - - @runtime_checkable class RunnerContext(Protocol): """Protocol for the execution context used by the runner. @@ -384,15 +365,12 @@ async def create_checkpoint( if not storage: raise ValueError("Checkpoint storage not configured") - self._workflow_id = self._workflow_id or str(uuid.uuid4()) - workflow_state = self._get_serialized_workflow_state(state, iteration_count) - checkpoint = WorkflowCheckpoint( - workflow_id=self._workflow_id, - messages=workflow_state["messages"], - state=workflow_state["state"], - pending_request_info_events=workflow_state["pending_request_info_events"], - iteration_count=workflow_state["iteration_count"], + workflow_id=self._workflow_id or str(uuid.uuid4()), + messages=dict(self._messages), + state=state.export_state(), + pending_request_info_events=dict(self._pending_request_info_events), + iteration_count=iteration_count, metadata=metadata or {}, ) checkpoint_id = await storage.save_checkpoint(checkpoint) @@ -422,13 +400,11 @@ async def apply_checkpoint(self, checkpoint: WorkflowCheckpoint) -> None: self._messages.clear() messages_data = checkpoint.messages for source_id, message_list in messages_data.items(): - self._messages[source_id] = [Message.from_dict(msg) for msg in message_list] + self._messages[source_id] = list(message_list) # Restore pending request info events self._pending_request_info_events.clear() - pending_requests_data = 
checkpoint.pending_request_info_events - for request_id, request_data in pending_requests_data.items(): - request_info_event = WorkflowEvent.from_dict(request_data) + for request_id, request_info_event in checkpoint.pending_request_info_events.items(): self._pending_request_info_events[request_id] = request_info_event await self.add_event(request_info_event) @@ -456,22 +432,6 @@ def is_streaming(self) -> bool: """ return self._streaming - def _get_serialized_workflow_state(self, state: State, iteration_count: int) -> _WorkflowState: - serialized_messages: dict[str, list[dict[str, Any]]] = {} - for source_id, message_list in self._messages.items(): - serialized_messages[source_id] = [msg.to_dict() for msg in message_list] - - serialized_pending_request_info_events: dict[str, dict[str, Any]] = { - request_id: request.to_dict() for request_id, request in self._pending_request_info_events.items() - } - - return { - "messages": serialized_messages, - "state": encode_checkpoint_value(state.export_state()), - "iteration_count": iteration_count, - "pending_request_info_events": serialized_pending_request_info_events, - } - async def add_request_info_event(self, event: WorkflowEvent[Any]) -> None: """Add a request_info event to the context and track it for correlation. diff --git a/python/packages/core/agent_framework/_workflows/_workflow.py b/python/packages/core/agent_framework/_workflows/_workflow.py index f12e9c9b2a..8f84ee7301 100644 --- a/python/packages/core/agent_framework/_workflows/_workflow.py +++ b/python/packages/core/agent_framework/_workflows/_workflow.py @@ -214,6 +214,9 @@ def __init__( self.executors, self._state, runner_context, + # Capture a canonical fingerprint of the workflow graph so checkpoints + # can assert they are resumed with an equivalent topology. 
+ self._hash_graph_signature(self._compute_graph_signature()), max_iterations=max_iterations, workflow_id=self.id, ) @@ -221,12 +224,6 @@ def __init__( # Flag to prevent concurrent workflow executions self._is_running = False - # Capture a canonical fingerprint of the workflow graph so checkpoints - # can assert they are resumed with an equivalent topology. - self._graph_signature = self._compute_graph_signature() - self._graph_signature_hash = self._hash_graph_signature(self._graph_signature) - self._runner.graph_signature_hash = self._graph_signature_hash - def _ensure_not_running(self) -> None: """Ensure the workflow is not already running.""" if self._is_running: @@ -786,10 +783,6 @@ def _hash_graph_signature(signature: dict[str, Any]) -> str: canonical = json.dumps(signature, sort_keys=True, separators=(",", ":")) return hashlib.sha256(canonical.encode("utf-8")).hexdigest() - @property - def graph_signature_hash(self) -> str: - return self._graph_signature_hash - @property def input_types(self) -> list[type[Any] | types.UnionType]: """Get the input types of the workflow. 
diff --git a/python/packages/core/agent_framework/_workflows/_workflow_executor.py b/python/packages/core/agent_framework/_workflows/_workflow_executor.py index b83c826873..e1e245e03c 100644 --- a/python/packages/core/agent_framework/_workflows/_workflow_executor.py +++ b/python/packages/core/agent_framework/_workflows/_workflow_executor.py @@ -11,7 +11,7 @@ if TYPE_CHECKING: from ._workflow import Workflow -from ._checkpoint_encoding import decode_checkpoint_value, encode_checkpoint_value +from ._checkpoint_encoding import decode_checkpoint_value from ._const import WORKFLOW_RUN_KWARGS_KEY from ._events import ( WorkflowEvent, @@ -454,8 +454,7 @@ async def on_checkpoint_save(self) -> dict[str, Any]: """Get the current state of the WorkflowExecutor for checkpointing purposes.""" return { "execution_contexts": { - execution_id: encode_checkpoint_value(execution_context) - for execution_id, execution_context in self._execution_contexts.items() + execution_id: execution_context for execution_id, execution_context in self._execution_contexts.items() }, "request_to_execution": dict(self._request_to_execution), } diff --git a/python/packages/core/tests/workflow/test_agent_executor.py b/python/packages/core/tests/workflow/test_agent_executor.py index 3cbd369bf4..27ce31af39 100644 --- a/python/packages/core/tests/workflow/test_agent_executor.py +++ b/python/packages/core/tests/workflow/test_agent_executor.py @@ -87,12 +87,10 @@ async def test_agent_executor_checkpoint_stores_and_restores_state() -> None: checkpoints = await storage.list_checkpoints() assert len(checkpoints) > 0 - # Find a suitable checkpoint to restore (prefer superstep checkpoint) + # Get the second checkpoint which should contain the state after processing + # the first message by the start executor in the sequential workflow checkpoints.sort(key=lambda cp: cp.timestamp) - restore_checkpoint = next( - (cp for cp in checkpoints if (cp.metadata or {}).get("checkpoint_type") == "superstep"), - 
checkpoints[-1], - ) + restore_checkpoint = checkpoints[1] # Verify checkpoint contains executor state with both cache and thread assert "_executor_state" in restore_checkpoint.state diff --git a/python/packages/core/tests/workflow/test_checkpoint_decode.py b/python/packages/core/tests/workflow/test_checkpoint_decode.py index 431c70cc3c..99daa8cedb 100644 --- a/python/packages/core/tests/workflow/test_checkpoint_decode.py +++ b/python/packages/core/tests/workflow/test_checkpoint_decode.py @@ -1,16 +1,17 @@ # Copyright (c) Microsoft. All rights reserved. -from dataclasses import dataclass # noqa: I001 +from dataclasses import dataclass +from datetime import datetime, timezone from typing import Any, cast +import pytest from agent_framework._workflows._checkpoint_encoding import ( - DATACLASS_MARKER, - MODEL_MARKER, + _TYPE_MARKER, # type: ignore + CheckpointDecodingError, decode_checkpoint_value, encode_checkpoint_value, ) -from agent_framework._workflows._typing_utils import is_instance_of @dataclass @@ -30,7 +31,22 @@ class SampleResponse: request_id: str -def test_decode_dataclass_with_nested_request() -> None: +# --- Tests for round-trip encode/decode --- + + +def test_roundtrip_simple_dataclass() -> None: + """Test encoding and decoding of a simple dataclass.""" + original = SampleRequest(request_id="test-123", prompt="test prompt") + + encoded = encode_checkpoint_value(original) + decoded = cast(SampleRequest, decode_checkpoint_value(encoded)) + + assert isinstance(decoded, SampleRequest) + assert decoded.request_id == "test-123" + assert decoded.prompt == "test prompt" + + +def test_roundtrip_dataclass_with_nested_request() -> None: """Test that dataclass with nested dataclass fields can be encoded and decoded correctly.""" original = SampleResponse( data="approve", @@ -49,45 +65,7 @@ def test_decode_dataclass_with_nested_request() -> None: assert decoded.original_request.request_id == "abc" -def test_is_instance_of_coerces_nested_dataclass_dict() -> None: - 
"""Test that is_instance_of can handle nested structures with dict conversion.""" - response = SampleResponse( - data="approve", - original_request=SampleRequest(request_id="req-1", prompt="prompt"), - request_id="req-1", - ) - - # Simulate checkpoint decode fallback leaving a dict - response.original_request = cast( - Any, - { - "request_id": "req-1", - "prompt": "prompt", - }, - ) - - assert is_instance_of(response, SampleResponse) - assert isinstance(response.original_request, dict) - - # Verify the dict contains expected values - dict_request = cast(dict[str, Any], response.original_request) - assert dict_request["request_id"] == "req-1" - assert dict_request["prompt"] == "prompt" - - -def test_encode_decode_simple_dataclass() -> None: - """Test encoding and decoding of a simple dataclass.""" - original = SampleRequest(request_id="test-123", prompt="test prompt") - - encoded = encode_checkpoint_value(original) - decoded = cast(SampleRequest, decode_checkpoint_value(encoded)) - - assert isinstance(decoded, SampleRequest) - assert decoded.request_id == "test-123" - assert decoded.prompt == "test prompt" - - -def test_encode_decode_nested_structures() -> None: +def test_roundtrip_nested_structures() -> None: """Test encoding and decoding of complex nested structures.""" nested_data = { "requests": [ @@ -110,7 +88,6 @@ def test_encode_decode_nested_structures() -> None: assert "requests" in decoded assert "responses" in decoded - # Check the requests list requests = cast(list[Any], decoded["requests"]) assert isinstance(requests, list) assert len(requests) == 2 @@ -120,7 +97,6 @@ def test_encode_decode_nested_structures() -> None: assert first_request.request_id == "req-1" assert second_request.request_id == "req-2" - # Check the responses dict responses = cast(dict[str, Any], decoded["responses"]) assert isinstance(responses, dict) assert "req-1" in responses @@ -131,108 +107,100 @@ def test_encode_decode_nested_structures() -> None: assert 
response.original_request.request_id == "req-1" -def test_encode_allows_marker_key_without_value_key() -> None: - """Test that encoding a dict with only the marker key (no 'value') is allowed.""" - dict_with_marker_only = { - MODEL_MARKER: "some.module:FakeClass", - "other_key": "test", - } - encoded = encode_checkpoint_value(dict_with_marker_only) - assert MODEL_MARKER in encoded - assert "other_key" in encoded +def test_roundtrip_datetime() -> None: + """Test round-trip encoding/decoding of datetime objects.""" + original = datetime(2024, 5, 4, 12, 30, 45, tzinfo=timezone.utc) + + encoded = encode_checkpoint_value(original) + decoded = decode_checkpoint_value(encoded) + assert isinstance(decoded, datetime) + assert decoded == original -def test_encode_allows_value_key_without_marker_key() -> None: - """Test that encoding a dict with only 'value' key (no marker) is allowed.""" - dict_with_value_only = { - "value": {"data": "test"}, - "other_key": "test", - } - encoded = encode_checkpoint_value(dict_with_value_only) - assert "value" in encoded - assert "other_key" in encoded +def test_roundtrip_primitives() -> None: + """Test that primitive types round-trip unchanged.""" + for value in ["hello", 42, 3.14, True, False, None]: + assert decode_checkpoint_value(encode_checkpoint_value(value)) == value -def test_encode_allows_marker_with_value_key() -> None: - """Test that encoding a dict with marker and 'value' keys is allowed. - This is allowed because legitimate encoded data may contain these keys, - and security is enforced at deserialization time by validating class types. 
- """ - dict_with_both = { - MODEL_MARKER: "some.module:SomeClass", - "value": {"data": "test"}, - "strategy": "to_dict", +def test_roundtrip_dict_with_mixed_values() -> None: + """Test round-trip of a dict containing both primitives and complex types.""" + original = { + "name": "test", + "request": SampleRequest(request_id="r1", prompt="p1"), + "count": 5, } - encoded = encode_checkpoint_value(dict_with_both) - assert MODEL_MARKER in encoded - assert "value" in encoded + encoded = encode_checkpoint_value(original) + decoded = decode_checkpoint_value(encoded) -class NotADataclass: - """A regular class that is not a dataclass.""" + assert decoded["name"] == "test" + assert decoded["count"] == 5 + assert isinstance(decoded["request"], SampleRequest) + assert decoded["request"].request_id == "r1" - def __init__(self, value: str) -> None: - self.value = value - def get_value(self) -> str: - return self.value +# --- Tests for decode primitives --- -class NotAModel: - """A regular class that does not support the model protocol.""" +def test_decode_string() -> None: + """Test decoding a string passes through unchanged.""" + assert decode_checkpoint_value("hello") == "hello" - def __init__(self, value: str) -> None: - self.value = value - def get_value(self) -> str: - return self.value +def test_decode_integer() -> None: + """Test decoding an integer passes through unchanged.""" + assert decode_checkpoint_value(42) == 42 -def test_decode_rejects_non_dataclass_with_dataclass_marker() -> None: - """Test that decode returns raw value when marked class is not a dataclass.""" - # Manually construct a payload that claims NotADataclass is a dataclass - fake_payload = { - DATACLASS_MARKER: f"{NotADataclass.__module__}:{NotADataclass.__name__}", - "value": {"value": "test_value"}, - } +def test_decode_none() -> None: + """Test decoding None passes through unchanged.""" + assert decode_checkpoint_value(None) is None - decoded = decode_checkpoint_value(fake_payload) - # Should 
return the raw decoded value, not an instance of NotADataclass - assert isinstance(decoded, dict) - assert decoded["value"] == "test_value" +# --- Tests for decode collections --- -def test_decode_rejects_non_model_with_model_marker() -> None: - """Test that decode returns raw value when marked class doesn't support model protocol.""" - # Manually construct a payload that claims NotAModel supports the model protocol - fake_payload = { - MODEL_MARKER: f"{NotAModel.__module__}:{NotAModel.__name__}", - "strategy": "to_dict", - "value": {"value": "test_value"}, - } +def test_decode_plain_dict() -> None: + """Test decoding a plain dictionary with primitive values.""" + data = {"a": 1, "b": "two"} + assert decode_checkpoint_value(data) == {"a": 1, "b": "two"} - decoded = decode_checkpoint_value(fake_payload) - # Should return the raw decoded value, not an instance of NotAModel - assert isinstance(decoded, dict) - assert decoded["value"] == "test_value" +def test_decode_plain_list() -> None: + """Test decoding a plain list with primitive values.""" + data = [1, "two", 3.0] + assert decode_checkpoint_value(data) == [1, "two", 3.0] -def test_encode_allows_nested_dict_with_marker_keys() -> None: - """Test that encoding allows nested dicts containing marker patterns. +# --- Tests for type verification --- - Security is enforced at deserialization time, not serialization time, - so legitimate encoded data can contain markers at any nesting level. 
- """ - nested_data = { - "outer": { - MODEL_MARKER: "some.module:SomeClass", - "value": {"data": "test"}, - } - } - encoded = encode_checkpoint_value(nested_data) - assert "outer" in encoded - assert MODEL_MARKER in encoded["outer"] +def test_decode_raises_on_type_mismatch() -> None: + """Test that decoding raises CheckpointDecodingError when type doesn't match.""" + # Encode a SampleRequest but tamper with the type marker + encoded = encode_checkpoint_value(SampleRequest(request_id="r1", prompt="p1")) + assert isinstance(encoded, dict) + encoded[_TYPE_MARKER] = "nonexistent.module:FakeClass" + + with pytest.raises(CheckpointDecodingError, match="Type mismatch"): + decode_checkpoint_value(encoded) + + +class NotADataclass: # noqa: B903 + """A regular class that is not a dataclass.""" + + def __init__(self, value: str) -> None: + self.value = value + + +def test_roundtrip_regular_class() -> None: + """Test that regular (non-dataclass) objects can be round-tripped via pickle.""" + original = NotADataclass(value="test_value") + + encoded = encode_checkpoint_value(original) + decoded = cast(NotADataclass, decode_checkpoint_value(encoded)) + + assert isinstance(decoded, NotADataclass) + assert decoded.value == "test_value" diff --git a/python/packages/core/tests/workflow/test_checkpoint_encode.py b/python/packages/core/tests/workflow/test_checkpoint_encode.py index 3f4db1f864..7c90cc6436 100644 --- a/python/packages/core/tests/workflow/test_checkpoint_encode.py +++ b/python/packages/core/tests/workflow/test_checkpoint_encode.py @@ -1,12 +1,13 @@ # Copyright (c) Microsoft. All rights reserved. 
+import json from dataclasses import dataclass +from datetime import datetime, timezone from typing import Any from agent_framework._workflows._checkpoint_encoding import ( - _CYCLE_SENTINEL, - DATACLASS_MARKER, - MODEL_MARKER, + _PICKLE_MARKER, + _TYPE_MARKER, encode_checkpoint_value, ) @@ -41,23 +42,6 @@ def from_dict(cls, d: dict[str, Any]) -> "ModelWithToDict": return cls(data=d["data"]) -class ModelWithToJson: - """A class that implements to_json/from_json protocol.""" - - def __init__(self, data: str) -> None: - self.data = data - - def to_json(self) -> str: - return f'{{"data": "{self.data}"}}' - - @classmethod - def from_json(cls, json_str: str) -> "ModelWithToJson": - import json - - d = json.loads(json_str) - return cls(data=d["data"]) - - class UnknownObject: """A class that doesn't support any serialization protocol.""" @@ -68,43 +52,37 @@ def __str__(self) -> str: return f"UnknownObject({self.value})" -# --- Tests for primitive encoding --- +# --- Tests for primitive encoding (pass-through) --- def test_encode_string() -> None: """Test encoding a string value.""" - result = encode_checkpoint_value("hello") - assert result == "hello" + assert encode_checkpoint_value("hello") == "hello" def test_encode_integer() -> None: """Test encoding an integer value.""" - result = encode_checkpoint_value(42) - assert result == 42 + assert encode_checkpoint_value(42) == 42 def test_encode_float() -> None: """Test encoding a float value.""" - result = encode_checkpoint_value(3.14) - assert result == 3.14 + assert encode_checkpoint_value(3.14) == 3.14 def test_encode_boolean_true() -> None: """Test encoding a True boolean value.""" - result = encode_checkpoint_value(True) - assert result is True + assert encode_checkpoint_value(True) is True def test_encode_boolean_false() -> None: """Test encoding a False boolean value.""" - result = encode_checkpoint_value(False) - assert result is False + assert encode_checkpoint_value(False) is False def test_encode_none() -> None: 
"""Test encoding a None value.""" - result = encode_checkpoint_value(None) - assert result is None + assert encode_checkpoint_value(None) is None # --- Tests for collection encoding --- @@ -112,8 +90,7 @@ def test_encode_none() -> None: def test_encode_empty_dict() -> None: """Test encoding an empty dictionary.""" - result = encode_checkpoint_value({}) - assert result == {} + assert encode_checkpoint_value({}) == {} def test_encode_simple_dict() -> None: @@ -132,8 +109,7 @@ def test_encode_dict_with_non_string_keys() -> None: def test_encode_empty_list() -> None: """Test encoding an empty list.""" - result = encode_checkpoint_value([]) - assert result == [] + assert encode_checkpoint_value([]) == [] def test_encode_simple_list() -> None: @@ -147,6 +123,7 @@ def test_encode_tuple() -> None: """Test encoding a tuple (converted to list).""" data = (1, 2, 3) result = encode_checkpoint_value(data) + assert isinstance(result, list) assert result == [1, 2, 3] @@ -160,13 +137,7 @@ def test_encode_set() -> None: def test_encode_nested_dict() -> None: """Test encoding a nested dictionary structure.""" - data = { - "outer": { - "inner": { - "value": 42, - } - } - } + data = {"outer": {"inner": {"value": 42}}} result = encode_checkpoint_value(data) assert result == {"outer": {"inner": {"value": 42}}} @@ -178,18 +149,18 @@ def test_encode_list_of_dicts() -> None: assert result == [{"a": 1}, {"b": 2}] -# --- Tests for dataclass encoding --- +# --- Tests for non-JSON-native types (pickled) --- def test_encode_simple_dataclass() -> None: - """Test encoding a simple dataclass.""" + """Test encoding a simple dataclass produces a pickled entry.""" obj = SimpleDataclass(name="test", value=42) result = encode_checkpoint_value(obj) assert isinstance(result, dict) - assert DATACLASS_MARKER in result - assert "value" in result - assert result["value"] == {"name": "test", "value": 42} + assert _PICKLE_MARKER in result + assert _TYPE_MARKER in result + assert 
isinstance(result[_PICKLE_MARKER], str) # base64 string def test_encode_nested_dataclass() -> None: @@ -199,12 +170,8 @@ def test_encode_nested_dataclass() -> None: result = encode_checkpoint_value(outer) assert isinstance(result, dict) - assert DATACLASS_MARKER in result - assert "value" in result - - outer_value = result["value"] - assert outer_value["outer_name"] == "outer" - assert DATACLASS_MARKER in outer_value["inner"] + assert _PICKLE_MARKER in result + assert _TYPE_MARKER in result def test_encode_list_of_dataclasses() -> None: @@ -218,7 +185,7 @@ def test_encode_list_of_dataclasses() -> None: assert isinstance(result, list) assert len(result) == 2 for item in result: - assert DATACLASS_MARKER in item + assert _PICKLE_MARKER in item def test_encode_dict_with_dataclass_values() -> None: @@ -230,169 +197,77 @@ def test_encode_dict_with_dataclass_values() -> None: result = encode_checkpoint_value(data) assert isinstance(result, dict) - assert DATACLASS_MARKER in result["item1"] - assert DATACLASS_MARKER in result["item2"] - - -# --- Tests for model protocol encoding --- + assert _PICKLE_MARKER in result["item1"] + assert _PICKLE_MARKER in result["item2"] def test_encode_model_with_to_dict() -> None: - """Test encoding an object implementing to_dict/from_dict protocol.""" + """Test encoding an object with to_dict is pickled (not using to_dict).""" obj = ModelWithToDict(data="test_data") result = encode_checkpoint_value(obj) assert isinstance(result, dict) - assert MODEL_MARKER in result - assert result["strategy"] == "to_dict" - assert result["value"] == {"data": "test_data"} + assert _PICKLE_MARKER in result -def test_encode_model_with_to_json() -> None: - """Test encoding an object implementing to_json/from_json protocol.""" - obj = ModelWithToJson(data="test_data") - result = encode_checkpoint_value(obj) - - assert isinstance(result, dict) - assert MODEL_MARKER in result - assert result["strategy"] == "to_json" - assert '"data": "test_data"' in 
result["value"] - - -# --- Tests for unknown object encoding --- - - -def test_encode_unknown_object_fallback_to_string() -> None: - """Test that unknown objects are encoded as strings.""" +def test_encode_unknown_object() -> None: + """Test that arbitrary objects are pickled.""" obj = UnknownObject(value="test") result = encode_checkpoint_value(obj) - assert isinstance(result, str) - assert "UnknownObject" in result - - -# --- Tests for cycle detection --- - - -def test_encode_dict_with_self_reference() -> None: - """Test that dict self-references are detected and handled.""" - data: dict[str, Any] = {"name": "test"} - data["self"] = data # Create circular reference - - result = encode_checkpoint_value(data) - assert result["name"] == "test" - assert result["self"] == _CYCLE_SENTINEL - - -def test_encode_list_with_self_reference() -> None: - """Test that list self-references are detected and handled.""" - data: list[Any] = [1, 2] - data.append(data) # Create circular reference - - result = encode_checkpoint_value(data) - assert result[0] == 1 - assert result[1] == 2 - assert result[2] == _CYCLE_SENTINEL + assert isinstance(result, dict) + assert _PICKLE_MARKER in result -# --- Tests for reserved keyword handling --- -# Note: Security is enforced at deserialization time by validating class types, -# not at serialization time. This allows legitimate encoded data to be re-encoded. +def test_encode_datetime() -> None: + """Test that datetime objects are pickled.""" + dt = datetime(2024, 5, 4, 12, 30, 45, tzinfo=timezone.utc) + result = encode_checkpoint_value(dt) + assert isinstance(result, dict) + assert _PICKLE_MARKER in result -def test_encode_allows_dict_with_model_marker_and_value() -> None: - """Test that encoding a dict with MODEL_MARKER and 'value' is allowed. - Security is enforced at deserialization time, not serialization time. 
- """ - data = { - MODEL_MARKER: "some.module:SomeClass", - "value": {"data": "test"}, - } - result = encode_checkpoint_value(data) - assert MODEL_MARKER in result - assert "value" in result +# --- Tests for type marker --- -def test_encode_allows_dict_with_dataclass_marker_and_value() -> None: - """Test that encoding a dict with DATACLASS_MARKER and 'value' is allowed. +def test_encode_type_marker_records_type_info() -> None: + """Test that encoded objects include correct type information.""" + obj = SimpleDataclass(name="test", value=42) + result = encode_checkpoint_value(obj) - Security is enforced at deserialization time, not serialization time. - """ - data = { - DATACLASS_MARKER: "some.module:SomeClass", - "value": {"field": "test"}, - } - result = encode_checkpoint_value(data) - assert DATACLASS_MARKER in result - assert "value" in result + type_key = result[_TYPE_MARKER] + assert "SimpleDataclass" in type_key -def test_encode_allows_nested_dict_with_marker_keys() -> None: - """Test that encoding nested dict with marker keys is allowed. +def test_encode_type_marker_uses_module_qualname_format() -> None: + """Test that type marker uses module:qualname format.""" + obj = SimpleDataclass(name="test", value=42) + result = encode_checkpoint_value(obj) - Security is enforced at deserialization time, not serialization time. 
- """ - nested_data = { - "outer": { - MODEL_MARKER: "some.module:SomeClass", - "value": {"data": "test"}, - } - } - result = encode_checkpoint_value(nested_data) - assert "outer" in result - assert MODEL_MARKER in result["outer"] + type_key = result[_TYPE_MARKER] + assert ":" in type_key + module, qualname = type_key.split(":") + assert module # non-empty module + assert qualname == "SimpleDataclass" -def test_encode_allows_marker_without_value() -> None: - """Test that a dict with marker key but without 'value' key is allowed.""" - data = { - MODEL_MARKER: "some.module:SomeClass", - "other_key": "allowed", - } - result = encode_checkpoint_value(data) - assert MODEL_MARKER in result - assert result["other_key"] == "allowed" +# --- Tests for JSON serializability --- -def test_encode_allows_value_without_marker() -> None: - """Test that a dict with 'value' key but without marker is allowed.""" +def test_encode_result_is_json_serializable() -> None: + """Test that encoded output is fully JSON-serializable.""" data = { - "value": {"nested": "data"}, - "other_key": "allowed", + "dc": SimpleDataclass(name="test", value=42), + "model": ModelWithToDict(data="test"), + "dt": datetime.now(timezone.utc), + "nested": [SimpleDataclass(name="n", value=1)], } - result = encode_checkpoint_value(data) - assert "value" in result - assert result["other_key"] == "allowed" - - -# --- Tests for max depth protection --- - - -def test_encode_deep_nesting_triggers_max_depth() -> None: - """Test that very deep nesting triggers max depth protection.""" - # Create a deeply nested structure (over 100 levels) - data: dict[str, Any] = {"level": 0} - current = data - for i in range(105): - current["nested"] = {"level": i + 1} - current = current["nested"] result = encode_checkpoint_value(data) - - # Navigate to find the max_depth sentinel - current_result = result - found_max_depth = False - for _ in range(110): - if isinstance(current_result, dict) and "nested" in current_result: - 
current_result = current_result["nested"] - if current_result == "": - found_max_depth = True - break - else: - break - - assert found_max_depth, "Expected sentinel to be found in deeply nested structure" + # Should not raise + json_str = json.dumps(result) + assert isinstance(json_str, str) # --- Tests for mixed complex structures --- @@ -413,6 +288,7 @@ def test_encode_complex_mixed_structure() -> None: result = encode_checkpoint_value(data) + # Primitives and collections pass through assert result["string_value"] == "hello" assert result["int_value"] == 42 assert result["float_value"] == 3.14 @@ -420,4 +296,17 @@ def test_encode_complex_mixed_structure() -> None: assert result["none_value"] is None assert result["list_value"] == [1, 2, 3] assert result["nested_dict"] == {"a": 1, "b": 2} - assert DATACLASS_MARKER in result["dataclass_value"] + # Dataclass is pickled + assert _PICKLE_MARKER in result["dataclass_value"] + + +def test_encode_preserves_dict_with_pickle_marker_key() -> None: + """Test that regular dicts containing _PICKLE_MARKER key are recursively encoded.""" + data = { + _PICKLE_MARKER: "some_value", + "other_key": "test", + } + result = encode_checkpoint_value(data) + assert _PICKLE_MARKER in result + assert result[_PICKLE_MARKER] == "some_value" + assert result["other_key"] == "test" diff --git a/python/packages/core/tests/workflow/test_request_info_and_response.py b/python/packages/core/tests/workflow/test_request_info_and_response.py index e545869a86..a288c962b5 100644 --- a/python/packages/core/tests/workflow/test_request_info_and_response.py +++ b/python/packages/core/tests/workflow/test_request_info_and_response.py @@ -363,12 +363,10 @@ async def test_checkpoint_with_pending_request_info_events(self): # Step 3: Verify the pending request info event was properly serialized serialized_event = checkpoint_with_request.pending_request_info_events[request_info_event.request_id] - assert "data" in serialized_event - assert "request_id" in 
serialized_event - assert "source_executor_id" in serialized_event - assert "request_type" in serialized_event - assert serialized_event["request_id"] == request_info_event.request_id - assert serialized_event["source_executor_id"] == "approval_executor" + assert serialized_event.data + assert serialized_event.request_type is UserApprovalRequest + assert serialized_event.request_id == request_info_event.request_id + assert serialized_event.source_executor_id == "approval_executor" # Step 4: Create a fresh workflow and restore from checkpoint new_executor = ApprovalRequiredExecutor(id="approval_executor") diff --git a/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py b/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py index 73b4b938c1..dfbf134185 100644 --- a/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py +++ b/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py @@ -4,11 +4,11 @@ from dataclasses import dataclass, field from datetime import datetime, timezone -import pytest - from agent_framework import InMemoryCheckpointStorage, InProcRunnerContext -from agent_framework._workflows._checkpoint_encoding import DATACLASS_MARKER, encode_checkpoint_value -from agent_framework._workflows._checkpoint_summary import get_checkpoint_summary +from agent_framework._workflows._checkpoint_encoding import ( + _PICKLE_MARKER, + encode_checkpoint_value, +) from agent_framework._workflows._events import WorkflowEvent from agent_framework._workflows._state import State @@ -52,7 +52,7 @@ async def test_rehydrate_request_info_event() -> None: assert checkpoint is not None assert checkpoint.pending_request_info_events assert "request-123" in checkpoint.pending_request_info_events - assert "request_type" in checkpoint.pending_request_info_events["request-123"] + assert checkpoint.pending_request_info_events["request-123"].request_type is MockRequest # Rehydrate the context await 
runner_context.apply_checkpoint(checkpoint) @@ -67,97 +67,6 @@ async def test_rehydrate_request_info_event() -> None: assert isinstance(rehydrated_event.data, MockRequest) -async def test_rehydrate_fails_when_request_type_missing() -> None: - """Rehydration should fail is the request type is missing or fails to import.""" - request_info_event = WorkflowEvent.request_info( - request_id="request-123", - source_executor_id="review_gateway", - request_data=MockRequest(), - response_type=bool, - ) - - runner_context = InProcRunnerContext(InMemoryCheckpointStorage()) - await runner_context.add_request_info_event(request_info_event) - - checkpoint_id = await runner_context.create_checkpoint(State(), iteration_count=1) - checkpoint = await runner_context.load_checkpoint(checkpoint_id) - - assert checkpoint is not None - assert checkpoint.pending_request_info_events - assert "request-123" in checkpoint.pending_request_info_events - assert "request_type" in checkpoint.pending_request_info_events["request-123"] - - # Modify the checkpoint to simulate missing request type - checkpoint.pending_request_info_events["request-123"]["request_type"] = "nonexistent.module:MissingRequest" - - # Rehydrate the context - with pytest.raises(ImportError): - await runner_context.apply_checkpoint(checkpoint) - - -async def test_rehydrate_fails_when_request_type_mismatch() -> None: - """Rehydration should fail if the request type is mismatched.""" - request_info_event = WorkflowEvent.request_info( - request_id="request-123", - source_executor_id="review_gateway", - request_data=MockRequest(), - response_type=bool, - ) - - runner_context = InProcRunnerContext(InMemoryCheckpointStorage()) - await runner_context.add_request_info_event(request_info_event) - - checkpoint_id = await runner_context.create_checkpoint(State(), iteration_count=1) - checkpoint = await runner_context.load_checkpoint(checkpoint_id) - - assert checkpoint is not None - assert checkpoint.pending_request_info_events - assert 
"request-123" in checkpoint.pending_request_info_events - assert "request_type" in checkpoint.pending_request_info_events["request-123"] - - # Modify the checkpoint to simulate mismatched request type in the serialized data - checkpoint.pending_request_info_events["request-123"]["data"][DATACLASS_MARKER] = ( - "nonexistent.module:MissingRequest" - ) - - # Rehydrate the context - with pytest.raises(TypeError): - await runner_context.apply_checkpoint(checkpoint) - - -async def test_pending_requests_in_summary() -> None: - """Test that pending requests are correctly summarized in the checkpoint summary.""" - request_info_event = WorkflowEvent.request_info( - request_id="request-123", - source_executor_id="review_gateway", - request_data=MockRequest(), - response_type=bool, - ) - - runner_context = InProcRunnerContext(InMemoryCheckpointStorage()) - await runner_context.add_request_info_event(request_info_event) - - checkpoint_id = await runner_context.create_checkpoint(State(), iteration_count=1) - checkpoint = await runner_context.load_checkpoint(checkpoint_id) - - assert checkpoint is not None - summary = get_checkpoint_summary(checkpoint) - - assert summary.checkpoint_id == checkpoint_id - assert summary.status == "awaiting request response" - - assert len(summary.pending_request_info_events) == 1 - pending_event = summary.pending_request_info_events[0] - assert isinstance(pending_event, WorkflowEvent) - assert pending_event.type == "request_info" - assert pending_event.request_id == "request-123" - - assert pending_event.source_executor_id == "review_gateway" - assert pending_event.request_type is MockRequest - assert pending_event.response_type is bool - assert isinstance(pending_event.data, MockRequest) - - async def test_request_info_event_serializes_non_json_payloads() -> None: req_1 = WorkflowEvent.request_info( request_id="req-1", @@ -181,15 +90,22 @@ async def test_request_info_event_serializes_non_json_payloads() -> None: # Should be JSON serializable 
despite datetime/slots serialized = json.dumps(encode_checkpoint_value(checkpoint)) + assert isinstance(serialized, str) + + # Verify the structure contains pickled data for the request data fields deserialized = json.loads(serialized) + assert _PICKLE_MARKER in deserialized # checkpoint itself is pickled - assert "value" in deserialized - deserialized = deserialized["value"] + # Verify we can rehydrate the checkpoint correctly + await runner_context.apply_checkpoint(checkpoint) + pending = await runner_context.get_pending_request_info_events() - assert "pending_request_info_events" in deserialized - pending_request_info_events = deserialized["pending_request_info_events"] - assert "req-1" in pending_request_info_events - assert isinstance(pending_request_info_events["req-1"]["data"]["value"]["issued_at"], str) + assert "req-1" in pending + rehydrated_1 = pending["req-1"] + assert isinstance(rehydrated_1.data, TimedApproval) + assert rehydrated_1.data.issued_at == datetime(2024, 5, 4, 12, 30, 45) - assert "req-2" in pending_request_info_events - assert pending_request_info_events["req-2"]["data"]["value"]["note"] == "slot-based" + assert "req-2" in pending + rehydrated_2 = pending["req-2"] + assert isinstance(rehydrated_2.data, SlottedApproval) + assert rehydrated_2.data.note == "slot-based" diff --git a/python/packages/core/tests/workflow/test_runner.py b/python/packages/core/tests/workflow/test_runner.py index 7af722e45a..d61aa22cca 100644 --- a/python/packages/core/tests/workflow/test_runner.py +++ b/python/packages/core/tests/workflow/test_runner.py @@ -61,7 +61,7 @@ def test_create_runner(): executor_b.id: executor_b, } - runner = Runner(edge_groups, executors, state=State(), ctx=InProcRunnerContext()) + runner = Runner(edge_groups, executors, state=State(), ctx=InProcRunnerContext(), graph_signature_hash="test_hash") assert runner.context is not None and isinstance(runner.context, RunnerContext) @@ -84,7 +84,7 @@ async def test_runner_run_until_convergence(): 
state = State() ctx = InProcRunnerContext() - runner = Runner(edges, executors, state, ctx) + runner = Runner(edges, executors, state, ctx, graph_signature_hash="test_hash") result: int | None = None await executor_a.execute( @@ -122,7 +122,7 @@ async def test_runner_run_until_convergence_not_completed(): state = State() ctx = InProcRunnerContext() - runner = Runner(edges, executors, state, ctx, max_iterations=5) + runner = Runner(edges, executors, state, ctx, max_iterations=5, graph_signature_hash="test_hash") await executor_a.execute( MockMessage(data=0), @@ -156,7 +156,7 @@ async def test_runner_already_running(): state = State() ctx = InProcRunnerContext() - runner = Runner(edges, executors, state, ctx) + runner = Runner(edges, executors, state, ctx, graph_signature_hash="test_hash") await executor_a.execute( MockMessage(data=0), @@ -176,7 +176,7 @@ async def _run(): async def test_runner_emits_runner_completion_for_agent_response_without_targets(): ctx = InProcRunnerContext() - runner = Runner([], {}, State(), ctx) + runner = Runner([], {}, State(), ctx, graph_signature_hash="test_hash") await ctx.send_message( Message( @@ -228,7 +228,7 @@ async def test_runner_cancellation_stops_active_executor(): shared_state = State() ctx = InProcRunnerContext() - runner = Runner(edges, executors, shared_state, ctx) + runner = Runner(edges, executors, shared_state, ctx, graph_signature_hash="test_hash") await executor_a.execute( MockMessage(data=0), diff --git a/python/packages/core/tests/workflow/test_workflow.py b/python/packages/core/tests/workflow/test_workflow.py index 1ab77096ac..e0b241274f 100644 --- a/python/packages/core/tests/workflow/test_workflow.py +++ b/python/packages/core/tests/workflow/test_workflow.py @@ -444,7 +444,7 @@ async def test_workflow_run_stream_from_checkpoint_with_responses( source_executor_id=simple_executor.id, request_data="Mock", response_type=str, - ).to_dict(), + ), }, iteration_count=0, ) diff --git 
a/python/packages/core/tests/workflow/test_workflow_observability.py b/python/packages/core/tests/workflow/test_workflow_observability.py index 82419510c6..6334457fbb 100644 --- a/python/packages/core/tests/workflow/test_workflow_observability.py +++ b/python/packages/core/tests/workflow/test_workflow_observability.py @@ -458,8 +458,8 @@ async def test_message_trace_context_serialization(span_exporter: InMemorySpanEx # Check serialized message includes trace context serialized_msg = checkpoint.messages["source"][0] - assert serialized_msg["trace_contexts"] == [{"traceparent": "00-trace-span-01"}] - assert serialized_msg["source_span_ids"] == ["span123"] + assert serialized_msg.trace_contexts == [{"traceparent": "00-trace-span-01"}] + assert serialized_msg.source_span_ids == ["span123"] # Test deserialization await ctx.apply_checkpoint(checkpoint) diff --git a/python/packages/orchestrations/agent_framework_orchestrations/_group_chat.py b/python/packages/orchestrations/agent_framework_orchestrations/_group_chat.py index 6ee764de20..f642b8eb49 100644 --- a/python/packages/orchestrations/agent_framework_orchestrations/_group_chat.py +++ b/python/packages/orchestrations/agent_framework_orchestrations/_group_chat.py @@ -32,7 +32,6 @@ from agent_framework._workflows._agent_executor import AgentExecutor, AgentExecutorRequest, AgentExecutorResponse from agent_framework._workflows._agent_utils import resolve_agent_id from agent_framework._workflows._checkpoint import CheckpointStorage -from agent_framework._workflows._conversation_state import decode_chat_messages, encode_chat_messages from agent_framework._workflows._executor import Executor from agent_framework._workflows._workflow import Workflow from agent_framework._workflows._workflow_builder import WorkflowBuilder @@ -476,7 +475,7 @@ async def _check_agent_terminate_and_yield( async def on_checkpoint_save(self) -> dict[str, Any]: """Capture current orchestrator state for checkpointing.""" state = await 
super().on_checkpoint_save() - state["cache"] = encode_chat_messages(self._cache) + state["cache"] = self._cache serialized_thread = await self._thread.serialize() state["thread"] = serialized_thread @@ -486,7 +485,7 @@ async def on_checkpoint_save(self) -> dict[str, Any]: async def on_checkpoint_restore(self, state: dict[str, Any]) -> None: """Restore executor state from checkpoint.""" await super().on_checkpoint_restore(state) - self._cache = decode_chat_messages(state.get("cache", [])) + self._cache = state.get("cache", []) serialized_thread = state.get("thread") if serialized_thread: self._thread = await self._agent.deserialize_thread(serialized_thread) diff --git a/python/packages/orchestrations/agent_framework_orchestrations/_orchestration_state.py b/python/packages/orchestrations/agent_framework_orchestrations/_orchestration_state.py index fe8ba64126..3f3c3c5f9b 100644 --- a/python/packages/orchestrations/agent_framework_orchestrations/_orchestration_state.py +++ b/python/packages/orchestrations/agent_framework_orchestrations/_orchestration_state.py @@ -59,15 +59,13 @@ def to_dict(self) -> dict[str, Any]: Returns: Dict with encoded conversation and metadata for persistence """ - from agent_framework._workflows._conversation_state import encode_chat_messages - result: dict[str, Any] = { - "conversation": encode_chat_messages(self.conversation), + "conversation": self.conversation, "round_index": self.round_index, "metadata": dict(self.metadata), } if self.task is not None: - result["task"] = encode_chat_messages([self.task])[0] + result["task"] = self.task return result @classmethod @@ -80,15 +78,13 @@ def from_dict(cls, data: dict[str, Any]) -> OrchestrationState: Returns: Restored OrchestrationState instance """ - from agent_framework._workflows._conversation_state import decode_chat_messages - task = None if "task" in data: - decoded_tasks = decode_chat_messages([data["task"]]) + decoded_tasks = [data["task"]] task = decoded_tasks[0] if decoded_tasks else 
None return cls( - conversation=decode_chat_messages(data.get("conversation", [])), + conversation=data.get("conversation", []), round_index=data.get("round_index", 0), metadata=dict(data.get("metadata", {})), task=task, diff --git a/python/samples/getting_started/workflows/checkpoint/checkpoint_with_human_in_the_loop.py b/python/samples/getting_started/workflows/checkpoint/checkpoint_with_human_in_the_loop.py index df7c5b1445..e0bac97ea5 100644 --- a/python/samples/getting_started/workflows/checkpoint/checkpoint_with_human_in_the_loop.py +++ b/python/samples/getting_started/workflows/checkpoint/checkpoint_with_human_in_the_loop.py @@ -24,9 +24,7 @@ FileCheckpointStorage, Workflow, WorkflowBuilder, - WorkflowCheckpoint, WorkflowContext, - get_checkpoint_summary, handler, response_handler, ) @@ -200,24 +198,6 @@ def create_workflow(checkpoint_storage: FileCheckpointStorage) -> Workflow: return workflow_builder.build() -def render_checkpoint_summary(checkpoints: list["WorkflowCheckpoint"]) -> None: - """Pretty-print saved checkpoints with the new framework summaries.""" - - print("\nCheckpoint summary:") - for summary in [get_checkpoint_summary(cp) for cp in sorted(checkpoints, key=lambda c: c.timestamp)]: - # Compose a single line per checkpoint so the user can scan the output - # and pick the resume point that still has outstanding human work. 
- line = ( - f"- {summary.checkpoint_id} | timestamp={summary.timestamp} | iter={summary.iteration_count} " - f"| targets={summary.targets} | states={summary.executor_ids}" - ) - if summary.status: - line += f" | status={summary.status}" - if summary.pending_request_info_events: - line += f" | pending_request_id={summary.pending_request_info_events[0].request_id}" - print(line) - - def prompt_for_responses(requests: dict[str, HumanApprovalRequest]) -> dict[str, str]: """Interactive CLI prompt for any live RequestInfo requests.""" @@ -310,10 +290,6 @@ async def main() -> None: print("No checkpoints recorded.") return - # Show the user what is available before we prompt for the index. The - # summary helper keeps this output consistent with other tooling. - render_checkpoint_summary(checkpoints) - sorted_cps = sorted(checkpoints, key=lambda c: c.timestamp) print("\nAvailable checkpoints:") for idx, cp in enumerate(sorted_cps): @@ -338,10 +314,6 @@ async def main() -> None: return chosen = sorted_cps[idx] - summary = get_checkpoint_summary(chosen) - if summary.status == "completed": - print("Selected checkpoint already reflects a completed workflow; nothing to resume.") - return new_workflow = create_workflow(checkpoint_storage=storage) # Resume with a fresh workflow instance. 
The checkpoint carries the From 817db4edeccde2adb8604f5de12d542a54225c4a Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Sun, 8 Feb 2026 08:01:44 -0800 Subject: [PATCH 02/16] WIP: Remove workflow ID in checkpoints --- .../agent_framework/_workflows/_checkpoint.py | 7 ++++-- .../agent_framework/_workflows/_runner.py | 14 +++-------- .../_workflows/_runner_context.py | 25 ++++++------------- .../agent_framework/_workflows/_workflow.py | 2 -- 4 files changed, 17 insertions(+), 31 deletions(-) diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint.py b/python/packages/core/agent_framework/_workflows/_checkpoint.py index 0b8a319736..7122df53a2 100644 --- a/python/packages/core/agent_framework/_workflows/_checkpoint.py +++ b/python/packages/core/agent_framework/_workflows/_checkpoint.py @@ -28,8 +28,9 @@ class WorkflowCheckpoint: enabling workflows to be paused and resumed. Attributes: + graph_signature_hash: Hash of the workflow graph topology to validate checkpoint + compatibility during restore checkpoint_id: Unique identifier for this checkpoint - workflow_id: Identifier of the workflow this checkpoint belongs to timestamp: ISO 8601 timestamp when checkpoint was created messages: Messages exchanged between executors state: Committed workflow state including user data and executor states. @@ -48,8 +49,10 @@ class WorkflowCheckpoint: See State class documentation for details on reserved keys. 
""" + # Hash of the workflow graph topology to validate checkpoint compatibility during restore + graph_signature_hash: str + checkpoint_id: str = field(default_factory=lambda: str(uuid.uuid4())) - workflow_id: str = "" timestamp: str = field(default_factory=lambda: datetime.now(timezone.utc).isoformat()) # Core workflow state diff --git a/python/packages/core/agent_framework/_workflows/_runner.py b/python/packages/core/agent_framework/_workflows/_runner.py index 0de6db1d75..bc36cfc4a5 100644 --- a/python/packages/core/agent_framework/_workflows/_runner.py +++ b/python/packages/core/agent_framework/_workflows/_runner.py @@ -38,7 +38,6 @@ def __init__( ctx: RunnerContext, graph_signature_hash: str, max_iterations: int = 100, - workflow_id: str | None = None, ) -> None: """Initialize the runner with edges, state, and context. @@ -49,7 +48,6 @@ def __init__( ctx: The runner context for the workflow. graph_signature_hash: A hash representing the workflow graph topology for checkpoint validation. max_iterations: The maximum number of iterations to run. - workflow_id: The workflow ID for checkpointing. 
""" # Workflow instance related attributes self._executors = executors @@ -57,7 +55,6 @@ def __init__( self._edge_runner_map = self._parse_edge_runners(self._edge_runners) self._ctx = ctx self._graph_signature_hash = graph_signature_hash - self._workflow_id = workflow_id # Runner state related attributes self._iteration = 0 @@ -66,10 +63,6 @@ def __init__( self._running = False self._resumed_from_checkpoint = False # Track whether we resumed - # Set workflow ID in context if provided - if workflow_id: - self._ctx.set_workflow_id(workflow_id) - @property def context(self) -> RunnerContext: """Get the workflow context.""" @@ -190,12 +183,14 @@ async def _create_checkpoint_if_enabled(self) -> str | None: return None try: - # Snapshot executor states + # Save executor states into the shared state before creating the checkpoint, + # so that they are included in the checkpoint payload. await self._save_executor_states() + checkpoint_id = await self._ctx.create_checkpoint( + self._graph_signature_hash, self._state, self._iteration, - metadata={"graph_signature": self._graph_signature_hash}, ) logger.info(f"Created checkpoint: {checkpoint_id}") @@ -251,7 +246,6 @@ async def restore_from_checkpoint( checkpoint_id, ) - self._workflow_id = checkpoint.workflow_id # Restore state self._state.import_state(checkpoint.state) # Restore executor states using the restored state diff --git a/python/packages/core/agent_framework/_workflows/_runner_context.py b/python/packages/core/agent_framework/_workflows/_runner_context.py index 885fe65119..d0024f6178 100644 --- a/python/packages/core/agent_framework/_workflows/_runner_context.py +++ b/python/packages/core/agent_framework/_workflows/_runner_context.py @@ -4,7 +4,6 @@ import asyncio import logging -import uuid from copy import copy from dataclasses import dataclass from enum import Enum @@ -173,11 +172,6 @@ def clear_runtime_checkpoint_storage(self) -> None: """Clear runtime checkpoint storage override.""" ... 
- # Checkpointing APIs (optional, enabled by storage) - def set_workflow_id(self, workflow_id: str) -> None: - """Set the workflow ID for the context.""" - ... - def reset_for_new_run(self) -> None: """Reset the context for a new workflow run.""" ... @@ -200,6 +194,7 @@ def is_streaming(self) -> bool: async def create_checkpoint( self, + graph_signature_hash: str, state: State, iteration_count: int, metadata: dict[str, Any] | None = None, @@ -207,6 +202,8 @@ async def create_checkpoint( """Create a checkpoint of the current workflow state. Args: + graph_signature_hash: Hash of the workflow graph topology to + validate checkpoint compatibility during restore. state: The state to include in the checkpoint. This is needed to capture the full state of the workflow. The state is not managed by the context itself. @@ -282,7 +279,6 @@ def __init__(self, checkpoint_storage: CheckpointStorage | None = None): # Checkpointing configuration/state self._checkpoint_storage = checkpoint_storage self._runtime_checkpoint_storage: CheckpointStorage | None = None - self._workflow_id: str | None = None # Streaming flag - set by workflow's run(..., stream=True) vs run(..., stream=False) self._streaming: bool = False @@ -357,6 +353,7 @@ def has_checkpointing(self) -> bool: async def create_checkpoint( self, + graph_signature_hash: str, state: State, iteration_count: int, metadata: dict[str, Any] | None = None, @@ -366,7 +363,7 @@ async def create_checkpoint( raise ValueError("Checkpoint storage not configured") checkpoint = WorkflowCheckpoint( - workflow_id=self._workflow_id or str(uuid.uuid4()), + graph_signature_hash=graph_signature_hash, messages=dict(self._messages), state=state.export_state(), pending_request_info_events=dict(self._pending_request_info_events), @@ -374,7 +371,7 @@ async def create_checkpoint( metadata=metadata or {}, ) checkpoint_id = await storage.save_checkpoint(checkpoint) - logger.info(f"Created checkpoint {checkpoint_id} for workflow {self._workflow_id}") + 
logger.debug(f"Created checkpoint {checkpoint_id}") return checkpoint_id async def load_checkpoint(self, checkpoint_id: str) -> WorkflowCheckpoint | None: @@ -408,14 +405,8 @@ async def apply_checkpoint(self, checkpoint: WorkflowCheckpoint) -> None: self._pending_request_info_events[request_id] = request_info_event await self.add_event(request_info_event) - # Restore workflow ID - self._workflow_id = checkpoint.workflow_id - # endregion Checkpointing - def set_workflow_id(self, workflow_id: str) -> None: - self._workflow_id = workflow_id - def set_streaming(self, streaming: bool) -> None: """Set whether agents should stream incremental updates. @@ -438,8 +429,8 @@ async def add_request_info_event(self, event: WorkflowEvent[Any]) -> None: Args: event: The WorkflowEvent with type='request_info' to be added. """ - if event.request_id is None: - raise ValueError("request_info event must have a request_id") + if event.type != "request_info": + raise ValueError("Event type must be 'request_info'") self._pending_request_info_events[event.request_id] = event await self.add_event(event) diff --git a/python/packages/core/agent_framework/_workflows/_workflow.py b/python/packages/core/agent_framework/_workflows/_workflow.py index 8f84ee7301..1c839504fe 100644 --- a/python/packages/core/agent_framework/_workflows/_workflow.py +++ b/python/packages/core/agent_framework/_workflows/_workflow.py @@ -218,7 +218,6 @@ def __init__( # can assert they are resumed with an equivalent topology. 
self._hash_graph_signature(self._compute_graph_signature()), max_iterations=max_iterations, - workflow_id=self.id, ) # Flag to prevent concurrent workflow executions @@ -775,7 +774,6 @@ def _compute_graph_signature(self) -> dict[str, Any]: "start_executor": self.start_executor_id, "executors": executors_signature, "edge_groups": edge_groups_signature, - "max_iterations": self.max_iterations, } @staticmethod From 499386d3bc31db613fb390904a17096879ee220f Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Sun, 8 Feb 2026 10:10:44 -0800 Subject: [PATCH 03/16] Refactor checkpointing --- .../agent_framework/_workflows/_checkpoint.py | 236 ++++++++++++++---- .../agent_framework/_workflows/_runner.py | 26 +- .../_workflows/_runner_context.py | 22 +- .../agent_framework/_workflows/_workflow.py | 28 ++- .../_workflows/_workflow_builder.py | 20 +- .../core/agent_framework/observability.py | 2 + .../tests/workflow/test_agent_executor.py | 2 +- .../core/tests/workflow/test_checkpoint.py | 185 +++++++------- .../workflow/test_checkpoint_validation.py | 4 +- .../test_request_info_and_response.py | 2 +- .../test_request_info_event_rehydrate.py | 4 +- .../core/tests/workflow/test_runner.py | 19 +- .../core/tests/workflow/test_serialization.py | 5 +- .../core/tests/workflow/test_sub_workflow.py | 2 +- .../core/tests/workflow/test_workflow.py | 56 +++-- .../tests/workflow/test_workflow_agent.py | 2 +- .../workflow/test_workflow_observability.py | 6 +- 17 files changed, 385 insertions(+), 236 deletions(-) diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint.py b/python/packages/core/agent_framework/_workflows/_checkpoint.py index 7122df53a2..1aca04c0e0 100644 --- a/python/packages/core/agent_framework/_workflows/_checkpoint.py +++ b/python/packages/core/agent_framework/_workflows/_checkpoint.py @@ -11,7 +11,9 @@ from dataclasses import asdict, dataclass, field from datetime import datetime, timezone from pathlib import Path -from typing import TYPE_CHECKING, Any, 
Protocol +from typing import TYPE_CHECKING, Any, Protocol, TypeAlias + +from ._exceptions import WorkflowCheckpointException logger = logging.getLogger(__name__) @@ -19,6 +21,10 @@ from ._events import WorkflowEvent from ._runner_context import Message +# Type alias for checkpoint IDs in case we want to change the +# underlying type in the future (e.g., to UUID or a custom class) +CheckpointID: TypeAlias = str + @dataclass(slots=True) class WorkflowCheckpoint: @@ -27,10 +33,22 @@ class WorkflowCheckpoint: Checkpoints capture the full execution state of a workflow at a specific point, enabling workflows to be paused and resumed. + Note that a checkpoint is not tied to a specific workflow instance, but rather to + a workflow definition (identified by workflow_name and graph_signature_hash). Thus, + the ID of the workflow instance that created the checkpoint is not included in the + checkpoint data. This allows checkpoints to be shared and restored across different + workflow instances of the same workflow definition. + Attributes: + workflow_name: Name of the workflow this checkpoint belongs to. This acts as a + logical grouping for checkpoints and can be used to filter checkpoints by + workflow. Workflows with the same name are expected to have compatible graph + structures for checkpointing. graph_signature_hash: Hash of the workflow graph topology to validate checkpoint compatibility during restore checkpoint_id: Unique identifier for this checkpoint + previous_checkpoint_id: ID of the previous checkpoint in the chain, if any. This + allows chaining checkpoints together to form a history of workflow states. timestamp: ISO 8601 timestamp when checkpoint was created messages: Messages exchanged between executors state: Committed workflow state including user data and executor states. @@ -49,10 +67,11 @@ class WorkflowCheckpoint: See State class documentation for details on reserved keys. 
""" - # Hash of the workflow graph topology to validate checkpoint compatibility during restore + workflow_name: str graph_signature_hash: str - checkpoint_id: str = field(default_factory=lambda: str(uuid.uuid4())) + checkpoint_id: CheckpointID = field(default_factory=lambda: str(uuid.uuid4())) + previous_checkpoint_id: CheckpointID | None = None timestamp: str = field(default_factory=lambda: datetime.now(timezone.utc).isoformat()) # Core workflow state @@ -78,24 +97,73 @@ def from_dict(cls, data: Mapping[str, Any]) -> WorkflowCheckpoint: class CheckpointStorage(Protocol): """Protocol for checkpoint storage backends.""" - async def save_checkpoint(self, checkpoint: WorkflowCheckpoint) -> str: - """Save a checkpoint and return its ID.""" + async def save(self, checkpoint: WorkflowCheckpoint) -> CheckpointID: + """Save a checkpoint and return its ID. + + Args: + checkpoint: The WorkflowCheckpoint object to save. + + Returns: + The unique ID of the saved checkpoint. + """ ... - async def load_checkpoint(self, checkpoint_id: str) -> WorkflowCheckpoint | None: - """Load a checkpoint by ID.""" + async def load(self, checkpoint_id: CheckpointID) -> WorkflowCheckpoint: + """Load a checkpoint by ID. + + Args: + checkpoint_id: The unique ID of the checkpoint to load. + + Returns: + The WorkflowCheckpoint object corresponding to the given ID. + + Raises: + WorkflowCheckpointException: If no checkpoint with the given ID exists. + """ ... - async def list_checkpoint_ids(self, workflow_id: str | None = None) -> list[str]: - """List checkpoint IDs. If workflow_id is provided, filter by that workflow.""" + async def list(self, workflow_name: str) -> list[WorkflowCheckpoint]: + """List checkpoint objects for a given workflow name. + + Args: + workflow_name: The name of the workflow to list checkpoints for. + + Returns: + A list of WorkflowCheckpoint objects for the specified workflow name. + """ ... 
- async def list_checkpoints(self, workflow_id: str | None = None) -> list[WorkflowCheckpoint]: - """List checkpoint objects. If workflow_id is provided, filter by that workflow.""" + async def delete(self, checkpoint_id: CheckpointID) -> bool: + """Delete a checkpoint by ID. + + Args: + checkpoint_id: The unique ID of the checkpoint to delete. + + Returns: + True if the checkpoint was successfully deleted, False if no checkpoint with the given ID exists. + """ ... - async def delete_checkpoint(self, checkpoint_id: str) -> bool: - """Delete a checkpoint by ID.""" + async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: + """Get the latest checkpoint for a given workflow name. + + Args: + workflow_name: The name of the workflow to get the latest checkpoint for. + + Returns: + The latest WorkflowCheckpoint object for the specified workflow name, or None if no checkpoints exist. + """ + ... + + async def list_ids(self, workflow_name: str) -> list[CheckpointID]: + """List checkpoint IDs for a given workflow name. + + Args: + workflow_name: The name of the workflow to list checkpoint IDs for. + + Returns: + A list of checkpoint IDs for the specified workflow name. + """ ... 
@@ -104,34 +172,27 @@ class InMemoryCheckpointStorage: def __init__(self) -> None: """Initialize the memory storage.""" - self._checkpoints: dict[str, WorkflowCheckpoint] = {} + self._checkpoints: dict[CheckpointID, WorkflowCheckpoint] = {} - async def save_checkpoint(self, checkpoint: WorkflowCheckpoint) -> str: + async def save(self, checkpoint: WorkflowCheckpoint) -> CheckpointID: """Save a checkpoint and return its ID.""" self._checkpoints[checkpoint.checkpoint_id] = checkpoint logger.debug(f"Saved checkpoint {checkpoint.checkpoint_id} to memory") return checkpoint.checkpoint_id - async def load_checkpoint(self, checkpoint_id: str) -> WorkflowCheckpoint | None: + async def load(self, checkpoint_id: CheckpointID) -> WorkflowCheckpoint: """Load a checkpoint by ID.""" checkpoint = self._checkpoints.get(checkpoint_id) if checkpoint: logger.debug(f"Loaded checkpoint {checkpoint_id} from memory") - return checkpoint + return checkpoint + raise WorkflowCheckpointException(f"No checkpoint found with ID {checkpoint_id}") - async def list_checkpoint_ids(self, workflow_id: str | None = None) -> list[str]: - """List checkpoint IDs. If workflow_id is provided, filter by that workflow.""" - if workflow_id is None: - return list(self._checkpoints.keys()) - return [cp.checkpoint_id for cp in self._checkpoints.values() if cp.workflow_id == workflow_id] - - async def list_checkpoints(self, workflow_id: str | None = None) -> list[WorkflowCheckpoint]: - """List checkpoint objects. 
If workflow_id is provided, filter by that workflow."""
-        if workflow_id is None:
-            return list(self._checkpoints.values())
-        return [cp for cp in self._checkpoints.values() if cp.workflow_id == workflow_id]
+    async def list(self, workflow_name: str) -> list[WorkflowCheckpoint]:
+        """List checkpoint objects for a given workflow name."""
+        return [cp for cp in self._checkpoints.values() if cp.workflow_name == workflow_name]
 
-    async def delete_checkpoint(self, checkpoint_id: str) -> bool:
+    async def delete(self, checkpoint_id: CheckpointID) -> bool:
         """Delete a checkpoint by ID."""
         if checkpoint_id in self._checkpoints:
             del self._checkpoints[checkpoint_id]
@@ -139,6 +200,19 @@ async def delete_checkpoint(self, checkpoint_id: str) -> bool:
             return True
         return False
 
+    async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None:
+        """Get the latest checkpoint for a given workflow name."""
+        checkpoints = [cp for cp in self._checkpoints.values() if cp.workflow_name == workflow_name]
+        if not checkpoints:
+            return None
+        latest_checkpoint = max(checkpoints, key=lambda cp: cp.timestamp)
+        logger.debug(f"Latest checkpoint for workflow {workflow_name} is {latest_checkpoint.checkpoint_id}")
+        return latest_checkpoint
+
+    async def list_ids(self, workflow_name: str) -> list[CheckpointID]:
+        """List checkpoint IDs for a given workflow name."""
+        return [cp.checkpoint_id for cp in self._checkpoints.values() if cp.workflow_name == workflow_name]
+
 
 class FileCheckpointStorage:
     """File-based checkpoint storage for persistence.
@@ -158,8 +232,15 @@ def __init__(self, storage_path: str | Path): self.storage_path.mkdir(parents=True, exist_ok=True) logger.info(f"Initialized file checkpoint storage at {self.storage_path}") - async def save_checkpoint(self, checkpoint: WorkflowCheckpoint) -> str: - """Save a checkpoint and return its ID.""" + async def save(self, checkpoint: WorkflowCheckpoint) -> CheckpointID: + """Save a checkpoint and return its ID. + + Args: + checkpoint: The WorkflowCheckpoint object to save. + + Returns: + The unique ID of the saved checkpoint. + """ from ._checkpoint_encoding import encode_checkpoint_value file_path = self.storage_path / f"{checkpoint.checkpoint_id}.json" @@ -177,12 +258,22 @@ def _write_atomic() -> None: logger.info(f"Saved checkpoint {checkpoint.checkpoint_id} to {file_path}") return checkpoint.checkpoint_id - async def load_checkpoint(self, checkpoint_id: str) -> WorkflowCheckpoint | None: - """Load a checkpoint by ID.""" + async def load(self, checkpoint_id: CheckpointID) -> WorkflowCheckpoint: + """Load a checkpoint by ID. + + Args: + checkpoint_id: The unique ID of the checkpoint to load. + + Returns: + The WorkflowCheckpoint object corresponding to the given ID. + + Raises: + WorkflowCheckpointException: If no checkpoint with the given ID exists. + """ file_path = self.storage_path / f"{checkpoint_id}.json" if not file_path.exists(): - return None + raise WorkflowCheckpointException(f"No checkpoint found with ID {checkpoint_id}") def _read() -> dict[str, Any]: with open(file_path) as f: @@ -197,25 +288,15 @@ def _read() -> dict[str, Any]: logger.info(f"Loaded checkpoint {checkpoint_id} from {file_path}") return checkpoint - async def list_checkpoint_ids(self, workflow_id: str | None = None) -> list[str]: - """List checkpoint IDs. If workflow_id is provided, filter by that workflow.""" + async def list(self, workflow_name: str) -> list[WorkflowCheckpoint]: + """List checkpoint objects for a given workflow name. 
- def _list_ids() -> list[str]: - checkpoint_ids: list[str] = [] - for file_path in self.storage_path.glob("*.json"): - try: - with open(file_path) as f: - data = json.load(f) - if workflow_id is None or data.get("workflow_id") == workflow_id: - checkpoint_ids.append(data.get("checkpoint_id", file_path.stem)) - except Exception as e: - logger.warning(f"Failed to read checkpoint file {file_path}: {e}") - return checkpoint_ids - - return await asyncio.to_thread(_list_ids) + Args: + workflow_name: The name of the workflow to list checkpoints for. - async def list_checkpoints(self, workflow_id: str | None = None) -> list[WorkflowCheckpoint]: - """List checkpoint objects. If workflow_id is provided, filter by that workflow.""" + Returns: + A list of WorkflowCheckpoint objects for the specified workflow name. + """ def _list_checkpoints() -> list[WorkflowCheckpoint]: checkpoints: list[WorkflowCheckpoint] = [] @@ -227,7 +308,7 @@ def _list_checkpoints() -> list[WorkflowCheckpoint]: decoded_checkpoint_dict = decode_checkpoint_value(encoded_checkpoint) checkpoint = WorkflowCheckpoint.from_dict(decoded_checkpoint_dict) - if workflow_id is None or checkpoint.workflow_id == workflow_id: + if checkpoint.workflow_name == workflow_name: checkpoints.append(checkpoint) except Exception as e: logger.warning(f"Failed to read checkpoint file {file_path}: {e}") @@ -235,8 +316,15 @@ def _list_checkpoints() -> list[WorkflowCheckpoint]: return await asyncio.to_thread(_list_checkpoints) - async def delete_checkpoint(self, checkpoint_id: str) -> bool: - """Delete a checkpoint by ID.""" + async def delete(self, checkpoint_id: CheckpointID) -> bool: + """Delete a checkpoint by ID. + + Args: + checkpoint_id: The unique ID of the checkpoint to delete. + + Returns: + True if the checkpoint was successfully deleted, False if no checkpoint with the given ID exists. 
+ """ file_path = self.storage_path / f"{checkpoint_id}.json" def _delete() -> bool: @@ -247,3 +335,43 @@ def _delete() -> bool: return False return await asyncio.to_thread(_delete) + + async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: + """Get the latest checkpoint for a given workflow name. + + Args: + workflow_name: The name of the workflow to get the latest checkpoint for. + + Returns: + The latest WorkflowCheckpoint object for the specified workflow name, or None if no checkpoints exist. + """ + checkpoints = await self.list(workflow_name) + if not checkpoints: + return None + latest_checkpoint = max(checkpoints, key=lambda cp: cp.timestamp) + logger.debug(f"Latest checkpoint for workflow {workflow_name} is {latest_checkpoint.checkpoint_id}") + return latest_checkpoint + + async def list_ids(self, workflow_name: str) -> list[CheckpointID]: + """List checkpoint IDs for a given workflow name. + + Args: + workflow_name: The name of the workflow to list checkpoint IDs for. + + Returns: + A list of checkpoint IDs for the specified workflow name. 
+ """ + + def _list_ids() -> list[CheckpointID]: + checkpoint_ids: list[CheckpointID] = [] + for file_path in self.storage_path.glob("*.json"): + try: + with open(file_path) as f: + data = json.load(f) + if data.get("workflow_name") == workflow_name: + checkpoint_ids.append(data.get("checkpoint_id", file_path.stem)) + except Exception as e: + logger.warning(f"Failed to read checkpoint file {file_path}: {e}") + return checkpoint_ids + + return await asyncio.to_thread(_list_ids) diff --git a/python/packages/core/agent_framework/_workflows/_runner.py b/python/packages/core/agent_framework/_workflows/_runner.py index bc36cfc4a5..c37a915b19 100644 --- a/python/packages/core/agent_framework/_workflows/_runner.py +++ b/python/packages/core/agent_framework/_workflows/_runner.py @@ -7,7 +7,7 @@ from collections.abc import AsyncGenerator, Sequence from typing import Any -from ._checkpoint import CheckpointStorage, WorkflowCheckpoint +from ._checkpoint import CheckpointID, CheckpointStorage, WorkflowCheckpoint from ._const import EXECUTOR_STATE_KEY from ._edge import EdgeGroup from ._edge_runner import EdgeRunner, create_edge_runner @@ -36,6 +36,7 @@ def __init__( executors: dict[str, Executor], state: State, ctx: RunnerContext, + workflow_name: str, graph_signature_hash: str, max_iterations: int = 100, ) -> None: @@ -46,6 +47,7 @@ def __init__( executors: Map of executor IDs to executor instances. state: The state for the workflow. ctx: The runner context for the workflow. + workflow_name: The name of the workflow, used for checkpoint labeling. graph_signature_hash: A hash representing the workflow graph topology for checkpoint validation. max_iterations: The maximum number of iterations to run. 
""" @@ -54,6 +56,7 @@ def __init__( self._edge_runners = [create_edge_runner(group, executors) for group in edge_groups] self._edge_runner_map = self._parse_edge_runners(self._edge_runners) self._ctx = ctx + self._workflow_name = workflow_name self._graph_signature_hash = graph_signature_hash # Runner state related attributes @@ -78,6 +81,7 @@ async def run_until_convergence(self) -> AsyncGenerator[WorkflowEvent, None]: raise WorkflowRunnerException("Runner is already running.") self._running = True + previous_checkpoint_id: CheckpointID | None = None try: # Emit any events already produced prior to entering loop if await self._ctx.has_events(): @@ -90,7 +94,7 @@ async def run_until_convergence(self) -> AsyncGenerator[WorkflowEvent, None]: # initial state before any iterations have run. This is only needed if it's not a resume from checkpoint # scenario, since if we are resuming, the caller should have already created a checkpoint to resume from. if await self._ctx.has_messages() and not self._resumed_from_checkpoint: - await self._create_checkpoint_if_enabled() + previous_checkpoint_id = await self._create_checkpoint_if_enabled(previous_checkpoint_id) while self._iteration < self._max_iterations: logger.info(f"Starting superstep {self._iteration + 1}") @@ -137,7 +141,7 @@ async def run_until_convergence(self) -> AsyncGenerator[WorkflowEvent, None]: self._state.commit() # Create checkpoint after each superstep iteration - await self._create_checkpoint_if_enabled() + previous_checkpoint_id = await self._create_checkpoint_if_enabled(previous_checkpoint_id) yield WorkflowEvent.superstep_completed(iteration=self._iteration) @@ -177,7 +181,7 @@ async def _deliver_message_inner(edge_runner: EdgeRunner, message: Message) -> b tasks = [_deliver_messages(source_executor_id, messages) for source_executor_id, messages in messages.items()] await asyncio.gather(*tasks) - async def _create_checkpoint_if_enabled(self) -> str | None: + async def _create_checkpoint_if_enabled(self, 
previous_checkpoint_id: CheckpointID | None) -> CheckpointID | None: """Create a checkpoint if checkpointing is enabled and attach a label and metadata.""" if not self._ctx.has_checkpointing(): return None @@ -188,8 +192,10 @@ async def _create_checkpoint_if_enabled(self) -> str | None: await self._save_executor_states() checkpoint_id = await self._ctx.create_checkpoint( + self._workflow_name, self._graph_signature_hash, self._state, + previous_checkpoint_id, self._iteration, ) @@ -201,7 +207,7 @@ async def _create_checkpoint_if_enabled(self) -> str | None: async def restore_from_checkpoint( self, - checkpoint_id: str, + checkpoint_id: CheckpointID, checkpoint_storage: CheckpointStorage | None = None, ) -> None: """Restore workflow state from a checkpoint. @@ -223,7 +229,7 @@ async def restore_from_checkpoint( if self._ctx.has_checkpointing(): checkpoint = await self._ctx.load_checkpoint(checkpoint_id) elif checkpoint_storage is not None: - checkpoint = await checkpoint_storage.load_checkpoint(checkpoint_id) + checkpoint = await checkpoint_storage.load(checkpoint_id) else: raise WorkflowCheckpointException( "Cannot load checkpoint: no checkpointing configured in context or external storage provided." @@ -234,17 +240,11 @@ async def restore_from_checkpoint( raise WorkflowCheckpointException(f"Checkpoint {checkpoint_id} not found") # Validate the loaded checkpoint against the workflow - checkpoint_hash = (checkpoint.metadata or {}).get("graph_signature") - if checkpoint_hash and self._graph_signature_hash != checkpoint_hash: + if self._graph_signature_hash != checkpoint.graph_signature_hash: raise WorkflowCheckpointException( "Workflow graph has changed since the checkpoint was created. " "Please rebuild the original workflow before resuming." 
) - if self._graph_signature_hash and not checkpoint_hash: - logger.warning( - "Checkpoint %s does not include graph signature metadata; skipping topology validation.", - checkpoint_id, - ) # Restore state self._state.import_state(checkpoint.state) diff --git a/python/packages/core/agent_framework/_workflows/_runner_context.py b/python/packages/core/agent_framework/_workflows/_runner_context.py index d0024f6178..be03bfe496 100644 --- a/python/packages/core/agent_framework/_workflows/_runner_context.py +++ b/python/packages/core/agent_framework/_workflows/_runner_context.py @@ -9,7 +9,7 @@ from enum import Enum from typing import Any, Protocol, TypeVar, runtime_checkable -from ._checkpoint import CheckpointStorage, WorkflowCheckpoint +from ._checkpoint import CheckpointID, CheckpointStorage, WorkflowCheckpoint from ._const import INTERNAL_SOURCE_ID from ._events import WorkflowEvent from ._state import State @@ -194,19 +194,23 @@ def is_streaming(self) -> bool: async def create_checkpoint( self, + workflow_name: str, graph_signature_hash: str, state: State, + previous_checkpoint_id: CheckpointID | None, iteration_count: int, metadata: dict[str, Any] | None = None, - ) -> str: + ) -> CheckpointID: """Create a checkpoint of the current workflow state. Args: + workflow_name: The name of the workflow for which the checkpoint is being created. graph_signature_hash: Hash of the workflow graph topology to validate checkpoint compatibility during restore. state: The state to include in the checkpoint. This is needed to capture the full state of the workflow. The state is not managed by the context itself. + previous_checkpoint_id: The ID of the previous checkpoint, if any, to form a checkpoint chain. iteration_count: The current iteration count of the workflow. metadata: Optional metadata to associate with the checkpoint. @@ -215,7 +219,7 @@ async def create_checkpoint( """ ... 
- async def load_checkpoint(self, checkpoint_id: str) -> WorkflowCheckpoint | None: + async def load_checkpoint(self, checkpoint_id: CheckpointID) -> WorkflowCheckpoint | None: """Load a checkpoint without mutating the current context state. Args: @@ -353,32 +357,36 @@ def has_checkpointing(self) -> bool: async def create_checkpoint( self, + workflow_name: str, graph_signature_hash: str, state: State, + previous_checkpoint_id: CheckpointID | None, iteration_count: int, metadata: dict[str, Any] | None = None, - ) -> str: + ) -> CheckpointID: storage = self._get_effective_checkpoint_storage() if not storage: raise ValueError("Checkpoint storage not configured") checkpoint = WorkflowCheckpoint( + workflow_name=workflow_name, graph_signature_hash=graph_signature_hash, + previous_checkpoint_id=previous_checkpoint_id, messages=dict(self._messages), state=state.export_state(), pending_request_info_events=dict(self._pending_request_info_events), iteration_count=iteration_count, metadata=metadata or {}, ) - checkpoint_id = await storage.save_checkpoint(checkpoint) + checkpoint_id = await storage.save(checkpoint) logger.debug(f"Created checkpoint {checkpoint_id}") return checkpoint_id - async def load_checkpoint(self, checkpoint_id: str) -> WorkflowCheckpoint | None: + async def load_checkpoint(self, checkpoint_id: CheckpointID) -> WorkflowCheckpoint: storage = self._get_effective_checkpoint_storage() if not storage: raise ValueError("Checkpoint storage not configured") - return await storage.load_checkpoint(checkpoint_id) + return await storage.load(checkpoint_id) def reset_for_new_run(self) -> None: """Reset the context for a new workflow run. 
diff --git a/python/packages/core/agent_framework/_workflows/_workflow.py b/python/packages/core/agent_framework/_workflows/_workflow.py
index 3e62f95289..a043a785f9 100644
--- a/python/packages/core/agent_framework/_workflows/_workflow.py
+++ b/python/packages/core/agent_framework/_workflows/_workflow.py
@@ -175,9 +175,9 @@ def __init__(
         executors: dict[str, Executor],
         start_executor: Executor,
         runner_context: RunnerContext,
-        max_iterations: int = DEFAULT_MAX_ITERATIONS,
-        name: str | None = None,
+        name: str,
         description: str | None = None,
+        max_iterations: int = DEFAULT_MAX_ITERATIONS,
         output_executors: list[str] | None = None,
         **kwargs: Any,
     ):
@@ -189,8 +189,12 @@ def __init__(
             start_executor: The starting executor for the workflow.
             runner_context: The RunnerContext instance to be used during workflow execution.
             max_iterations: The maximum number of iterations the workflow will run for convergence.
-            name: Optional human-readable name for the workflow.
-            description: Optional description of what the workflow does.
+            name: A human-readable name for the workflow. This can be used to identify the workflow in
+                checkpoints and telemetry. If the workflow is built using WorkflowBuilder, this will be the
+                name of the builder. This name should be unique across different workflow definitions for
+                better observability and management.
+            description: Optional description of what the workflow does. If the workflow is built using
+                WorkflowBuilder, this will be the description of the builder.
             output_executors: Optional list of executor IDs whose outputs will be considered workflow
                 outputs. If None or empty, all executor outputs are treated as workflow outputs.
             kwargs: Additional keyword arguments. Unused in this implementation.
@@ -199,9 +203,14 @@ def __init__( self.executors = dict(executors) self.start_executor_id = start_executor.id self.max_iterations = max_iterations - self.id = str(uuid.uuid4()) self.name = name self.description = description + # Generate a unique ID for the workflow instance for monitoring purposes. This is not intended to be a + # stable identifier across instances created from the same builder, for that, use the name field. + self.id = str(uuid.uuid4()) + # Capture a canonical fingerprint of the workflow graph so checkpoints can assert they are resumed with + # an equivalent topology. + self.graph_signature_hash = self._hash_graph_signature(self._compute_graph_signature()) # Output events (WorkflowEvent with type='output') from these executors are treated as workflow outputs. # If None or empty, all executor outputs are considered workflow outputs. @@ -215,9 +224,8 @@ def __init__( self.executors, self._state, runner_context, - # Capture a canonical fingerprint of the workflow graph so checkpoints - # can assert they are resumed with an equivalent topology. 
- self._hash_graph_signature(self._compute_graph_signature()), + self.name, + self.graph_signature_hash, max_iterations=max_iterations, ) @@ -237,6 +245,7 @@ def _reset_running_flag(self) -> None: def to_dict(self) -> dict[str, Any]: """Serialize the workflow definition into a JSON-ready dictionary.""" data: dict[str, Any] = { + "name": self.name, "id": self.id, "start_executor_id": self.start_executor_id, "max_iterations": self.max_iterations, @@ -245,9 +254,6 @@ def to_dict(self) -> dict[str, Any]: "output_executors": self._output_executors, } - # Add optional name and description if provided - if self.name is not None: - data["name"] = self.name if self.description is not None: data["description"] = self.description diff --git a/python/packages/core/agent_framework/_workflows/_workflow_builder.py b/python/packages/core/agent_framework/_workflows/_workflow_builder.py index 14fd512e17..ea4aa84448 100644 --- a/python/packages/core/agent_framework/_workflows/_workflow_builder.py +++ b/python/packages/core/agent_framework/_workflows/_workflow_builder.py @@ -2,6 +2,7 @@ import logging import sys +import uuid from collections.abc import Callable, Sequence from dataclasses import dataclass from typing import Any @@ -164,7 +165,11 @@ def __init__( Args: max_iterations: Maximum number of iterations for workflow convergence. Default is 100. - name: Optional human-readable name for the workflow. + name: A human-readable name for the workflow builder. This name will be the identifier + for all workflow instances created from this builder. If not provided, a unique name + will be generated. This will be useful for versioning, monitoring, checkpointing, and + debugging workflows. Keeping this name unique across versions of your workflow definitions + is recommended for better observability and management. description: Optional description of what the workflow does. start_executor: The starting executor for the workflow. 
Can be an Executor instance, SupportsAgentRun instance, or the name of a registered executor factory. @@ -177,7 +182,7 @@ def __init__( self._start_executor: Executor | str | None = None self._checkpoint_storage: CheckpointStorage | None = checkpoint_storage self._max_iterations: int = max_iterations - self._name: str | None = name + self._name: str = name or f"WorkflowBuilder-{uuid.uuid4()!s}" self._description: str | None = description # Maps underlying SupportsAgentRun object id -> wrapped Executor so we reuse the same wrapper # across start_executor / add_edge calls. This avoids multiple AgentExecutor instances @@ -1111,19 +1116,18 @@ async def process(self, text: str, ctx: WorkflowContext[Never, str]) -> None: executors, start_executor, context, - self._max_iterations, - name=self._name, + self._name, description=self._description, + max_iterations=self._max_iterations, output_executors=output_executors, ) build_attributes: dict[str, Any] = { + OtelAttr.WORKFLOW_BUILDER_NAME: self._name, OtelAttr.WORKFLOW_ID: workflow.id, OtelAttr.WORKFLOW_DEFINITION: workflow.to_json(), } - if workflow.name: - build_attributes[OtelAttr.WORKFLOW_NAME] = workflow.name - if workflow.description: - build_attributes[OtelAttr.WORKFLOW_DESCRIPTION] = workflow.description + if self._description: + build_attributes[OtelAttr.WORKFLOW_BUILDER_DESCRIPTION] = self._description span.set_attributes(build_attributes) # Add workflow build completed event diff --git a/python/packages/core/agent_framework/observability.py b/python/packages/core/agent_framework/observability.py index 9a839bb566..7ce4b5e5ea 100644 --- a/python/packages/core/agent_framework/observability.py +++ b/python/packages/core/agent_framework/observability.py @@ -196,6 +196,8 @@ class OtelAttr(str, Enum): # Workflow attributes WORKFLOW_ID = "workflow.id" + WORKFLOW_BUILDER_NAME = "workflow_builder.name" + WORKFLOW_BUILDER_DESCRIPTION = "workflow_builder.description" WORKFLOW_NAME = "workflow.name" WORKFLOW_DESCRIPTION = 
"workflow.description" WORKFLOW_DEFINITION = "workflow.definition" diff --git a/python/packages/core/tests/workflow/test_agent_executor.py b/python/packages/core/tests/workflow/test_agent_executor.py index f767996943..5f089f7fa9 100644 --- a/python/packages/core/tests/workflow/test_agent_executor.py +++ b/python/packages/core/tests/workflow/test_agent_executor.py @@ -84,7 +84,7 @@ async def test_agent_executor_checkpoint_stores_and_restores_state() -> None: assert initial_agent.call_count == 1 # Verify checkpoint was created - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list(wf.name) assert len(checkpoints) > 0 # Get the second checkpoint which should contain the state after processing diff --git a/python/packages/core/tests/workflow/test_checkpoint.py b/python/packages/core/tests/workflow/test_checkpoint.py index 9f6d57b2e1..0aa1d24879 100644 --- a/python/packages/core/tests/workflow/test_checkpoint.py +++ b/python/packages/core/tests/workflow/test_checkpoint.py @@ -5,18 +5,22 @@ from datetime import datetime, timezone from pathlib import Path +import pytest + from agent_framework import ( FileCheckpointStorage, InMemoryCheckpointStorage, WorkflowCheckpoint, + WorkflowCheckpointException, ) def test_workflow_checkpoint_default_values(): - checkpoint = WorkflowCheckpoint() + checkpoint = WorkflowCheckpoint(workflow_name="test-workflow", graph_signature_hash="test-hash") assert checkpoint.checkpoint_id != "" - assert checkpoint.workflow_id == "" + assert checkpoint.workflow_name == "test-workflow" + assert checkpoint.graph_signature_hash == "test-hash" assert checkpoint.timestamp != "" assert checkpoint.messages == {} assert checkpoint.state == {} @@ -30,7 +34,8 @@ def test_workflow_checkpoint_custom_values(): custom_timestamp = datetime.now(timezone.utc).isoformat() checkpoint = WorkflowCheckpoint( checkpoint_id="test-checkpoint-123", - workflow_id="test-workflow-456", + workflow_name="test-workflow-456", + 
graph_signature_hash="test-hash-456", timestamp=custom_timestamp, messages={"executor1": [{"data": "test"}]}, pending_request_info_events={"req123": {"data": "test"}}, @@ -41,7 +46,8 @@ def test_workflow_checkpoint_custom_values(): ) assert checkpoint.checkpoint_id == "test-checkpoint-123" - assert checkpoint.workflow_id == "test-workflow-456" + assert checkpoint.workflow_name == "test-workflow-456" + assert checkpoint.graph_signature_hash == "test-hash-456" assert checkpoint.timestamp == custom_timestamp assert checkpoint.messages == {"executor1": [{"data": "test"}]} assert checkpoint.state == {"key": "value"} @@ -54,20 +60,22 @@ def test_workflow_checkpoint_custom_values(): async def test_memory_checkpoint_storage_save_and_load(): storage = InMemoryCheckpointStorage() checkpoint = WorkflowCheckpoint( - workflow_id="test-workflow", + workflow_name="test-workflow", + graph_signature_hash="test-hash", messages={"executor1": [{"data": "hello"}]}, pending_request_info_events={"req123": {"data": "test"}}, ) # Save checkpoint - saved_id = await storage.save_checkpoint(checkpoint) + saved_id = await storage.save(checkpoint) assert saved_id == checkpoint.checkpoint_id # Load checkpoint - loaded_checkpoint = await storage.load_checkpoint(checkpoint.checkpoint_id) + loaded_checkpoint = await storage.load(checkpoint.checkpoint_id) assert loaded_checkpoint is not None assert loaded_checkpoint.checkpoint_id == checkpoint.checkpoint_id - assert loaded_checkpoint.workflow_id == checkpoint.workflow_id + assert loaded_checkpoint.workflow_name == checkpoint.workflow_name + assert loaded_checkpoint.graph_signature_hash == checkpoint.graph_signature_hash assert loaded_checkpoint.messages == checkpoint.messages assert loaded_checkpoint.pending_request_info_events == checkpoint.pending_request_info_events @@ -75,81 +83,71 @@ async def test_memory_checkpoint_storage_save_and_load(): async def test_memory_checkpoint_storage_load_nonexistent(): storage = InMemoryCheckpointStorage() - 
result = await storage.load_checkpoint("nonexistent-id") - assert result is None + with pytest.raises(WorkflowCheckpointException): + await storage.load("nonexistent-id") -async def test_memory_checkpoint_storage_list_checkpoints(): +async def test_memory_checkpoint_storage_list(): storage = InMemoryCheckpointStorage() # Create checkpoints for different workflows - checkpoint1 = WorkflowCheckpoint(workflow_id="workflow-1") - checkpoint2 = WorkflowCheckpoint(workflow_id="workflow-1") - checkpoint3 = WorkflowCheckpoint(workflow_id="workflow-2") + checkpoint1 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-1") + checkpoint2 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-2") + checkpoint3 = WorkflowCheckpoint(workflow_name="workflow-2", graph_signature_hash="hash-3") - await storage.save_checkpoint(checkpoint1) - await storage.save_checkpoint(checkpoint2) - await storage.save_checkpoint(checkpoint3) + await storage.save(checkpoint1) + await storage.save(checkpoint2) + await storage.save(checkpoint3) - # Test list_checkpoint_ids for workflow-1 - workflow1_checkpoint_ids = await storage.list_checkpoint_ids("workflow-1") + # Test list_ids for workflow-1 + workflow1_checkpoint_ids = await storage.list_ids("workflow-1") assert len(workflow1_checkpoint_ids) == 2 assert checkpoint1.checkpoint_id in workflow1_checkpoint_ids assert checkpoint2.checkpoint_id in workflow1_checkpoint_ids - # Test list_checkpoints for workflow-1 (returns objects) - workflow1_checkpoints = await storage.list_checkpoints("workflow-1") + # Test list for workflow-1 (returns objects) + workflow1_checkpoints = await storage.list("workflow-1") assert len(workflow1_checkpoints) == 2 assert all(isinstance(cp, WorkflowCheckpoint) for cp in workflow1_checkpoints) assert {cp.checkpoint_id for cp in workflow1_checkpoints} == {checkpoint1.checkpoint_id, checkpoint2.checkpoint_id} - # Test list_checkpoint_ids for workflow-2 - workflow2_checkpoint_ids = 
await storage.list_checkpoint_ids("workflow-2") + # Test list_ids for workflow-2 + workflow2_checkpoint_ids = await storage.list_ids("workflow-2") assert len(workflow2_checkpoint_ids) == 1 assert checkpoint3.checkpoint_id in workflow2_checkpoint_ids - # Test list_checkpoints for workflow-2 (returns objects) - workflow2_checkpoints = await storage.list_checkpoints("workflow-2") + # Test list for workflow-2 (returns objects) + workflow2_checkpoints = await storage.list("workflow-2") assert len(workflow2_checkpoints) == 1 assert workflow2_checkpoints[0].checkpoint_id == checkpoint3.checkpoint_id - # Test list_checkpoint_ids for non-existent workflow - empty_checkpoint_ids = await storage.list_checkpoint_ids("nonexistent-workflow") + # Test list_ids for non-existent workflow + empty_checkpoint_ids = await storage.list_ids("nonexistent-workflow") assert len(empty_checkpoint_ids) == 0 - # Test list_checkpoints for non-existent workflow - empty_checkpoints = await storage.list_checkpoints("nonexistent-workflow") + # Test list for non-existent workflow + empty_checkpoints = await storage.list("nonexistent-workflow") assert len(empty_checkpoints) == 0 - # Test list_checkpoint_ids without workflow filter (all checkpoints) - all_checkpoint_ids = await storage.list_checkpoint_ids() - assert len(all_checkpoint_ids) == 3 - expected_ids = {checkpoint1.checkpoint_id, checkpoint2.checkpoint_id, checkpoint3.checkpoint_id} - assert expected_ids.issubset(set(all_checkpoint_ids)) - - # Test list_checkpoints without workflow filter (all checkpoints) - all_checkpoints = await storage.list_checkpoints() - assert len(all_checkpoints) == 3 - assert all(isinstance(cp, WorkflowCheckpoint) for cp in all_checkpoints) - async def test_memory_checkpoint_storage_delete(): storage = InMemoryCheckpointStorage() - checkpoint = WorkflowCheckpoint(workflow_id="test-workflow") + checkpoint = WorkflowCheckpoint(workflow_name="test-workflow", graph_signature_hash="test-hash") # Save checkpoint - await 
storage.save_checkpoint(checkpoint) - assert await storage.load_checkpoint(checkpoint.checkpoint_id) is not None + await storage.save(checkpoint) + assert await storage.load(checkpoint.checkpoint_id) is not None # Delete checkpoint - result = await storage.delete_checkpoint(checkpoint.checkpoint_id) + result = await storage.delete(checkpoint.checkpoint_id) assert result is True # Verify deletion - assert await storage.load_checkpoint(checkpoint.checkpoint_id) is None + with pytest.raises(WorkflowCheckpointException): + await storage.load(checkpoint.checkpoint_id) # Try to delete again - result = await storage.delete_checkpoint(checkpoint.checkpoint_id) + result = await storage.delete(checkpoint.checkpoint_id) assert result is False @@ -157,14 +155,15 @@ async def test_file_checkpoint_storage_save_and_load(): with tempfile.TemporaryDirectory() as temp_dir: storage = FileCheckpointStorage(temp_dir) checkpoint = WorkflowCheckpoint( - workflow_id="test-workflow", + workflow_name="test-workflow", + graph_signature_hash="test-hash", messages={"executor1": [{"data": "hello", "source_id": "test", "target_id": None}]}, state={"key": "value"}, pending_request_info_events={"req123": {"data": "test"}}, ) # Save checkpoint - saved_id = await storage.save_checkpoint(checkpoint) + saved_id = await storage.save(checkpoint) assert saved_id == checkpoint.checkpoint_id # Verify file was created @@ -172,10 +171,11 @@ async def test_file_checkpoint_storage_save_and_load(): assert file_path.exists() # Load checkpoint - loaded_checkpoint = await storage.load_checkpoint(checkpoint.checkpoint_id) + loaded_checkpoint = await storage.load(checkpoint.checkpoint_id) assert loaded_checkpoint is not None assert loaded_checkpoint.checkpoint_id == checkpoint.checkpoint_id - assert loaded_checkpoint.workflow_id == checkpoint.workflow_id + assert loaded_checkpoint.workflow_name == checkpoint.workflow_name + assert loaded_checkpoint.graph_signature_hash == checkpoint.graph_signature_hash assert 
loaded_checkpoint.messages == checkpoint.messages assert loaded_checkpoint.state == checkpoint.state assert loaded_checkpoint.pending_request_info_events == checkpoint.pending_request_info_events @@ -185,72 +185,64 @@ async def test_file_checkpoint_storage_load_nonexistent(): with tempfile.TemporaryDirectory() as temp_dir: storage = FileCheckpointStorage(temp_dir) - result = await storage.load_checkpoint("nonexistent-id") - assert result is None + with pytest.raises(WorkflowCheckpointException): + await storage.load("nonexistent-id") -async def test_file_checkpoint_storage_list_checkpoints(): +async def test_file_checkpoint_storage_list(): with tempfile.TemporaryDirectory() as temp_dir: storage = FileCheckpointStorage(temp_dir) # Create checkpoints for different workflows - checkpoint1 = WorkflowCheckpoint(workflow_id="workflow-1") - checkpoint2 = WorkflowCheckpoint(workflow_id="workflow-1") - checkpoint3 = WorkflowCheckpoint(workflow_id="workflow-2") + checkpoint1 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-1") + checkpoint2 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-2") + checkpoint3 = WorkflowCheckpoint(workflow_name="workflow-2", graph_signature_hash="hash-3") - await storage.save_checkpoint(checkpoint1) - await storage.save_checkpoint(checkpoint2) - await storage.save_checkpoint(checkpoint3) + await storage.save(checkpoint1) + await storage.save(checkpoint2) + await storage.save(checkpoint3) - # Test list_checkpoint_ids for workflow-1 - workflow1_checkpoint_ids = await storage.list_checkpoint_ids("workflow-1") + # Test list_ids for workflow-1 + workflow1_checkpoint_ids = await storage.list_ids("workflow-1") assert len(workflow1_checkpoint_ids) == 2 assert checkpoint1.checkpoint_id in workflow1_checkpoint_ids assert checkpoint2.checkpoint_id in workflow1_checkpoint_ids - # Test list_checkpoints for workflow-1 (returns objects) - workflow1_checkpoints = await storage.list_checkpoints("workflow-1") 
+ # Test list for workflow-1 (returns objects) + workflow1_checkpoints = await storage.list("workflow-1") assert len(workflow1_checkpoints) == 2 assert all(isinstance(cp, WorkflowCheckpoint) for cp in workflow1_checkpoints) checkpoint_ids = {cp.checkpoint_id for cp in workflow1_checkpoints} assert checkpoint_ids == {checkpoint1.checkpoint_id, checkpoint2.checkpoint_id} - # Test list_checkpoint_ids for workflow-2 - workflow2_checkpoint_ids = await storage.list_checkpoint_ids("workflow-2") + # Test list_ids for workflow-2 + workflow2_checkpoint_ids = await storage.list_ids("workflow-2") assert len(workflow2_checkpoint_ids) == 1 assert checkpoint3.checkpoint_id in workflow2_checkpoint_ids - # Test list_checkpoints for workflow-2 (returns objects) - workflow2_checkpoints = await storage.list_checkpoints("workflow-2") + # Test list for workflow-2 (returns objects) + workflow2_checkpoints = await storage.list("workflow-2") assert len(workflow2_checkpoints) == 1 assert workflow2_checkpoints[0].checkpoint_id == checkpoint3.checkpoint_id - # Test list all checkpoints - all_checkpoint_ids = await storage.list_checkpoint_ids() - assert len(all_checkpoint_ids) == 3 - - all_checkpoints = await storage.list_checkpoints() - assert len(all_checkpoints) == 3 - assert all(isinstance(cp, WorkflowCheckpoint) for cp in all_checkpoints) - async def test_file_checkpoint_storage_delete(): with tempfile.TemporaryDirectory() as temp_dir: storage = FileCheckpointStorage(temp_dir) - checkpoint = WorkflowCheckpoint(workflow_id="test-workflow") + checkpoint = WorkflowCheckpoint(workflow_name="test-workflow", graph_signature_hash="test-hash") # Save checkpoint - await storage.save_checkpoint(checkpoint) + await storage.save(checkpoint) file_path = Path(temp_dir) / f"{checkpoint.checkpoint_id}.json" assert file_path.exists() # Delete checkpoint - result = await storage.delete_checkpoint(checkpoint.checkpoint_id) + result = await storage.delete(checkpoint.checkpoint_id) assert result is True 
assert not file_path.exists() # Try to delete again - result = await storage.delete_checkpoint(checkpoint.checkpoint_id) + result = await storage.delete(checkpoint.checkpoint_id) assert result is False @@ -264,8 +256,8 @@ async def test_file_checkpoint_storage_directory_creation(): assert nested_path.is_dir() # Should be able to save checkpoints - checkpoint = WorkflowCheckpoint(workflow_id="test") - await storage.save_checkpoint(checkpoint) + checkpoint = WorkflowCheckpoint(workflow_name="test-workflow", graph_signature_hash="test-hash") + await storage.save(checkpoint) file_path = nested_path / f"{checkpoint.checkpoint_id}.json" assert file_path.exists() @@ -280,8 +272,8 @@ async def test_file_checkpoint_storage_corrupted_file(): with open(corrupted_file, "w") as f: # noqa: ASYNC230 f.write("{ invalid json }") - # list_checkpoints should handle the corrupted file gracefully - checkpoints = await storage.list_checkpoints("any-workflow") + # list should handle the corrupted file gracefully + checkpoints = await storage.list("any-workflow") assert checkpoints == [] @@ -291,15 +283,16 @@ async def test_file_checkpoint_storage_json_serialization(): # Create checkpoint with complex nested data checkpoint = WorkflowCheckpoint( - workflow_id="complex-workflow", + workflow_name="test-workflow", + graph_signature_hash="test-hash", messages={"executor1": [{"data": {"nested": {"value": 42}}, "source_id": "test", "target_id": None}]}, state={"list": [1, 2, 3], "dict": {"a": "b", "c": {"d": "e"}}, "bool": True, "null": None}, pending_request_info_events={"req123": {"data": "test"}}, ) # Save and load - await storage.save_checkpoint(checkpoint) - loaded = await storage.load_checkpoint(checkpoint.checkpoint_id) + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) assert loaded is not None assert loaded.messages == checkpoint.messages @@ -326,13 +319,13 @@ def test_checkpoint_storage_protocol_compliance(): for storage in [memory_storage, 
file_storage]: # Test that all protocol methods exist and are callable - assert hasattr(storage, "save_checkpoint") - assert callable(storage.save_checkpoint) - assert hasattr(storage, "load_checkpoint") - assert callable(storage.load_checkpoint) - assert hasattr(storage, "list_checkpoint_ids") - assert callable(storage.list_checkpoint_ids) - assert hasattr(storage, "list_checkpoints") - assert callable(storage.list_checkpoints) - assert hasattr(storage, "delete_checkpoint") - assert callable(storage.delete_checkpoint) + assert hasattr(storage, "save") + assert callable(storage.save) + assert hasattr(storage, "load") + assert callable(storage.load) + assert hasattr(storage, "list") + assert callable(storage.list) + assert hasattr(storage, "delete") + assert callable(storage.delete) + assert hasattr(storage, "list_ids") + assert callable(storage.list_ids) diff --git a/python/packages/core/tests/workflow/test_checkpoint_validation.py b/python/packages/core/tests/workflow/test_checkpoint_validation.py index c028a94b40..38c54b81da 100644 --- a/python/packages/core/tests/workflow/test_checkpoint_validation.py +++ b/python/packages/core/tests/workflow/test_checkpoint_validation.py @@ -43,7 +43,7 @@ async def test_resume_fails_when_graph_mismatch() -> None: # Run once to create checkpoints _ = [event async for event in workflow.run("hello", stream=True)] # noqa: F841 - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list(workflow.name) assert checkpoints, "expected at least one checkpoint to be created" target_checkpoint = checkpoints[-1] @@ -66,7 +66,7 @@ async def test_resume_succeeds_when_graph_matches() -> None: workflow = build_workflow(storage, finish_id="finish") _ = [event async for event in workflow.run("hello", stream=True)] # noqa: F841 - checkpoints = sorted(await storage.list_checkpoints(), key=lambda c: c.timestamp) + checkpoints = sorted(await storage.list(workflow.name), key=lambda c: c.timestamp) target_checkpoint = 
checkpoints[0] resumed_workflow = build_workflow(storage, finish_id="finish") diff --git a/python/packages/core/tests/workflow/test_request_info_and_response.py b/python/packages/core/tests/workflow/test_request_info_and_response.py index 75d11b15af..7681f75f0b 100644 --- a/python/packages/core/tests/workflow/test_request_info_and_response.py +++ b/python/packages/core/tests/workflow/test_request_info_and_response.py @@ -349,7 +349,7 @@ async def test_checkpoint_with_pending_request_info_events(self): assert request_info_event.source_executor_id == "approval_executor" # Step 2: List checkpoints to find the one with our pending request - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list(workflow.name) assert len(checkpoints) > 0, "No checkpoints were created during workflow execution" # Find the checkpoint with our pending request diff --git a/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py b/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py index dfbf134185..7ed504ef0c 100644 --- a/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py +++ b/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py @@ -46,7 +46,7 @@ async def test_rehydrate_request_info_event() -> None: runner_context = InProcRunnerContext(InMemoryCheckpointStorage()) await runner_context.add_request_info_event(request_info_event) - checkpoint_id = await runner_context.create_checkpoint(State(), iteration_count=1) + checkpoint_id = await runner_context.create_checkpoint("test_name", "test_hash", State(), None, iteration_count=1) checkpoint = await runner_context.load_checkpoint(checkpoint_id) assert checkpoint is not None @@ -85,7 +85,7 @@ async def test_request_info_event_serializes_non_json_payloads() -> None: await runner_context.add_request_info_event(req_1) await runner_context.add_request_info_event(req_2) - checkpoint_id = await runner_context.create_checkpoint(State(), 
iteration_count=1) + checkpoint_id = await runner_context.create_checkpoint("test_name", "test_hash", State(), None, iteration_count=1) checkpoint = await runner_context.load_checkpoint(checkpoint_id) # Should be JSON serializable despite datetime/slots diff --git a/python/packages/core/tests/workflow/test_runner.py b/python/packages/core/tests/workflow/test_runner.py index d61aa22cca..bd5531d588 100644 --- a/python/packages/core/tests/workflow/test_runner.py +++ b/python/packages/core/tests/workflow/test_runner.py @@ -61,7 +61,14 @@ def test_create_runner(): executor_b.id: executor_b, } - runner = Runner(edge_groups, executors, state=State(), ctx=InProcRunnerContext(), graph_signature_hash="test_hash") + runner = Runner( + edge_groups, + executors, + state=State(), + ctx=InProcRunnerContext(), + workflow_name="test_name", + graph_signature_hash="test_hash", + ) assert runner.context is not None and isinstance(runner.context, RunnerContext) @@ -84,7 +91,7 @@ async def test_runner_run_until_convergence(): state = State() ctx = InProcRunnerContext() - runner = Runner(edges, executors, state, ctx, graph_signature_hash="test_hash") + runner = Runner(edges, executors, state, ctx, "test_name", graph_signature_hash="test_hash") result: int | None = None await executor_a.execute( @@ -122,7 +129,7 @@ async def test_runner_run_until_convergence_not_completed(): state = State() ctx = InProcRunnerContext() - runner = Runner(edges, executors, state, ctx, max_iterations=5, graph_signature_hash="test_hash") + runner = Runner(edges, executors, state, ctx, "test_name", graph_signature_hash="test_hash", max_iterations=5) await executor_a.execute( MockMessage(data=0), @@ -156,7 +163,7 @@ async def test_runner_already_running(): state = State() ctx = InProcRunnerContext() - runner = Runner(edges, executors, state, ctx, graph_signature_hash="test_hash") + runner = Runner(edges, executors, state, ctx, "test_name", graph_signature_hash="test_hash") await executor_a.execute( 
MockMessage(data=0), @@ -176,7 +183,7 @@ async def _run(): async def test_runner_emits_runner_completion_for_agent_response_without_targets(): ctx = InProcRunnerContext() - runner = Runner([], {}, State(), ctx, graph_signature_hash="test_hash") + runner = Runner([], {}, State(), ctx, "test_name", graph_signature_hash="test_hash") await ctx.send_message( Message( @@ -228,7 +235,7 @@ async def test_runner_cancellation_stops_active_executor(): shared_state = State() ctx = InProcRunnerContext() - runner = Runner(edges, executors, shared_state, ctx, graph_signature_hash="test_hash") + runner = Runner(edges, executors, shared_state, ctx, "test_name", graph_signature_hash="test_hash") await executor_a.execute( MockMessage(data=0), diff --git a/python/packages/core/tests/workflow/test_serialization.py b/python/packages/core/tests/workflow/test_serialization.py index f579c1be76..55284db407 100644 --- a/python/packages/core/tests/workflow/test_serialization.py +++ b/python/packages/core/tests/workflow/test_serialization.py @@ -647,12 +647,11 @@ def test_workflow_name_description_serialization(self) -> None: # Test 2: Without name and description (defaults) workflow2 = WorkflowBuilder(start_executor=SampleExecutor(id="e2")).build() - assert workflow2.name is None + assert workflow2.name is not None assert workflow2.description is None data2 = workflow2.to_dict() - assert "name" not in data2 # Should not include None values - assert "description" not in data2 + assert "description" not in data2 # Should not include None values # Test 3: With only name (no description) workflow3 = WorkflowBuilder(name="Named Only", start_executor=SampleExecutor(id="e3")).build() diff --git a/python/packages/core/tests/workflow/test_sub_workflow.py b/python/packages/core/tests/workflow/test_sub_workflow.py index 55afad880f..d1db6882a1 100644 --- a/python/packages/core/tests/workflow/test_sub_workflow.py +++ b/python/packages/core/tests/workflow/test_sub_workflow.py @@ -595,7 +595,7 @@ async def 
test_sub_workflow_checkpoint_restore_no_duplicate_requests() -> None: assert first_request_id is not None # Get checkpoint - checkpoints = await storage.list_checkpoints(workflow1.id) + checkpoints = await storage.list(workflow1.name) checkpoint_id = max(checkpoints, key=lambda cp: cp.iteration_count).checkpoint_id # Step 2: Resume workflow from checkpoint diff --git a/python/packages/core/tests/workflow/test_workflow.py b/python/packages/core/tests/workflow/test_workflow.py index e7c38ebea9..8079e464e6 100644 --- a/python/packages/core/tests/workflow/test_workflow.py +++ b/python/packages/core/tests/workflow/test_workflow.py @@ -335,12 +335,9 @@ async def test_workflow_run_stream_from_checkpoint_invalid_checkpoint( ) # Attempt to run from non-existent checkpoint should fail - try: + with pytest.raises(WorkflowCheckpointException, match="No checkpoint found with ID nonexistent_checkpoint_id"): async for _ in workflow.run(checkpoint_id="nonexistent_checkpoint_id", stream=True): pass - raise AssertionError("Expected WorkflowCheckpointException to be raised") - except WorkflowCheckpointException as e: - assert str(e) == "Checkpoint nonexistent_checkpoint_id not found" async def test_workflow_run_stream_from_checkpoint_with_external_storage( @@ -354,12 +351,14 @@ async def test_workflow_run_stream_from_checkpoint_with_external_storage( from agent_framework import WorkflowCheckpoint test_checkpoint = WorkflowCheckpoint( - workflow_id="test-workflow", + workflow_name="test-workflow", + graph_signature_hash="test-graph-signature", + previous_checkpoint_id=None, messages={}, state={}, iteration_count=0, ) - checkpoint_id = await storage.save_checkpoint(test_checkpoint) + checkpoint_id = await storage.save(test_checkpoint) # Create a workflow WITHOUT checkpointing workflow_without_checkpointing = ( @@ -385,23 +384,25 @@ async def test_workflow_run_from_checkpoint_non_streaming(simple_executor: Execu with tempfile.TemporaryDirectory() as temp_dir: storage = 
FileCheckpointStorage(temp_dir) + # Build workflow with checkpointing + workflow = ( + WorkflowBuilder(start_executor=simple_executor, checkpoint_storage=storage) + .add_edge(simple_executor, simple_executor) + .build() + ) + # Create a test checkpoint manually in storage from agent_framework import WorkflowCheckpoint test_checkpoint = WorkflowCheckpoint( - workflow_id="test-workflow", + workflow_name=workflow.name, + graph_signature_hash=workflow.graph_signature_hash, + previous_checkpoint_id=None, messages={}, state={}, iteration_count=0, ) - checkpoint_id = await storage.save_checkpoint(test_checkpoint) - - # Build workflow with checkpointing - workflow = ( - WorkflowBuilder(start_executor=simple_executor, checkpoint_storage=storage) - .add_edge(simple_executor, simple_executor) - .build() - ) + checkpoint_id = await storage.save(test_checkpoint) # Test non-streaming run method with checkpoint_id result = await workflow.run(checkpoint_id=checkpoint_id) @@ -416,11 +417,19 @@ async def test_workflow_run_stream_from_checkpoint_with_responses( with tempfile.TemporaryDirectory() as temp_dir: storage = FileCheckpointStorage(temp_dir) + # Build workflow with checkpointing + workflow = ( + WorkflowBuilder(start_executor=simple_executor, checkpoint_storage=storage) + .add_edge(simple_executor, simple_executor) + .build() + ) + # Create a test checkpoint manually in storage from agent_framework import WorkflowCheckpoint test_checkpoint = WorkflowCheckpoint( - workflow_id="test-workflow", + workflow_name=workflow.name, + graph_signature_hash=workflow.graph_signature_hash, messages={}, state={}, pending_request_info_events={ @@ -433,14 +442,7 @@ async def test_workflow_run_stream_from_checkpoint_with_responses( }, iteration_count=0, ) - checkpoint_id = await storage.save_checkpoint(test_checkpoint) - - # Build workflow with checkpointing - workflow = ( - WorkflowBuilder(start_executor=simple_executor, checkpoint_storage=storage) - .add_edge(simple_executor, simple_executor) 
- .build() - ) + checkpoint_id = await storage.save(test_checkpoint) # Resume from checkpoint - pending request events should be emitted events: list[WorkflowEvent] = [] @@ -542,7 +544,7 @@ async def test_workflow_checkpoint_runtime_only_configuration( assert result.get_final_state() == WorkflowRunState.IDLE # Verify checkpoints were created - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list(workflow.name) assert len(checkpoints) > 0 # Find a superstep checkpoint to resume from @@ -592,8 +594,8 @@ async def test_workflow_checkpoint_runtime_overrides_buildtime( assert result is not None # Verify checkpoints were created in runtime storage, not build-time storage - buildtime_checkpoints = await buildtime_storage.list_checkpoints() - runtime_checkpoints = await runtime_storage.list_checkpoints() + buildtime_checkpoints = await buildtime_storage.list(workflow.name) + runtime_checkpoints = await runtime_storage.list(workflow.name) assert len(runtime_checkpoints) > 0, "Runtime storage should have checkpoints" assert len(buildtime_checkpoints) == 0, "Build-time storage should have no checkpoints when overridden" diff --git a/python/packages/core/tests/workflow/test_workflow_agent.py b/python/packages/core/tests/workflow/test_workflow_agent.py index c121f369fa..00d5c7e6a6 100644 --- a/python/packages/core/tests/workflow/test_workflow_agent.py +++ b/python/packages/core/tests/workflow/test_workflow_agent.py @@ -609,7 +609,7 @@ async def test_checkpoint_storage_passed_to_workflow(self) -> None: # Drain workflow events to get checkpoint # The workflow should have created checkpoints - checkpoints = await checkpoint_storage.list_checkpoints(workflow.id) + checkpoints = await checkpoint_storage.list(workflow.name) assert len(checkpoints) > 0, "Checkpoints should have been created when checkpoint_storage is provided" async def test_agent_executor_output_response_false_filters_streaming_events(self): diff --git 
a/python/packages/core/tests/workflow/test_workflow_observability.py b/python/packages/core/tests/workflow/test_workflow_observability.py index 6b47117ea5..7ef11cd7da 100644 --- a/python/packages/core/tests/workflow/test_workflow_observability.py +++ b/python/packages/core/tests/workflow/test_workflow_observability.py @@ -306,8 +306,8 @@ async def test_end_to_end_workflow_tracing(span_exporter: InMemorySpanExporter) assert len(build_spans_with_metadata) == 1 metadata_build_span = build_spans_with_metadata[0] assert metadata_build_span.attributes is not None - assert metadata_build_span.attributes.get(OtelAttr.WORKFLOW_NAME) == "Test Pipeline" - assert metadata_build_span.attributes.get(OtelAttr.WORKFLOW_DESCRIPTION) == "Test workflow description" + assert metadata_build_span.attributes.get(OtelAttr.WORKFLOW_BUILDER_NAME) == "Test Pipeline" + assert metadata_build_span.attributes.get(OtelAttr.WORKFLOW_BUILDER_DESCRIPTION) == "Test workflow description" # Clear spans to separate build from run tracing span_exporter.clear() @@ -451,7 +451,7 @@ async def test_message_trace_context_serialization(span_exporter: InMemorySpanEx await ctx.send_message(message) # Create a checkpoint that includes the message - checkpoint_id = await ctx.create_checkpoint(State(), 0) + checkpoint_id = await ctx.create_checkpoint("test_name", "test_hash", State(), None, 0) checkpoint = await ctx.load_checkpoint(checkpoint_id) assert checkpoint is not None From 0de2ffaec1823d56d0e5f64665a3c2d878808fca Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Sun, 8 Feb 2026 15:04:09 -0800 Subject: [PATCH 04/16] Add get_latest tests --- .../core/tests/workflow/test_checkpoint.py | 177 ++++++++++++++++++ 1 file changed, 177 insertions(+) diff --git a/python/packages/core/tests/workflow/test_checkpoint.py b/python/packages/core/tests/workflow/test_checkpoint.py index 0aa1d24879..c026d6dae5 100644 --- a/python/packages/core/tests/workflow/test_checkpoint.py +++ 
b/python/packages/core/tests/workflow/test_checkpoint.py @@ -329,3 +329,180 @@ def test_checkpoint_storage_protocol_compliance(): assert callable(storage.delete) assert hasattr(storage, "list_ids") assert callable(storage.list_ids) + assert hasattr(storage, "get_latest") + assert callable(storage.get_latest) + + +def test_workflow_checkpoint_to_dict(): + checkpoint = WorkflowCheckpoint( + checkpoint_id="test-id", + workflow_name="test-workflow", + graph_signature_hash="test-hash", + messages={"executor1": [{"data": "test"}]}, + state={"key": "value"}, + iteration_count=5, + ) + + result = checkpoint.to_dict() + + assert result["checkpoint_id"] == "test-id" + assert result["workflow_name"] == "test-workflow" + assert result["graph_signature_hash"] == "test-hash" + assert result["messages"] == {"executor1": [{"data": "test"}]} + assert result["state"] == {"key": "value"} + assert result["iteration_count"] == 5 + + +def test_workflow_checkpoint_previous_checkpoint_id(): + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + previous_checkpoint_id="previous-id-123", + ) + + assert checkpoint.previous_checkpoint_id == "previous-id-123" + + +async def test_memory_checkpoint_storage_get_latest(): + import asyncio + + storage = InMemoryCheckpointStorage() + + # Create checkpoints with small delays to ensure different timestamps + checkpoint1 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-1") + await asyncio.sleep(0.01) + checkpoint2 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-2") + await asyncio.sleep(0.01) + checkpoint3 = WorkflowCheckpoint(workflow_name="workflow-2", graph_signature_hash="hash-3") + + await storage.save(checkpoint1) + await storage.save(checkpoint2) + await storage.save(checkpoint3) + + # Test get_latest for workflow-1 + latest = await storage.get_latest("workflow-1") + assert latest is not None + assert latest.checkpoint_id == 
checkpoint2.checkpoint_id + + # Test get_latest for workflow-2 + latest2 = await storage.get_latest("workflow-2") + assert latest2 is not None + assert latest2.checkpoint_id == checkpoint3.checkpoint_id + + # Test get_latest for non-existent workflow + latest_none = await storage.get_latest("nonexistent-workflow") + assert latest_none is None + + +async def test_file_checkpoint_storage_get_latest(): + import asyncio + + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + # Create checkpoints with small delays to ensure different timestamps + checkpoint1 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-1") + await asyncio.sleep(0.01) + checkpoint2 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-2") + await asyncio.sleep(0.01) + checkpoint3 = WorkflowCheckpoint(workflow_name="workflow-2", graph_signature_hash="hash-3") + + await storage.save(checkpoint1) + await storage.save(checkpoint2) + await storage.save(checkpoint3) + + # Test get_latest for workflow-1 + latest = await storage.get_latest("workflow-1") + assert latest is not None + assert latest.checkpoint_id == checkpoint2.checkpoint_id + + # Test get_latest for workflow-2 + latest2 = await storage.get_latest("workflow-2") + assert latest2 is not None + assert latest2.checkpoint_id == checkpoint3.checkpoint_id + + # Test get_latest for non-existent workflow + latest_none = await storage.get_latest("nonexistent-workflow") + assert latest_none is None + + +async def test_file_checkpoint_storage_list_ids_corrupted_file(): + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + # Create a valid checkpoint first + checkpoint = WorkflowCheckpoint(workflow_name="test-workflow", graph_signature_hash="test-hash") + await storage.save(checkpoint) + + # Create a corrupted JSON file + corrupted_file = Path(temp_dir) / "corrupted.json" + with open(corrupted_file, "w") as f: # noqa: 
ASYNC230 + f.write("{ invalid json }") + + # list_ids should handle the corrupted file gracefully + checkpoint_ids = await storage.list_ids("test-workflow") + assert len(checkpoint_ids) == 1 + assert checkpoint.checkpoint_id in checkpoint_ids + + +async def test_file_checkpoint_storage_list_ids_empty(): + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + # Test list_ids on empty storage + checkpoint_ids = await storage.list_ids("any-workflow") + assert checkpoint_ids == [] + + +async def test_workflow_checkpoint_chaining_via_previous_checkpoint_id(): + """Test that consecutive checkpoints created by a workflow are properly chained via previous_checkpoint_id.""" + from typing_extensions import Never + + from agent_framework import WorkflowBuilder, WorkflowContext, handler + from agent_framework._workflows._executor import Executor + + class StartExecutor(Executor): + @handler + async def run(self, message: str, ctx: WorkflowContext[str]) -> None: + await ctx.send_message(message, target_id="middle") + + class MiddleExecutor(Executor): + @handler + async def process(self, message: str, ctx: WorkflowContext[str]) -> None: + await ctx.send_message(message + "-processed", target_id="finish") + + class FinishExecutor(Executor): + @handler + async def finish(self, message: str, ctx: WorkflowContext[Never, str]) -> None: + await ctx.yield_output(message + "-done") + + storage = InMemoryCheckpointStorage() + + start = StartExecutor(id="start") + middle = MiddleExecutor(id="middle") + finish = FinishExecutor(id="finish") + + workflow = ( + WorkflowBuilder(max_iterations=10, start_executor=start, checkpoint_storage=storage) + .add_edge(start, middle) + .add_edge(middle, finish) + .build() + ) + + # Run workflow - this creates checkpoints at each superstep + _ = [event async for event in workflow.run("hello", stream=True)] + + # Get all checkpoints sorted by timestamp + checkpoints = sorted(await storage.list(workflow.name), 
key=lambda c: c.timestamp) + + # Should have multiple checkpoints (one initial + one per superstep) + assert len(checkpoints) >= 2, f"Expected at least 2 checkpoints, got {len(checkpoints)}" + + # Verify chaining: first checkpoint has no previous + assert checkpoints[0].previous_checkpoint_id is None + + # Subsequent checkpoints should chain to the previous one + for i in range(1, len(checkpoints)): + assert checkpoints[i].previous_checkpoint_id == checkpoints[i - 1].checkpoint_id, ( + f"Checkpoint {i} should chain to checkpoint {i - 1}" + ) From b7be3e8f29d5b8681e5dbe92de981ab1386b73d8 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Sun, 8 Feb 2026 16:24:10 -0800 Subject: [PATCH 05/16] Increase test coverage --- .../_workflows/_checkpoint_encoding.py | 6 +- .../core/tests/workflow/test_runner.py | 581 ++++++++++++++++++ python/uv.lock | 120 ++-- 3 files changed, 644 insertions(+), 63 deletions(-) diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py b/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py index ede7b1c28d..1f0cda8bb2 100644 --- a/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py +++ b/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py @@ -117,7 +117,7 @@ def _decode(value: Any) -> Any: return value -def _verify_type(obj: Any, expected_type_key: str | None) -> None: +def _verify_type(obj: Any, expected_type_key: str) -> None: """Verify that an unpickled object matches its recorded type. Args: @@ -127,10 +127,6 @@ def _verify_type(obj: Any, expected_type_key: str | None) -> None: Raises: CheckpointDecodingError: If the types don't match. 
""" - if expected_type_key is None: - # No type recorded (legacy checkpoint), skip verification - return - actual_type_key = _type_to_key(type(obj)) # type: ignore if actual_type_key != expected_type_key: raise CheckpointDecodingError( diff --git a/python/packages/core/tests/workflow/test_runner.py b/python/packages/core/tests/workflow/test_runner.py index bd5531d588..e4e4e67994 100644 --- a/python/packages/core/tests/workflow/test_runner.py +++ b/python/packages/core/tests/workflow/test_runner.py @@ -2,6 +2,7 @@ import asyncio from dataclasses import dataclass +from unittest.mock import AsyncMock, MagicMock import pytest @@ -9,6 +10,9 @@ AgentExecutorResponse, AgentResponse, Executor, + InMemoryCheckpointStorage, + WorkflowCheckpoint, + WorkflowCheckpointException, WorkflowContext, WorkflowConvergenceException, WorkflowEvent, @@ -16,6 +20,7 @@ WorkflowRunState, handler, ) +from agent_framework._workflows._const import EXECUTOR_STATE_KEY from agent_framework._workflows._edge import SingleEdgeGroup from agent_framework._workflows._runner import Runner from agent_framework._workflows._runner_context import ( @@ -266,3 +271,579 @@ async def run_workflow(): assert executor_a.completed_count == 1 assert executor_b.started_count == 1 assert executor_b.completed_count == 0 # Should NOT have completed due to cancellation + + +class FailingExecutor(Executor): + """An executor that fails during execution.""" + + def __init__(self, id: str, fail_on_data: int = 5): + super().__init__(id=id) + self.fail_on_data = fail_on_data + + @handler + async def handle(self, message: MockMessage, ctx: WorkflowContext[MockMessage, int]) -> None: + if message.data == self.fail_on_data: + raise RuntimeError("Simulated executor failure") + await ctx.send_message(MockMessage(data=message.data + 1)) + + +async def test_runner_iteration_exception_drains_events(): + """Test that when an executor raises an exception, events are drained before propagating.""" + executor_a = 
FailingExecutor(id="executor_a", fail_on_data=2) + executor_b = MockExecutor(id="executor_b") + + edges = [ + SingleEdgeGroup(executor_a.id, executor_b.id), + SingleEdgeGroup(executor_b.id, executor_a.id), + ] + + executors: dict[str, Executor] = { + executor_a.id: executor_a, + executor_b.id: executor_b, + } + state = State() + ctx = InProcRunnerContext() + + runner = Runner(edges, executors, state, ctx, "test_name", graph_signature_hash="test_hash") + + await executor_a.execute( + MockMessage(data=0), + ["START"], + state, + ctx, + ) + + events: list[WorkflowEvent] = [] + with pytest.raises(RuntimeError, match="Simulated executor failure"): + async for event in runner.run_until_convergence(): + events.append(event) + + # There should be some events emitted before the failure + assert len(events) > 0 + + +async def test_runner_reset_iteration_count(): + """Test that reset_iteration_count works correctly.""" + executor_a = MockExecutor(id="executor_a") + state = State() + ctx = InProcRunnerContext() + + runner = Runner([], {executor_a.id: executor_a}, state, ctx, "test_name", graph_signature_hash="test_hash") + runner._iteration = 10 + + runner.reset_iteration_count() + + assert runner._iteration == 0 + + +class CheckpointingContext(InProcRunnerContext): + """A context that supports checkpointing for testing.""" + + def __init__(self, storage: InMemoryCheckpointStorage | None = None): + super().__init__() + self._storage = storage or InMemoryCheckpointStorage() + self._checkpointing_enabled = True + + def has_checkpointing(self) -> bool: + return self._checkpointing_enabled + + async def create_checkpoint( + self, + workflow_name: str, + graph_signature_hash: str, + state: State, + previous_checkpoint_id: str | None, + iteration: int, + ) -> str: + checkpoint = WorkflowCheckpoint( + workflow_name=workflow_name, + graph_signature_hash=graph_signature_hash, + state=state.export(), + previous_checkpoint_id=previous_checkpoint_id, + iteration_count=iteration, + ) + 
return await self._storage.save(checkpoint) + + async def load_checkpoint(self, checkpoint_id: str) -> WorkflowCheckpoint | None: + try: + return await self._storage.load(checkpoint_id) + except WorkflowCheckpointException: + return None + + async def apply_checkpoint(self, checkpoint: WorkflowCheckpoint) -> None: + # Restore messages from checkpoint + for source_id, messages in checkpoint.messages.items(): + for msg_data in messages: + await self.send_message(Message(data=msg_data, source_id=source_id)) + + +class FailingCheckpointContext(InProcRunnerContext): + """A context that fails during checkpoint creation.""" + + def has_checkpointing(self) -> bool: + return True + + async def create_checkpoint( + self, + workflow_name: str, + graph_signature_hash: str, + state: State, + previous_checkpoint_id: str | None, + iteration: int, + ) -> str: + raise RuntimeError("Simulated checkpoint failure") + + +async def test_runner_checkpoint_creation_failure(): + """Test that checkpoint creation failure is handled gracefully.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + + edges = [ + SingleEdgeGroup(executor_a.id, executor_b.id), + SingleEdgeGroup(executor_b.id, executor_a.id), + ] + + executors: dict[str, Executor] = { + executor_a.id: executor_a, + executor_b.id: executor_b, + } + state = State() + ctx = FailingCheckpointContext() + + runner = Runner(edges, executors, state, ctx, "test_name", graph_signature_hash="test_hash") + + await executor_a.execute( + MockMessage(data=0), + ["START"], + state, + ctx, + ) + + # Should complete without raising, even though checkpointing fails + result: int | None = None + async for event in runner.run_until_convergence(): + if event.type == "output": + result = event.data + + assert result == 10 + + +async def test_runner_restore_from_checkpoint_with_external_storage(): + """Test restoring from checkpoint using external storage when context has no checkpointing.""" + executor_a = 
MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + + edges = [ + SingleEdgeGroup(executor_a.id, executor_b.id), + SingleEdgeGroup(executor_b.id, executor_a.id), + ] + + executors: dict[str, Executor] = { + executor_a.id: executor_a, + executor_b.id: executor_b, + } + state = State() + ctx = InProcRunnerContext() # No checkpointing enabled + + runner = Runner(edges, executors, state, ctx, "test_name", graph_signature_hash="test_hash") + + # Create a checkpoint manually + storage = InMemoryCheckpointStorage() + checkpoint = WorkflowCheckpoint( + workflow_name="test_name", + graph_signature_hash="test_hash", + state={"test_key": "test_value"}, + iteration_count=5, + ) + checkpoint_id = await storage.save(checkpoint) + + # Restore using external storage + await runner.restore_from_checkpoint(checkpoint_id, checkpoint_storage=storage) + + assert runner._resumed_from_checkpoint is True + assert runner._iteration == 5 + assert state.get("test_key") == "test_value" + + +async def test_runner_restore_from_checkpoint_no_storage(): + """Test that restore fails when no checkpointing and no external storage.""" + state = State() + ctx = InProcRunnerContext() + + runner = Runner([], {}, state, ctx, "test_name", graph_signature_hash="test_hash") + + with pytest.raises(WorkflowCheckpointException, match="Cannot load checkpoint"): + await runner.restore_from_checkpoint("nonexistent-id") + + +async def test_runner_restore_from_checkpoint_not_found(): + """Test that restore fails when checkpoint is not found.""" + storage = InMemoryCheckpointStorage() + ctx = CheckpointingContext(storage) + state = State() + + runner = Runner([], {}, state, ctx, "test_name", graph_signature_hash="test_hash") + + with pytest.raises(WorkflowCheckpointException, match="not found"): + await runner.restore_from_checkpoint("nonexistent-id") + + +async def test_runner_restore_from_checkpoint_graph_hash_mismatch(): + """Test that restore fails when graph hash doesn't match.""" + 
storage = InMemoryCheckpointStorage() + ctx = CheckpointingContext(storage) + state = State() + + runner = Runner([], {}, state, ctx, "test_name", graph_signature_hash="current_hash") + + # Create a checkpoint with a different graph hash + checkpoint = WorkflowCheckpoint( + workflow_name="test_name", + graph_signature_hash="different_hash", + state={}, + iteration_count=5, + ) + checkpoint_id = await storage.save(checkpoint) + + with pytest.raises(WorkflowCheckpointException, match="Workflow graph has changed"): + await runner.restore_from_checkpoint(checkpoint_id) + + +async def test_runner_restore_from_checkpoint_generic_exception(): + """Test that generic exceptions during restore are wrapped in WorkflowCheckpointException.""" + state = State() + + # Create a mock context that raises a generic exception + mock_ctx = MagicMock(spec=InProcRunnerContext) + mock_ctx.has_checkpointing.return_value = True + mock_ctx.load_checkpoint = AsyncMock(side_effect=ValueError("Unexpected error")) + + runner = Runner([], {}, state, mock_ctx, "test_name", graph_signature_hash="test_hash") + + with pytest.raises(WorkflowCheckpointException, match="Failed to restore from checkpoint"): + await runner.restore_from_checkpoint("some-id") + + +async def test_runner_restore_executor_states_invalid_states_type(): + """Test that restore fails when executor states is not a dict.""" + executor_a = MockExecutor(id="executor_a") + state = State() + state.set(EXECUTOR_STATE_KEY, "not_a_dict") + state.commit() + + ctx = InProcRunnerContext() + runner = Runner([], {executor_a.id: executor_a}, state, ctx, "test_name", graph_signature_hash="test_hash") + + with pytest.raises(WorkflowCheckpointException, match="not a dictionary"): + await runner._restore_executor_states() + + +async def test_runner_restore_executor_states_invalid_executor_id_type(): + """Test that restore fails when executor ID is not a string.""" + executor_a = MockExecutor(id="executor_a") + state = State() + 
state.set(EXECUTOR_STATE_KEY, {123: {"key": "value"}}) # Non-string key + state.commit() + + ctx = InProcRunnerContext() + runner = Runner([], {executor_a.id: executor_a}, state, ctx, "test_name", graph_signature_hash="test_hash") + + with pytest.raises(WorkflowCheckpointException, match="not a string"): + await runner._restore_executor_states() + + +async def test_runner_restore_executor_states_invalid_state_type(): + """Test that restore fails when executor state is not a dict[str, Any].""" + executor_a = MockExecutor(id="executor_a") + state = State() + state.set(EXECUTOR_STATE_KEY, {"executor_a": "not_a_dict"}) + state.commit() + + ctx = InProcRunnerContext() + runner = Runner([], {executor_a.id: executor_a}, state, ctx, "test_name", graph_signature_hash="test_hash") + + with pytest.raises(WorkflowCheckpointException, match="not a dict"): + await runner._restore_executor_states() + + +async def test_runner_restore_executor_states_invalid_state_keys(): + """Test that restore fails when executor state dict has non-string keys.""" + executor_a = MockExecutor(id="executor_a") + state = State() + state.set(EXECUTOR_STATE_KEY, {"executor_a": {123: "value"}}) # Non-string key in state + state.commit() + + ctx = InProcRunnerContext() + runner = Runner([], {executor_a.id: executor_a}, state, ctx, "test_name", graph_signature_hash="test_hash") + + with pytest.raises(WorkflowCheckpointException, match="not a dict"): + await runner._restore_executor_states() + + +async def test_runner_restore_executor_states_missing_executor(): + """Test that restore fails when executor is not found.""" + state = State() + state.set(EXECUTOR_STATE_KEY, {"missing_executor": {"key": "value"}}) + state.commit() + + ctx = InProcRunnerContext() + runner = Runner([], {}, state, ctx, "test_name", graph_signature_hash="test_hash") + + with pytest.raises(WorkflowCheckpointException, match="not found during state restoration"): + await runner._restore_executor_states() + + +async def 
test_runner_set_executor_state_invalid_existing_states(): + """Test that _set_executor_state fails when existing states is not a dict.""" + executor_a = MockExecutor(id="executor_a") + state = State() + state.set(EXECUTOR_STATE_KEY, "not_a_dict") + + ctx = InProcRunnerContext() + runner = Runner([], {executor_a.id: executor_a}, state, ctx, "test_name", graph_signature_hash="test_hash") + + with pytest.raises(WorkflowCheckpointException, match="not a dictionary"): + await runner._set_executor_state("executor_a", {"key": "value"}) + + +async def test_runner_with_pre_loop_events(): + """Test that pre-loop events are yielded correctly.""" + ctx = InProcRunnerContext() + state = State() + + runner = Runner([], {}, state, ctx, "test_name", graph_signature_hash="test_hash") + + # Add an event before running + await ctx.add_event(WorkflowEvent.output(executor_id="test_executor", data="pre-loop-output")) + + events: list[WorkflowEvent] = [] + async for event in runner.run_until_convergence(): + events.append(event) + + # Should have the pre-loop output event + output_events = [e for e in events if e.type == "output"] + assert len(output_events) == 1 + assert output_events[0].data == "pre-loop-output" + + +class EventEmittingExecutor(Executor): + """An executor that emits events during execution.""" + + @handler + async def handle(self, message: MockMessage, ctx: WorkflowContext[MockMessage, int]) -> None: + # Emit event during processing + await ctx.yield_output(f"processed-{message.data}") + if message.data < 3: + await ctx.send_message(MockMessage(data=message.data + 1)) + + +async def test_runner_drains_straggler_events(): + """Test that events emitted at the end of iteration are drained.""" + executor_a = EventEmittingExecutor(id="executor_a") + executor_b = EventEmittingExecutor(id="executor_b") + + edges = [ + SingleEdgeGroup(executor_a.id, executor_b.id), + SingleEdgeGroup(executor_b.id, executor_a.id), + ] + + executors: dict[str, Executor] = { + executor_a.id: 
executor_a, + executor_b.id: executor_b, + } + state = State() + ctx = InProcRunnerContext() + + runner = Runner(edges, executors, state, ctx, "test_name", graph_signature_hash="test_hash") + + await executor_a.execute( + MockMessage(data=0), + ["START"], + state, + ctx, + ) + + events: list[WorkflowEvent] = [] + async for event in runner.run_until_convergence(): + events.append(event) + + # Should have output events from both executors + output_events = [e for e in events if e.type == "output"] + assert len(output_events) > 0 + + +async def test_runner_restore_executor_states_no_states(): + """Test that restore does nothing when there are no executor states.""" + executor_a = MockExecutor(id="executor_a") + state = State() # No executor states set + state.commit() + + ctx = InProcRunnerContext() + runner = Runner([], {executor_a.id: executor_a}, state, ctx, "test_name", graph_signature_hash="test_hash") + + # Should complete without error when no executor states exist + await runner._restore_executor_states() + + +async def test_runner_checkpoint_with_resumed_flag(): + """Test that resumed flag prevents initial checkpoint creation.""" + storage = InMemoryCheckpointStorage() + ctx = CheckpointingContext(storage) + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + + edges = [ + SingleEdgeGroup(executor_a.id, executor_b.id), + SingleEdgeGroup(executor_b.id, executor_a.id), + ] + + executors: dict[str, Executor] = { + executor_a.id: executor_a, + executor_b.id: executor_b, + } + state = State() + + runner = Runner(edges, executors, state, ctx, "test_name", graph_signature_hash="test_hash") + runner._mark_resumed(5) + + # Add a message to trigger the checkpoint creation path + await ctx.send_message(Message(data=MockMessage(data=8), source_id="START")) + + await executor_a.execute( + MockMessage(data=8), + ["START"], + state, + ctx, + ) + + # Run until convergence + async for _ in runner.run_until_convergence(): + pass + + # 
After completing, resumed flag should be reset + assert runner._resumed_from_checkpoint is False + + +class ExecutorThatFailsWithEvents(Executor): + """An executor that emits events and then raises an exception after receiving messages.""" + + def __init__(self, id: str, runner_ctx: RunnerContext, fail_on_iteration: int = 1): + super().__init__(id=id) + self._runner_ctx = runner_ctx + self._fail_on_iteration = fail_on_iteration + self._iteration_count = 0 + + @handler + async def handle(self, message: MockMessage, ctx: WorkflowContext[MockMessage, int]) -> None: + self._iteration_count += 1 + # First emit an output event to the workflow context + await ctx.yield_output(f"output-before-failure-{message.data}") + # Add some events directly to the runner context + await self._runner_ctx.add_event(WorkflowEvent.output(executor_id=self.id, data="pending-event")) + # Fail on the specified iteration + if self._iteration_count >= self._fail_on_iteration: + raise RuntimeError("Executor failed with pending events") + # Otherwise, send to next + await ctx.send_message(MockMessage(data=message.data + 1)) + + +class PassthroughExecutor(Executor): + """An executor that passes messages through to the failing executor.""" + + @handler + async def handle(self, message: MockMessage, ctx: WorkflowContext[MockMessage, int]) -> None: + await ctx.send_message(MockMessage(data=message.data)) + + +async def test_runner_drains_events_on_iteration_exception(): + """Test that events are drained when iteration task raises an exception (lines 128-129).""" + ctx = InProcRunnerContext() + # executor_b will fail with pending events after receiving a message + executor_a = PassthroughExecutor(id="executor_a") + executor_b = ExecutorThatFailsWithEvents(id="executor_b", runner_ctx=ctx, fail_on_iteration=1) + + edges = [ + SingleEdgeGroup(executor_a.id, executor_b.id), + ] + + executors: dict[str, Executor] = { + executor_a.id: executor_a, + executor_b.id: executor_b, + } + state = State() + + runner 
= Runner(edges, executors, state, ctx, "test_name", graph_signature_hash="test_hash") + + # Execute through executor_a which will pass to executor_b during the runner iteration + await executor_a.execute( + MockMessage(data=0), + ["START"], + state, + ctx, + ) + + events: list[WorkflowEvent] = [] + with pytest.raises(RuntimeError, match="Executor failed with pending events"): + async for event in runner.run_until_convergence(): + events.append(event) + + # Events should include the ones emitted before the exception + output_events = [e for e in events if e.type == "output"] + # Should have drained the pending events before propagating the exception + assert len(output_events) >= 1 + + +class SlowEventEmittingExecutor(Executor): + """An executor that emits events with delays to test straggler event draining.""" + + def __init__(self, id: str, iterations_to_emit: int = 2): + super().__init__(id=id) + self.iterations_to_emit = iterations_to_emit + self.current_iteration = 0 + + @handler + async def handle(self, message: MockMessage, ctx: WorkflowContext[MockMessage, int]) -> None: + self.current_iteration += 1 + # Emit output event + await ctx.yield_output(f"iteration-{self.current_iteration}") + # Continue sending messages until we reach the target iterations + if self.current_iteration < self.iterations_to_emit: + await ctx.send_message(MockMessage(data=message.data + 1)) + + +async def test_runner_drains_straggler_events_at_iteration_end(): + """Test that events emitted at the very end of iteration are drained (lines 135-136).""" + # Create executors that ping-pong messages and emit events + executor_a = SlowEventEmittingExecutor(id="executor_a", iterations_to_emit=3) + executor_b = SlowEventEmittingExecutor(id="executor_b", iterations_to_emit=3) + + edges = [ + SingleEdgeGroup(executor_a.id, executor_b.id), + SingleEdgeGroup(executor_b.id, executor_a.id), + ] + + executors: dict[str, Executor] = { + executor_a.id: executor_a, + executor_b.id: executor_b, + } + 
state = State() + ctx = InProcRunnerContext() + + runner = Runner(edges, executors, state, ctx, "test_name", graph_signature_hash="test_hash") + + await executor_a.execute( + MockMessage(data=0), + ["START"], + state, + ctx, + ) + + events: list[WorkflowEvent] = [] + async for event in runner.run_until_convergence(): + events.append(event) + + # Check that output events were collected (including straggler events) + output_events = [e for e in events if e.type == "output"] + # We should have output events from both executors + assert len(output_events) >= 2 diff --git a/python/uv.lock b/python/uv.lock index 4759a01f66..07ece23076 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -916,7 +916,7 @@ wheels = [ [[package]] name = "anthropic" -version = "0.78.0" +version = "0.79.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -928,9 +928,9 @@ dependencies = [ { name = "sniffio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ec/51/32849a48f9b1cfe80a508fd269b20bd8f0b1357c70ba092890fde5a6a10b/anthropic-0.78.0.tar.gz", hash = "sha256:55fd978ab9b049c61857463f4c4e9e092b24f892519c6d8078cee1713d8af06e", size = 509136, upload-time = "2026-02-05T17:52:04.986Z" } +sdist = { url = "https://files.pythonhosted.org/packages/15/b1/91aea3f8fd180d01d133d931a167a78a3737b3fd39ccef2ae8d6619c24fd/anthropic-0.79.0.tar.gz", hash = "sha256:8707aafb3b1176ed6c13e2b1c9fb3efddce90d17aee5d8b83a86c70dcdcca871", size = 509825, upload-time = "2026-02-07T18:06:18.388Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/03/2f50931a942e5e13f80e24d83406714672c57964be593fc046d81369335b/anthropic-0.78.0-py3-none-any.whl", hash = 
"sha256:2a9887d2e99d1b0f9fe08857a1e9fe5d2d4030455dbf9ac65aab052e2efaeac4", size = 405485, upload-time = "2026-02-05T17:52:03.674Z" }, + { url = "https://files.pythonhosted.org/packages/95/b2/cc0b8e874a18d7da50b0fda8c99e4ac123f23bf47b471827c5f6f3e4a767/anthropic-0.79.0-py3-none-any.whl", hash = "sha256:04cbd473b6bbda4ca2e41dd670fe2f829a911530f01697d0a1e37321eb75f3cf", size = 405918, upload-time = "2026-02-07T18:06:20.246Z" }, ] [[package]] @@ -1426,19 +1426,19 @@ wheels = [ [[package]] name = "claude-agent-sdk" -version = "0.1.31" +version = "0.1.33" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "mcp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/df/071dce5803c4db8cd53708bcda3b6022c1c4b68fc00e9007593309515286/claude_agent_sdk-0.1.31.tar.gz", hash = "sha256:b68c681083d7cc985dd3e48f73aabf459f056c1a7e1c5b9c47033c6af94da1a1", size = 61191, upload-time = "2026-02-06T02:01:51.043Z" } +sdist = { url = "https://files.pythonhosted.org/packages/57/aa/5c417ef464d3fa712d830cd56a9a79aef8dfb5bc3414aae4bae136cf4e73/claude_agent_sdk-0.1.33.tar.gz", hash = "sha256:134bf403bb7553d829dadec42c30ecef340f5d4ad1595c1bdef933a9ca3129cf", size = 61196, upload-time = "2026-02-07T19:19:53.372Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/7c/e249a3b4215e28a9722b3d9ab6057bceeeaa2b948530f022065ef2154555/claude_agent_sdk-0.1.31-py3-none-macosx_11_0_arm64.whl", hash = "sha256:801bacfe4192782a7cc7b61b0d23a57f061c069993dd3dfa8109aa2e7050a530", size = 54284257, upload-time = "2026-02-06T02:01:35.61Z" }, - { 
url = "https://files.pythonhosted.org/packages/d6/a8/1a8288736aeafcc48e3dcb3326ec7f487dbf89ebba77d526e9464786a299/claude_agent_sdk-0.1.31-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:0b608e0cbfcedcb827427e6d16a73fe573d58e7f93e15f95435066feacbe6511", size = 68462461, upload-time = "2026-02-06T02:01:40.074Z" }, - { url = "https://files.pythonhosted.org/packages/26/7a/7dcd0b77263ed55b17554fa3a67a6772b788e7048a524fd06c9baa970564/claude_agent_sdk-0.1.31-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:d0cb30e026a22246e84d9237d23bb4df20be5146913a04d2802ddd37d4f8b8c9", size = 70173234, upload-time = "2026-02-06T02:01:44.486Z" }, - { url = "https://files.pythonhosted.org/packages/37/a5/4a8de7a9738f454b54aa97557f0fba9c74b0901ea418597008c668243fea/claude_agent_sdk-0.1.31-py3-none-win_amd64.whl", hash = "sha256:8ceca675c2770ad739bd1208362059a830e91c74efcf128045b5a7af14d36f2b", size = 72366975, upload-time = "2026-02-06T02:01:48.647Z" }, + { url = "https://files.pythonhosted.org/packages/72/31/1ac5d536013b1e38b37d71928a0db214cbd47e7bb815c21141dbc6dd93b6/claude_agent_sdk-0.1.33-py3-none-macosx_11_0_arm64.whl", hash = "sha256:57886a2dd124e5b3c9e12ec3e4841742ab3444d1e428b45ceaec8841c96698fa", size = 54323456, upload-time = "2026-02-07T19:19:39.407Z" }, + { url = "https://files.pythonhosted.org/packages/54/36/79c3feb3f2c95591b80de39a1d3097d30bc3a9a84fcff6422f5434f1187a/claude_agent_sdk-0.1.33-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:ea0f1e4fadeec766000122723c406a6f47c6210ea11bb5cc0c88af11ef7c940c", size = 69106772, upload-time = "2026-02-07T19:19:42.998Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/64d22ae767154da4629004a80e9f59f71b5070d55fcfade4efdfb06b1f7a/claude_agent_sdk-0.1.33-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:0ecd822c577b4ea2a52e51146a24dcea73eb69ff366bdb875785dadb116d593b", size = 69688592, upload-time = "2026-02-07T19:19:46.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/aa/83677a3d42b047bcacf4dbe730bf5189a106b5b6746ee83f6920e5d9729a/claude_agent_sdk-0.1.33-py3-none-win_amd64.whl", hash = "sha256:a9fbd09d8f947005e087340ecd0706ed35639c946b4bd49429d3132db4cb3751", size = 72211078, upload-time = "2026-02-07T19:19:50.528Z" }, ] [[package]] @@ -1979,7 +1979,7 @@ wheels = [ [[package]] name = "fastapi" -version = "0.128.2" +version = "0.128.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-doc", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1988,9 +1988,9 @@ dependencies = [ { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-inspection", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4f/6e/45fb5390d46d7918426ea1c1ec4b06c1d3fd70be4a47a690ccb4f1f9438a/fastapi-0.128.2.tar.gz", hash = "sha256:7db9eb891866ac3a08e03f844b99e343a2c1cc41247e68e006c90b38d2464ea1", size = 376129, upload-time = "2026-02-05T19:48:33.957Z" } +sdist = { url = "https://files.pythonhosted.org/packages/02/d4/811e7283aaaa84f1e7bd55fb642b58f8c01895e4884a9b7628cb55e00d63/fastapi-0.128.5.tar.gz", hash = "sha256:a7173579fc162d6471e3c6fbd9a4b7610c7a3b367bcacf6c4f90d5d022cab711", size = 374636, upload-time = "2026-02-08T10:22:30.493Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/f2/80df24108572630bb2adef3d97f1e774b18ec25bfbab5528f36cba6478c0/fastapi-0.128.2-py3-none-any.whl", hash = "sha256:55bfd9490ca0125707d80e785583c2dc57840bb66e3a0bbc087d20c364964dc0", size = 104032, upload-time = "2026-02-05T19:48:32.118Z" }, + { url = "https://files.pythonhosted.org/packages/e4/e0/511972dba23ee76c0e9d09d1ae95e916fc8ebce5322b2b8b65a481428b10/fastapi-0.128.5-py3-none-any.whl", hash = 
"sha256:bceec0de8aa6564599c5bcc0593b0d287703562c848271fca8546fd2c87bf4dd", size = 103677, upload-time = "2026-02-08T10:22:28.919Z" }, ] [[package]] @@ -2351,16 +2351,20 @@ wheels = [ [[package]] name = "github-copilot-sdk" -version = "0.1.22" +version = "0.1.23" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cf/b7/ae720a503c9b329f8c95036a04fae8e023db8dcdce9d24382259865f0760/github_copilot_sdk-0.1.22.tar.gz", hash = "sha256:8ea4534f0c8ab0fa04e0fec4c3ebd42d737cf7772277e4f8eb58a9fadac6bdb5", size = 97324, upload-time = "2026-02-05T17:33:33.726Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/2e/68aa28018778fa86a8392b37c6a883d7a9a24b715ba5baa470ce018f1542/github_copilot_sdk-0.1.22-py3-none-any.whl", hash = "sha256:f75d84dd2633138834330597400b28fefbf8bd75541f78083831f58c9bdde81a", size = 44149, upload-time = "2026-02-05T17:33:31.948Z" }, + { url = "https://files.pythonhosted.org/packages/1a/69/08f478521739e3fbf6c7f7a24ba503c8f80f735be17ef0b08f42b12511c4/github_copilot_sdk-0.1.23-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:9b761445e47b757c63e3ad5596dbdc4fb84720612cad00a12425af056fbadb48", size = 57873198, upload-time = "2026-02-06T18:10:44.07Z" }, + { url = "https://files.pythonhosted.org/packages/d4/6a/c0262ea649a89518e3897d7c464e88aa623d7bb9a6861b7674fda5033c4c/github_copilot_sdk-0.1.23-py3-none-macosx_11_0_arm64.whl", hash = "sha256:27277aca84d767336590a426a48a00ded20533e6508be97c265eb3b64f6e921c", size = 54627888, upload-time = "2026-02-06T18:10:48.352Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/fb/63f147993c840c6b863250f10967dbc45095ab9d2a9ad1c86ca0588c65d5/github_copilot_sdk-0.1.23-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:b929027edeb147683c6625c8a6b90e7c6d64a72ea0567cc8e56c5c66bec7a37d", size = 60760946, upload-time = "2026-02-06T18:10:51.574Z" }, + { url = "https://files.pythonhosted.org/packages/07/2f/0fdeb797e26da3f57c4a84bf3bdd6db9ba4e8974450c8ea0f32fd81c48ba/github_copilot_sdk-0.1.23-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:31d1adb09f342c8a466f64e8b81e6470fee6013d31e516cec7a33a44b6b0a4b4", size = 58941430, upload-time = "2026-02-06T18:10:55.502Z" }, + { url = "https://files.pythonhosted.org/packages/45/9e/4e569de749066fb4c796954c5e01118d52e2cd05b42bf7a1451660851a8e/github_copilot_sdk-0.1.23-py3-none-win_amd64.whl", hash = "sha256:1e1c889aab857feadda546842c4c4730ddb0d63f04aa5ccaae2d83f4bc348eb7", size = 57636441, upload-time = "2026-02-06T18:10:59.359Z" }, + { url = "https://files.pythonhosted.org/packages/0e/65/15c94c7ea647b42123124e6f0daa7f93df630189188cf9e4ce36c5f799d9/github_copilot_sdk-0.1.23-py3-none-win_arm64.whl", hash = "sha256:d1ab5816b0ebd6507ddc6e11ccb5aac4eef2069f2b834b39fcceb909b0cf80bf", size = 55149715, upload-time = "2026-02-06T18:11:02.84Z" }, ] [[package]] @@ -3108,7 +3112,7 @@ wheels = [ [[package]] name = "langfuse" -version = "3.12.1" +version = "3.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backoff", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3122,9 +3126,9 @@ dependencies = [ { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "wrapt", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f3/87/c28a09b696a1b908cf59b201d01e69066aeab804163d8dba055811790ed5/langfuse-3.12.1.tar.gz", hash = 
"sha256:da3bf4c0469eab4305f88a63cbb5ef89cf7542abbbcc9136a35c1bc708810520", size = 232768, upload-time = "2026-01-27T06:11:24.648Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/d0/744e5613c728427330ac2049da0f54fc313e8bf84622f71b025bfba65496/langfuse-3.13.0.tar.gz", hash = "sha256:dacea8111ca4442e97dbfec4f8d676cf9709b35357a26e468f8887b95de0012f", size = 233420, upload-time = "2026-02-06T19:54:14.415Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/51/a5752417d704831f8c9fc4d7ec070342dee21d781d92e6fe937e60912e61/langfuse-3.12.1-py3-none-any.whl", hash = "sha256:ccf091ed6b6e0d9d4dbc95ad5cbb0f60c4452ce95b18c114ed5896f4546af38f", size = 416999, upload-time = "2026-01-27T06:11:22.657Z" }, + { url = "https://files.pythonhosted.org/packages/3d/63/148382e8e79948f7e5c9c137288e504bb88117574eb7e7c886b4fb470b4b/langfuse-3.13.0-py3-none-any.whl", hash = "sha256:71912ddac1cc831a65df895eae538a556f564c094ae51473e747426e9ded1a9d", size = 417626, upload-time = "2026-02-06T19:54:12.547Z" }, ] [[package]] @@ -3202,7 +3206,7 @@ wheels = [ [[package]] name = "litellm" -version = "1.81.8" +version = "1.81.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3218,9 +3222,9 @@ dependencies = [ { name = "tiktoken", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tokenizers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/eb/1d/e8f95dd1fc0eed36f2698ca82d8a0693d5388c6f2f1718f3f5ed472daaf4/litellm-1.81.8.tar.gz", hash = "sha256:5cc6547697748b8ca38d17d755662871da125df6e378cc987eaf2208a15626fb", size = 14066801, upload-time = "2026-02-05T05:56:03.37Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ff/8f/2a08f3d86fd008b4b02254649883032068378a8551baed93e8d9dcbbdb5d/litellm-1.81.9.tar.gz", hash = "sha256:a2cd9bc53a88696c21309ef37c55556f03c501392ed59d7f4250f9932917c13c", size = 16276983, upload-time = "2026-02-07T21:14:24.473Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d8/5a/6f391c2f251553dae98b6edca31c070d7e2291cef6153ae69e0688159093/litellm-1.81.8-py3-none-any.whl", hash = "sha256:78cca92f36bc6c267c191d1fe1e2630c812bff6daec32c58cade75748c2692f6", size = 12286316, upload-time = "2026-02-05T05:56:00.248Z" }, + { url = "https://files.pythonhosted.org/packages/0b/8b/672fc06c8a2803477e61e0de383d3c6e686e0f0fc62789c21f0317494076/litellm-1.81.9-py3-none-any.whl", hash = "sha256:24ee273bc8a62299fbb754035f83fb7d8d44329c383701a2bd034f4fd1c19084", size = 14433170, upload-time = "2026-02-07T21:14:21.469Z" }, ] [package.optional-dependencies] @@ -3253,20 +3257,20 @@ proxy = [ [[package]] name = "litellm-enterprise" -version = "0.1.27" +version = "0.1.31" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b6/b5/2304eed58f0142b3570c50580b451db9b7709012d5b436c2100783ae2220/litellm_enterprise-0.1.27.tar.gz", hash = "sha256:aa40c87f7c8df64beb79e75f71e1b5c0a458350efa68527e3491e6f27f2cbd57", size = 46829, upload-time = "2025-12-18T00:01:33.398Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/ef/4d7baae0503cbab015cb03238633887725b553a22adaf9a011b35cd7338f/litellm_enterprise-0.1.31.tar.gz", hash = "sha256:684d09daa3ededf1394df4ec1439aab606b884b68af2c92c478af0784a30e588", size = 50205, upload-time = "2026-02-06T05:31:55.311Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/23/ec61a6aa76b6938d3de8cad206875b0500e1df234fa3535b282b1a4850b5/litellm_enterprise-0.1.27-py3-none-any.whl", hash = "sha256:41b9d41d04123f492060a742091006dc1d182b54ce3a1c0e18ee75d623c63e91", size = 108107, upload-time = "2025-12-18T00:01:31.966Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/8a/1e06af78b18d62e1dbb457f60cc78a82543217db81cb780af080b4dd985d/litellm_enterprise-0.1.31-py3-none-any.whl", hash = "sha256:7b0f750343e6f28c88e1557c656a6bea50fa6c8990a13e86ea20497cf666c79b", size = 112741, upload-time = "2026-02-06T05:31:54.311Z" }, ] [[package]] name = "litellm-proxy-extras" -version = "0.4.30" +version = "0.4.33" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/a1/00d2e91a7a91335a7d7f43dfb8316142879782c22ef59eca5d0ced055bf0/litellm_proxy_extras-0.4.30.tar.gz", hash = "sha256:5d32f8dc3d37d36fb15ab6995fea706dd8a453ff7f12e70b47cba35e5368da10", size = 23752, upload-time = "2026-02-05T03:54:00.351Z" } +sdist = { url = "https://files.pythonhosted.org/packages/51/4f/1e8644cdda2892d2dc8151153ca4d8a6fc44000363677a52f9988e56713a/litellm_proxy_extras-0.4.33.tar.gz", hash = "sha256:133dc5476b540d99e75d4baef622267e7344ced97737c174679baff429e7f212", size = 23973, upload-time = "2026-02-07T19:07:32.67Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/80/5b7ae7b39a79ca79722dd9049b3b4227b4540cb97006c8ef26c43af74db8/litellm_proxy_extras-0.4.30-py3-none-any.whl", hash = "sha256:0b7df68f0968eb817462b847eaee81bba23d935adb2e84d2e342a77711887051", size = 51217, upload-time = "2026-02-05T03:54:02.128Z" }, + { url = "https://files.pythonhosted.org/packages/b7/c0/b9960391b983306c39f1fa28e2eedf5d0e2048879fde8707a2d80896ed10/litellm_proxy_extras-0.4.33-py3-none-any.whl", hash = "sha256:bebea1b091490df19cfa773bd311f08254dee5bb53f92d282b7a5bdfba936334", size = 52533, upload-time = "2026-02-07T19:07:31.665Z" }, ] [[package]] @@ -4046,7 +4050,7 @@ wheels = [ [[package]] name = "openai-agents" -version = "0.8.0" +version = "0.8.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "griffe", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -4057,9 +4061,9 @@ dependencies = [ { name = 
"types-requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/97/57/724c73f158dec760a6e689e2415ab1b85bc5ff21508d82af91d23c9580e9/openai_agents-0.8.0.tar.gz", hash = "sha256:0ea66356ace1e158b09ab173534cacbc435d4a06e3203d04978dd69531729fc3", size = 2342265, upload-time = "2026-02-05T02:51:52.293Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/43/ccea6b70e3c4399eea24a7e0c0cde9e05727781e5b7dd2c00e2cebe09961/openai_agents-0.8.1.tar.gz", hash = "sha256:32dc6124359397e5775e936e621892576a0b2f5c88b3fc548a084334f6918541", size = 2373798, upload-time = "2026-02-06T22:44:24.24Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/61/7c590176c664845e75961a7755f58997b404fb633073a9ddba1151582033/openai_agents-0.8.0-py3-none-any.whl", hash = "sha256:1a8b63f10f8828fb5516fa4917ee26d03956893f8f09e38cfcf33ec60ffcd546", size = 373746, upload-time = "2026-02-05T02:51:50.501Z" }, + { url = "https://files.pythonhosted.org/packages/f6/3f/49ff704c933cf2a3467c040b13231258bb1f2fa66d995c3b62b3a13c2eb4/openai_agents-0.8.1-py3-none-any.whl", hash = "sha256:a29916690f4ca2d67c0d782abbff99350ce2a7cee0067b8dd2c2297e38a3714a", size = 376922, upload-time = "2026-02-06T22:44:21.977Z" }, ] [[package]] @@ -4643,44 +4647,44 @@ wheels = [ [[package]] name = "poethepoet" -version = "0.40.0" +version = "0.41.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pastel", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or 
(python_full_version < '3.11' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/9d/054c8435b03324ed9abd5d5ab8c45065b1f42c23952cd23f13a5921d8465/poethepoet-0.40.0.tar.gz", hash = "sha256:91835f00d03d6c4f0e146f80fa510e298ad865e7edd27fe4cb9c94fdc090791b", size = 81114, upload-time = "2026-01-05T19:09:13.116Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/b9/fa92286560f70eaa40d473ea48376d20c6c21f63627d33c6bb1c5e385175/poethepoet-0.41.0.tar.gz", hash = "sha256:dcaad621dc061f6a90b17d091bebb9ca043d67bfe9bd6aa4185aea3ebf7ff3e6", size = 87780, upload-time = "2026-02-08T20:45:36.061Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/bc/73327d12b176abea7a3c6c7d760e1a953992f7b59d72c0354e39d7a353b5/poethepoet-0.40.0-py3-none-any.whl", hash = "sha256:afd276ae31d5c53573c0c14898118d4848ccee3709b6b0be6a1c6cbe522bbc8a", size = 106672, upload-time = "2026-01-05T19:09:11.536Z" }, + { url = "https://files.pythonhosted.org/packages/5d/5e/0b83e0222ce5921b3f9081eeca8c6fb3e1cfd5ca0d06338adf93b28ce061/poethepoet-0.41.0-py3-none-any.whl", hash = "sha256:4bab9fd8271664c5d21407e8f12827daeb6aa484dc6cc7620f0c3b4e62b42ee4", size = 113590, upload-time = "2026-02-08T20:45:34.697Z" }, ] [[package]] name = "polars" -version = "1.38.0" +version = "1.38.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "polars-runtime-32", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/56/bce1c1244431b0ebc4e5d413fdbcf7f85ec30fc98595fcfb7328a869d794/polars-1.38.0.tar.gz", hash = "sha256:4dee569944c613d8c621eb709e452354e1570bd3d47ccb2d3d36681fb1bd2cf6", size = 717801, upload-time = "2026-02-04T12:00:34.246Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/5e/208a24471a433bcd0e9a6889ac49025fd4daad2815c8220c5bd2576e5f1b/polars-1.38.1.tar.gz", hash = 
"sha256:803a2be5344ef880ad625addfb8f641995cfd777413b08a10de0897345778239", size = 717667, upload-time = "2026-02-06T18:13:23.013Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/47/61e7a47f77e321aa1cbf4141cc60df9d6e63b9f469c5525226535552a04c/polars-1.38.0-py3-none-any.whl", hash = "sha256:d7a31b47da8c9522aa38908c46ac72eab8eaf0c992e024f9c95fedba4cbe7759", size = 810116, upload-time = "2026-02-04T11:59:21.425Z" }, + { url = "https://files.pythonhosted.org/packages/0a/49/737c1a6273c585719858261753da0b688454d1b634438ccba8a9c4eb5aab/polars-1.38.1-py3-none-any.whl", hash = "sha256:a29479c48fed4984d88b656486d221f638cba45d3e961631a50ee5fdde38cb2c", size = 810368, upload-time = "2026-02-06T18:11:55.819Z" }, ] [[package]] name = "polars-runtime-32" -version = "1.38.0" +version = "1.38.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8c/8d/8f5764d722ad16ddb1b6db997aca7a41110dad446000ee2e3f8f48503f0e/polars_runtime_32-1.38.0.tar.gz", hash = "sha256:69ba986bff34f70d7eab931005e5d81dd4dc6c5c12e3532a4bd0fc7022671692", size = 2812354, upload-time = "2026-02-04T12:00:36.041Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/4b/04d6b3fb7cf336fbe12fbc4b43f36d1783e11bb0f2b1e3980ec44878df06/polars_runtime_32-1.38.1.tar.gz", hash = "sha256:04f20ed1f5c58771f34296a27029dc755a9e4b1390caeaef8f317e06fdfce2ec", size = 2812631, upload-time = "2026-02-06T18:13:25.206Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/eb/a8981ec070dd9bea9569292f38b0268159e39f63f5376ffae27a0c7d2ee7/polars_runtime_32-1.38.0-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:03f43c10a419837b89a493e946090cdaee08ce50a8d1933f2e8ac3a6874d7db4", size = 44106460, upload-time = "2026-02-04T11:59:23.546Z" }, - { url = "https://files.pythonhosted.org/packages/64/de/c2a2037b2d658b91067647b99be43bc91af3a7b4868e32efcc118f383add/polars_runtime_32-1.38.0-cp310-abi3-macosx_11_0_arm64.whl", hash = 
"sha256:d664e53cba734e9fbed87d1c33078a13b5fc39b3e8790318fc65fa78954ea2d0", size = 40228076, upload-time = "2026-02-04T11:59:26.497Z" }, - { url = "https://files.pythonhosted.org/packages/4a/0f/9204210e7d05b3953813bb09627585c161221f512f2672b31065a02f4727/polars_runtime_32-1.38.0-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c073c7b7e6e559769e10cdadbafce86d32b0709d5790de920081c6129acae507", size = 41988273, upload-time = "2026-02-04T11:59:29.01Z" }, - { url = "https://files.pythonhosted.org/packages/89/64/4c5dbb1c2d2c025f8e7c7e433bd343c4fc955ceadd087a7ad456de8668f8/polars_runtime_32-1.38.0-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8806ddb684b17ae8b0bcb91d8d5ba361b04b0a31d77ce7f861d16b47734b3012", size = 45749469, upload-time = "2026-02-04T11:59:32.292Z" }, - { url = "https://files.pythonhosted.org/packages/d7/f8/da2d324d686b1fc438dfb721677fb44f7f5aab6ae0d1fa5b281e986fde82/polars_runtime_32-1.38.0-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c7b41163189bd3305fe2307e66fe478b35c4faa467777d74c32b70b52292039b", size = 42159740, upload-time = "2026-02-04T11:59:35.608Z" }, - { url = "https://files.pythonhosted.org/packages/37/88/fe02e4450e9b582ea6f1a7490921208a9c3a0a1efdf976aadbaa4cae73bb/polars_runtime_32-1.38.0-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e944f924a99750909299fa701edb07a63a5988e5ee58d673993f3d9147a22276", size = 45327635, upload-time = "2026-02-04T11:59:38.28Z" }, - { url = "https://files.pythonhosted.org/packages/68/db/9bb8007a4bea76b476537740ed18c8bccd809faa390ca1443134e98f8b60/polars_runtime_32-1.38.0-cp310-abi3-win_amd64.whl", hash = "sha256:46fbfb4ee6f8e1914dc0babfb6a138ead552db05a2d9e531c1fb19411b1a6744", size = 45670197, upload-time = "2026-02-04T11:59:41.297Z" }, - { url = "https://files.pythonhosted.org/packages/58/78/28f793ec2e1cff72c0ced1bc9186c9b4dbfe44ca8316df11b2aa8039764c/polars_runtime_32-1.38.0-cp310-abi3-win_arm64.whl", hash = 
"sha256:ed0e6d7a546de9179e5715bffe9d3b94ba658d5655bbbf44943e138e061dcc90", size = 41637784, upload-time = "2026-02-04T11:59:44.396Z" }, + { url = "https://files.pythonhosted.org/packages/ae/a2/a00defbddadd8cf1042f52380dcba6b6592b03bac8e3b34c436b62d12d3b/polars_runtime_32-1.38.1-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:18154e96044724a0ac38ce155cf63aa03c02dd70500efbbf1a61b08cadd269ef", size = 44108001, upload-time = "2026-02-06T18:11:58.127Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/599ff3709e6a303024efd7edfd08cf8de55c6ac39527d8f41cbc4399385f/polars_runtime_32-1.38.1-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:c49acac34cc4049ed188f1eb67d6ff3971a39b4af7f7b734b367119970f313ac", size = 40230140, upload-time = "2026-02-06T18:12:01.181Z" }, + { url = "https://files.pythonhosted.org/packages/dc/8c/3ac18d6f89dc05fe2c7c0ee1dc5b81f77a5c85ad59898232c2500fe2ebbf/polars_runtime_32-1.38.1-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fef2ef2626a954e010e006cc8e4de467ecf32d08008f130cea1c78911f545323", size = 41994039, upload-time = "2026-02-06T18:12:04.332Z" }, + { url = "https://files.pythonhosted.org/packages/f2/5a/61d60ec5cc0ab37cbd5a699edb2f9af2875b7fdfdfb2a4608ca3cc5f0448/polars_runtime_32-1.38.1-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a5f7a8125e2d50e2e060296551c929aec09be23a9edcb2b12ca923f555a5ba", size = 45755804, upload-time = "2026-02-06T18:12:07.846Z" }, + { url = "https://files.pythonhosted.org/packages/91/54/02cd4074c98c361ccd3fec3bcb0bd68dbc639c0550c42a4436b0ff0f3ccf/polars_runtime_32-1.38.1-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:10d19cd9863e129273b18b7fcaab625b5c8143c2d22b3e549067b78efa32e4fa", size = 42159605, upload-time = "2026-02-06T18:12:10.919Z" }, + { url = "https://files.pythonhosted.org/packages/8e/f3/b2a5e720cc56eaa38b4518e63aa577b4bbd60e8b05a00fe43ca051be5879/polars_runtime_32-1.38.1-cp310-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:61e8d73c614b46a00d2f853625a7569a2e4a0999333e876354ac81d1bf1bb5e2", size = 45336615, upload-time = "2026-02-06T18:12:14.074Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8d/ee2e4b7de948090cfb3df37d401c521233daf97bfc54ddec5d61d1d31618/polars_runtime_32-1.38.1-cp310-abi3-win_amd64.whl", hash = "sha256:08c2b3b93509c1141ac97891294ff5c5b0c548a373f583eaaea873a4bf506437", size = 45680732, upload-time = "2026-02-06T18:12:19.097Z" }, + { url = "https://files.pythonhosted.org/packages/bf/18/72c216f4ab0c82b907009668f79183ae029116ff0dd245d56ef58aac48e7/polars_runtime_32-1.38.1-cp310-abi3-win_arm64.whl", hash = "sha256:6d07d0cc832bfe4fb54b6e04218c2c27afcfa6b9498f9f6bbf262a00d58cc7c4", size = 41639413, upload-time = "2026-02-06T18:12:22.044Z" }, ] [[package]] @@ -5521,7 +5525,7 @@ wheels = [ [[package]] name = "redisvl" -version = "0.13.2" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jsonpath-ng", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5534,9 +5538,9 @@ dependencies = [ { name = "redis", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tenacity", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/81/d6/8f3235b272e3a2370698d7524aad2dec15f53c5be5d6726ba41056844f69/redisvl-0.13.2.tar.gz", hash = "sha256:f34c4350922ac469c45d90b5db65c49950e6aa8706331931b000f631ff9a0f4a", size = 737736, upload-time = "2025-12-19T09:22:07.787Z" } +sdist = { url = "https://files.pythonhosted.org/packages/21/45/1c5b308f68b01c4e33590a8e1445f43c51292917b28c2def8deaa5b3dc5b/redisvl-0.14.0.tar.gz", hash = "sha256:7a84c46858dbc86943e64ffe8590013684d03d79b72a634d10c02ce5d1c02335", size = 759829, upload-time = "2026-02-06T15:48:19.384Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b2/93/81ea5c45637ce7fe2fdaf214d5e1b91afe96a472edeb9b659e24d3710dfb/redisvl-0.13.2-py3-none-any.whl", hash = "sha256:dd998c6acc54f13526d464ad6b6e6f0c4cf6985fb2c7a1655bdf8ed8e57a4c01", size = 192760, upload-time = "2025-12-19T09:22:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/24/e9/264455caf42501b2b0747ac4819c7d0a2b458fad5e4e1f7610b6383d6d74/redisvl-0.14.0-py3-none-any.whl", hash = "sha256:85ec38f414427260da82ef20653a62d4c2626b97672c5c950616e5dde3cf0d0b", size = 196705, upload-time = "2026-02-06T15:48:17.636Z" }, ] [[package]] @@ -6248,11 +6252,11 @@ wheels = [ [[package]] name = "setuptools" -version = "80.10.2" +version = "82.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/76/95/faf61eb8363f26aa7e1d762267a8d602a1b26d4f3a1e758e92cb3cb8b054/setuptools-80.10.2.tar.gz", hash = "sha256:8b0e9d10c784bf7d262c4e5ec5d4ec94127ce206e8738f29a437945fbc219b70", size = 1200343, upload-time = "2026-01-25T22:38:17.252Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/f3/748f4d6f65d1756b9ae577f329c951cda23fb900e4de9f70900ced962085/setuptools-82.0.0.tar.gz", hash = "sha256:22e0a2d69474c6ae4feb01951cb69d515ed23728cf96d05513d36e42b62b37cb", size = 1144893, upload-time = "2026-02-08T15:08:40.206Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/b8/f1f62a5e3c0ad2ff1d189590bfa4c46b4f3b6e49cef6f26c6ee4e575394d/setuptools-80.10.2-py3-none-any.whl", hash = "sha256:95b30ddfb717250edb492926c92b5221f7ef3fbcc2b07579bcd4a27da21d0173", size = 1064234, upload-time = "2026-01-25T22:38:15.216Z" }, + { url = "https://files.pythonhosted.org/packages/e1/c6/76dc613121b793286a3f91621d7b75a2b493e0390ddca50f11993eadf192/setuptools-82.0.0-py3-none-any.whl", hash = "sha256:70b18734b607bd1da571d097d236cfcfacaf01de45717d59e6e04b96877532e0", size = 1003468, upload-time = "2026-02-08T15:08:38.723Z" }, ] [[package]] @@ -6461,15 +6465,15 @@ wheels = [ 
[[package]] name = "starlette" -version = "0.50.0" +version = "0.52.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" }, + { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" }, ] [[package]] @@ -6529,11 +6533,11 @@ dependencies = [ [[package]] name = "tenacity" -version = "9.1.3" +version = "9.1.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1e/4a/c3357c8742f361785e3702bb4c9c68c4cb37a80aa657640b820669be5af1/tenacity-9.1.3.tar.gz", hash = 
"sha256:a6724c947aa717087e2531f883bde5c9188f603f6669a9b8d54eb998e604c12a", size = 49002, upload-time = "2026-02-05T06:33:12.866Z" } +sdist = { url = "https://files.pythonhosted.org/packages/47/c6/ee486fd809e357697ee8a44d3d69222b344920433d3b6666ccd9b374630c/tenacity-9.1.4.tar.gz", hash = "sha256:adb31d4c263f2bd041081ab33b498309a57c77f9acf2db65aadf0898179cf93a", size = 49413, upload-time = "2026-02-07T10:45:33.841Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/6b/cdc85edb15e384d8e934aad89638cc8646e118c80de94c60125d0fc0a185/tenacity-9.1.3-py3-none-any.whl", hash = "sha256:51171cfc6b8a7826551e2f029426b10a6af189c5ac6986adcd7eb36d42f17954", size = 28858, upload-time = "2026-02-05T06:33:11.219Z" }, + { url = "https://files.pythonhosted.org/packages/d7/c1/eb8f9debc45d3b7918a32ab756658a0904732f75e555402972246b0b8e71/tenacity-9.1.4-py3-none-any.whl", hash = "sha256:6095a360c919085f28c6527de529e76a06ad89b23659fa881ae0649b867a9d55", size = 28926, upload-time = "2026-02-07T10:45:32.24Z" }, ] [[package]] From 28e0304f539d42091953b601617eb47329061e0c Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Sun, 8 Feb 2026 18:49:58 -0800 Subject: [PATCH 06/16] Fix formatting --- .../agent_framework/_workflows/_checkpoint.py | 14 ++++---- .../tests/workflow/test_agent_executor.py | 2 +- .../core/tests/workflow/test_checkpoint.py | 36 +++++++++---------- .../workflow/test_checkpoint_validation.py | 4 +-- .../test_request_info_and_response.py | 2 +- .../core/tests/workflow/test_sub_workflow.py | 2 +- .../core/tests/workflow/test_workflow.py | 6 ++-- .../tests/workflow/test_workflow_agent.py | 2 +- .../devui/agent_framework_devui/_executor.py | 2 +- .../devui/agent_framework_devui/_server.py | 2 +- 10 files changed, 36 insertions(+), 36 deletions(-) diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint.py b/python/packages/core/agent_framework/_workflows/_checkpoint.py index 1aca04c0e0..af73a7d854 100644 --- 
a/python/packages/core/agent_framework/_workflows/_checkpoint.py +++ b/python/packages/core/agent_framework/_workflows/_checkpoint.py @@ -122,7 +122,7 @@ async def load(self, checkpoint_id: CheckpointID) -> WorkflowCheckpoint: """ ... - async def list(self, workflow_name: str) -> list[WorkflowCheckpoint]: + async def list_checkpoints(self, workflow_name: str) -> list[WorkflowCheckpoint]: """List checkpoint objects for a given workflow name. Args: @@ -155,7 +155,7 @@ async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: """ ... - async def list_ids(self, workflow_name: str) -> list[CheckpointID]: + async def list_checkpoint_ids(self, workflow_name: str) -> list[CheckpointID]: """List checkpoint IDs for a given workflow name. Args: @@ -188,7 +188,7 @@ async def load(self, checkpoint_id: CheckpointID) -> WorkflowCheckpoint: return checkpoint raise WorkflowCheckpointException(f"No checkpoint found with ID {checkpoint_id}") - async def list(self, workflow_name: str) -> list[WorkflowCheckpoint]: + async def list_checkpoints(self, workflow_name: str) -> list[WorkflowCheckpoint]: """List checkpoint objects for a given workflow name.""" return [cp for cp in self._checkpoints.values() if cp.workflow_name == workflow_name] @@ -209,7 +209,7 @@ async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: logger.debug(f"Latest checkpoint for workflow {workflow_name} is {latest_checkpoint.checkpoint_id}") return latest_checkpoint - async def list_ids(self, workflow_name: str) -> list[CheckpointID]: + async def list_checkpoint_ids(self, workflow_name: str) -> list[CheckpointID]: """List checkpoint IDs. 
If workflow_id is provided, filter by that workflow.""" return [cp.checkpoint_id for cp in self._checkpoints.values() if cp.workflow_name == workflow_name] @@ -288,7 +288,7 @@ def _read() -> dict[str, Any]: logger.info(f"Loaded checkpoint {checkpoint_id} from {file_path}") return checkpoint - async def list(self, workflow_name: str) -> list[WorkflowCheckpoint]: + async def list_checkpoints(self, workflow_name: str) -> list[WorkflowCheckpoint]: """List checkpoint objects for a given workflow name. Args: @@ -345,14 +345,14 @@ async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: Returns: The latest WorkflowCheckpoint object for the specified workflow name, or None if no checkpoints exist. """ - checkpoints = await self.list(workflow_name) + checkpoints = await self.list_checkpoints(workflow_name) if not checkpoints: return None latest_checkpoint = max(checkpoints, key=lambda cp: cp.timestamp) logger.debug(f"Latest checkpoint for workflow {workflow_name} is {latest_checkpoint.checkpoint_id}") return latest_checkpoint - async def list_ids(self, workflow_name: str) -> list[CheckpointID]: + async def list_checkpoint_ids(self, workflow_name: str) -> list[CheckpointID]: """List checkpoint IDs for a given workflow name. 
Args: diff --git a/python/packages/core/tests/workflow/test_agent_executor.py b/python/packages/core/tests/workflow/test_agent_executor.py index 5f089f7fa9..e46d1864c6 100644 --- a/python/packages/core/tests/workflow/test_agent_executor.py +++ b/python/packages/core/tests/workflow/test_agent_executor.py @@ -84,7 +84,7 @@ async def test_agent_executor_checkpoint_stores_and_restores_state() -> None: assert initial_agent.call_count == 1 # Verify checkpoint was created - checkpoints = await storage.list(wf.name) + checkpoints = await storage.list_checkpoints(wf.name) assert len(checkpoints) > 0 # Get the second checkpoint which should contain the state after processing diff --git a/python/packages/core/tests/workflow/test_checkpoint.py b/python/packages/core/tests/workflow/test_checkpoint.py index c026d6dae5..cccd8907f3 100644 --- a/python/packages/core/tests/workflow/test_checkpoint.py +++ b/python/packages/core/tests/workflow/test_checkpoint.py @@ -100,33 +100,33 @@ async def test_memory_checkpoint_storage_list(): await storage.save(checkpoint3) # Test list_ids for workflow-1 - workflow1_checkpoint_ids = await storage.list_ids("workflow-1") + workflow1_checkpoint_ids = await storage.list_checkpoint_ids("workflow-1") assert len(workflow1_checkpoint_ids) == 2 assert checkpoint1.checkpoint_id in workflow1_checkpoint_ids assert checkpoint2.checkpoint_id in workflow1_checkpoint_ids # Test list for workflow-1 (returns objects) - workflow1_checkpoints = await storage.list("workflow-1") + workflow1_checkpoints = await storage.list_checkpoints("workflow-1") assert len(workflow1_checkpoints) == 2 assert all(isinstance(cp, WorkflowCheckpoint) for cp in workflow1_checkpoints) assert {cp.checkpoint_id for cp in workflow1_checkpoints} == {checkpoint1.checkpoint_id, checkpoint2.checkpoint_id} # Test list_ids for workflow-2 - workflow2_checkpoint_ids = await storage.list_ids("workflow-2") + workflow2_checkpoint_ids = await storage.list_checkpoint_ids("workflow-2") assert 
len(workflow2_checkpoint_ids) == 1 assert checkpoint3.checkpoint_id in workflow2_checkpoint_ids # Test list for workflow-2 (returns objects) - workflow2_checkpoints = await storage.list("workflow-2") + workflow2_checkpoints = await storage.list_checkpoints("workflow-2") assert len(workflow2_checkpoints) == 1 assert workflow2_checkpoints[0].checkpoint_id == checkpoint3.checkpoint_id # Test list_ids for non-existent workflow - empty_checkpoint_ids = await storage.list_ids("nonexistent-workflow") + empty_checkpoint_ids = await storage.list_checkpoint_ids("nonexistent-workflow") assert len(empty_checkpoint_ids) == 0 # Test list for non-existent workflow - empty_checkpoints = await storage.list("nonexistent-workflow") + empty_checkpoints = await storage.list_checkpoints("nonexistent-workflow") assert len(empty_checkpoints) == 0 @@ -203,25 +203,25 @@ async def test_file_checkpoint_storage_list(): await storage.save(checkpoint3) # Test list_ids for workflow-1 - workflow1_checkpoint_ids = await storage.list_ids("workflow-1") + workflow1_checkpoint_ids = await storage.list_checkpoint_ids("workflow-1") assert len(workflow1_checkpoint_ids) == 2 assert checkpoint1.checkpoint_id in workflow1_checkpoint_ids assert checkpoint2.checkpoint_id in workflow1_checkpoint_ids # Test list for workflow-1 (returns objects) - workflow1_checkpoints = await storage.list("workflow-1") + workflow1_checkpoints = await storage.list_checkpoints("workflow-1") assert len(workflow1_checkpoints) == 2 assert all(isinstance(cp, WorkflowCheckpoint) for cp in workflow1_checkpoints) checkpoint_ids = {cp.checkpoint_id for cp in workflow1_checkpoints} assert checkpoint_ids == {checkpoint1.checkpoint_id, checkpoint2.checkpoint_id} # Test list_ids for workflow-2 - workflow2_checkpoint_ids = await storage.list_ids("workflow-2") + workflow2_checkpoint_ids = await storage.list_checkpoint_ids("workflow-2") assert len(workflow2_checkpoint_ids) == 1 assert checkpoint3.checkpoint_id in workflow2_checkpoint_ids # Test 
list for workflow-2 (returns objects) - workflow2_checkpoints = await storage.list("workflow-2") + workflow2_checkpoints = await storage.list_checkpoints("workflow-2") assert len(workflow2_checkpoints) == 1 assert workflow2_checkpoints[0].checkpoint_id == checkpoint3.checkpoint_id @@ -273,7 +273,7 @@ async def test_file_checkpoint_storage_corrupted_file(): f.write("{ invalid json }") # list should handle the corrupted file gracefully - checkpoints = await storage.list("any-workflow") + checkpoints = await storage.list_checkpoints("any-workflow") assert checkpoints == [] @@ -323,12 +323,12 @@ def test_checkpoint_storage_protocol_compliance(): assert callable(storage.save) assert hasattr(storage, "load") assert callable(storage.load) - assert hasattr(storage, "list") - assert callable(storage.list) + assert hasattr(storage, "list_checkpoints") + assert callable(storage.list_checkpoints) assert hasattr(storage, "delete") assert callable(storage.delete) - assert hasattr(storage, "list_ids") - assert callable(storage.list_ids) + assert hasattr(storage, "list_checkpoint_ids") + assert callable(storage.list_checkpoint_ids) assert hasattr(storage, "get_latest") assert callable(storage.get_latest) @@ -440,7 +440,7 @@ async def test_file_checkpoint_storage_list_ids_corrupted_file(): f.write("{ invalid json }") # list_ids should handle the corrupted file gracefully - checkpoint_ids = await storage.list_ids("test-workflow") + checkpoint_ids = await storage.list_checkpoint_ids("test-workflow") assert len(checkpoint_ids) == 1 assert checkpoint.checkpoint_id in checkpoint_ids @@ -450,7 +450,7 @@ async def test_file_checkpoint_storage_list_ids_empty(): storage = FileCheckpointStorage(temp_dir) # Test list_ids on empty storage - checkpoint_ids = await storage.list_ids("any-workflow") + checkpoint_ids = await storage.list_checkpoint_ids("any-workflow") assert checkpoint_ids == [] @@ -493,7 +493,7 @@ async def finish(self, message: str, ctx: WorkflowContext[Never, str]) -> None: _ = 
[event async for event in workflow.run("hello", stream=True)] # Get all checkpoints sorted by timestamp - checkpoints = sorted(await storage.list(workflow.name), key=lambda c: c.timestamp) + checkpoints = sorted(await storage.list_checkpoints(workflow.name), key=lambda c: c.timestamp) # Should have multiple checkpoints (one initial + one per superstep) assert len(checkpoints) >= 2, f"Expected at least 2 checkpoints, got {len(checkpoints)}" diff --git a/python/packages/core/tests/workflow/test_checkpoint_validation.py b/python/packages/core/tests/workflow/test_checkpoint_validation.py index 38c54b81da..b9a91820a6 100644 --- a/python/packages/core/tests/workflow/test_checkpoint_validation.py +++ b/python/packages/core/tests/workflow/test_checkpoint_validation.py @@ -43,7 +43,7 @@ async def test_resume_fails_when_graph_mismatch() -> None: # Run once to create checkpoints _ = [event async for event in workflow.run("hello", stream=True)] # noqa: F841 - checkpoints = await storage.list(workflow.name) + checkpoints = await storage.list_checkpoints(workflow.name) assert checkpoints, "expected at least one checkpoint to be created" target_checkpoint = checkpoints[-1] @@ -66,7 +66,7 @@ async def test_resume_succeeds_when_graph_matches() -> None: workflow = build_workflow(storage, finish_id="finish") _ = [event async for event in workflow.run("hello", stream=True)] # noqa: F841 - checkpoints = sorted(await storage.list(workflow.name), key=lambda c: c.timestamp) + checkpoints = sorted(await storage.list_checkpoints(workflow.name), key=lambda c: c.timestamp) target_checkpoint = checkpoints[0] resumed_workflow = build_workflow(storage, finish_id="finish") diff --git a/python/packages/core/tests/workflow/test_request_info_and_response.py b/python/packages/core/tests/workflow/test_request_info_and_response.py index 7681f75f0b..d883ac1b27 100644 --- a/python/packages/core/tests/workflow/test_request_info_and_response.py +++ 
b/python/packages/core/tests/workflow/test_request_info_and_response.py @@ -349,7 +349,7 @@ async def test_checkpoint_with_pending_request_info_events(self): assert request_info_event.source_executor_id == "approval_executor" # Step 2: List checkpoints to find the one with our pending request - checkpoints = await storage.list(workflow.name) + checkpoints = await storage.list_checkpoints(workflow.name) assert len(checkpoints) > 0, "No checkpoints were created during workflow execution" # Find the checkpoint with our pending request diff --git a/python/packages/core/tests/workflow/test_sub_workflow.py b/python/packages/core/tests/workflow/test_sub_workflow.py index d1db6882a1..6bf51b13a9 100644 --- a/python/packages/core/tests/workflow/test_sub_workflow.py +++ b/python/packages/core/tests/workflow/test_sub_workflow.py @@ -595,7 +595,7 @@ async def test_sub_workflow_checkpoint_restore_no_duplicate_requests() -> None: assert first_request_id is not None # Get checkpoint - checkpoints = await storage.list(workflow1.name) + checkpoints = await storage.list_checkpoints(workflow1.name) checkpoint_id = max(checkpoints, key=lambda cp: cp.iteration_count).checkpoint_id # Step 2: Resume workflow from checkpoint diff --git a/python/packages/core/tests/workflow/test_workflow.py b/python/packages/core/tests/workflow/test_workflow.py index 8079e464e6..cfb79383ed 100644 --- a/python/packages/core/tests/workflow/test_workflow.py +++ b/python/packages/core/tests/workflow/test_workflow.py @@ -544,7 +544,7 @@ async def test_workflow_checkpoint_runtime_only_configuration( assert result.get_final_state() == WorkflowRunState.IDLE # Verify checkpoints were created - checkpoints = await storage.list(workflow.name) + checkpoints = await storage.list_checkpoints(workflow.name) assert len(checkpoints) > 0 # Find a superstep checkpoint to resume from @@ -594,8 +594,8 @@ async def test_workflow_checkpoint_runtime_overrides_buildtime( assert result is not None # Verify checkpoints were created 
in runtime storage, not build-time storage - buildtime_checkpoints = await buildtime_storage.list(workflow.name) - runtime_checkpoints = await runtime_storage.list(workflow.name) + buildtime_checkpoints = await buildtime_storage.list_checkpoints(workflow.name) + runtime_checkpoints = await runtime_storage.list_checkpoints(workflow.name) assert len(runtime_checkpoints) > 0, "Runtime storage should have checkpoints" assert len(buildtime_checkpoints) == 0, "Build-time storage should have no checkpoints when overridden" diff --git a/python/packages/core/tests/workflow/test_workflow_agent.py b/python/packages/core/tests/workflow/test_workflow_agent.py index 00d5c7e6a6..3eb1405956 100644 --- a/python/packages/core/tests/workflow/test_workflow_agent.py +++ b/python/packages/core/tests/workflow/test_workflow_agent.py @@ -609,7 +609,7 @@ async def test_checkpoint_storage_passed_to_workflow(self) -> None: # Drain workflow events to get checkpoint # The workflow should have created checkpoints - checkpoints = await checkpoint_storage.list(workflow.name) + checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) assert len(checkpoints) > 0, "Checkpoints should have been created when checkpoint_storage is provided" async def test_agent_executor_output_response_false_filters_streaming_events(self): diff --git a/python/packages/devui/agent_framework_devui/_executor.py b/python/packages/devui/agent_framework_devui/_executor.py index 0a487cbad3..f94183e745 100644 --- a/python/packages/devui/agent_framework_devui/_executor.py +++ b/python/packages/devui/agent_framework_devui/_executor.py @@ -428,7 +428,7 @@ async def _execute_workflow( elif hil_responses: # Only auto-resume from latest checkpoint when we have HIL responses # Regular "Run" clicks should start fresh, not resume from checkpoints - checkpoints = await checkpoint_storage.list_checkpoints() # No workflow_id filter needed! 
+ checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) if checkpoints: latest = max(checkpoints, key=lambda cp: cp.timestamp) checkpoint_id = latest.checkpoint_id diff --git a/python/packages/devui/agent_framework_devui/_server.py b/python/packages/devui/agent_framework_devui/_server.py index 6393f23b4a..49ae9935ab 100644 --- a/python/packages/devui/agent_framework_devui/_server.py +++ b/python/packages/devui/agent_framework_devui/_server.py @@ -1057,7 +1057,7 @@ async def delete_conversation_item(conversation_id: str, item_id: str) -> dict[s # Extract checkpoint_id from item_id (format: "checkpoint_{checkpoint_id}") checkpoint_id = item_id[len("checkpoint_") :] storage = executor.checkpoint_manager.get_checkpoint_storage(conversation_id) - deleted = await storage.delete_checkpoint(checkpoint_id) + deleted = await storage.delete(checkpoint_id) if not deleted: raise HTTPException(status_code=404, detail="Checkpoint not found") From 310c77111ffa44e303786bc5615f4e9a1f5f1ff8 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Mon, 9 Feb 2026 09:28:03 -0800 Subject: [PATCH 07/16] Fix unit tests --- .../agent_framework/_workflows/_runner.py | 4 +-- .../orchestrations/tests/test_concurrent.py | 19 ++++------- .../orchestrations/tests/test_group_chat.py | 8 ++--- .../orchestrations/tests/test_handoff.py | 2 +- .../orchestrations/tests/test_magentic.py | 32 ++++++++++--------- .../orchestrations/tests/test_sequential.py | 28 +++++----------- 6 files changed, 38 insertions(+), 55 deletions(-) diff --git a/python/packages/core/agent_framework/_workflows/_runner.py b/python/packages/core/agent_framework/_workflows/_runner.py index c37a915b19..2e026a1acc 100644 --- a/python/packages/core/agent_framework/_workflows/_runner.py +++ b/python/packages/core/agent_framework/_workflows/_runner.py @@ -91,8 +91,8 @@ async def run_until_convergence(self) -> AsyncGenerator[WorkflowEvent, None]: # Create the first checkpoint. 
Checkpoints are usually considered to be created at the end of an iteration, # we can think of the first checkpoint as being created at the end of a "superstep 0" which captures the - # initial state before any iterations have run. This is only needed if it's not a resume from checkpoint - # scenario, since if we are resuming, the caller should have already created a checkpoint to resume from. + # states after which the start executor has run. Note that we execute the start executor outside of the + # main iteration loop. if await self._ctx.has_messages() and not self._resumed_from_checkpoint: previous_checkpoint_id = await self._create_checkpoint_if_enabled(previous_checkpoint_id) diff --git a/python/packages/orchestrations/tests/test_concurrent.py b/python/packages/orchestrations/tests/test_concurrent.py index cecc8500c8..00e540afc4 100644 --- a/python/packages/orchestrations/tests/test_concurrent.py +++ b/python/packages/orchestrations/tests/test_concurrent.py @@ -352,13 +352,10 @@ async def test_concurrent_checkpoint_resume_round_trip() -> None: assert baseline_output is not None - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(wf.name) assert checkpoints checkpoints.sort(key=lambda cp: cp.timestamp) - resume_checkpoint = next( - (cp for cp in checkpoints if (cp.metadata or {}).get("checkpoint_type") == "superstep"), - checkpoints[-1], - ) + resume_checkpoint = checkpoints[1] resumed_participants = ( _FakeAgentExec("agentA", "Alpha"), @@ -398,14 +395,10 @@ async def test_concurrent_checkpoint_runtime_only() -> None: assert baseline_output is not None - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(wf.name) assert checkpoints checkpoints.sort(key=lambda cp: cp.timestamp) - - resume_checkpoint = next( - (cp for cp in checkpoints if (cp.metadata or {}).get("checkpoint_type") == "superstep"), - checkpoints[-1], - ) + resume_checkpoint = checkpoints[1] resumed_agents = 
[_FakeAgentExec(id="agent1", reply_text="A1"), _FakeAgentExec(id="agent2", reply_text="A2")] wf_resume = ConcurrentBuilder(participants=resumed_agents).build() @@ -448,8 +441,8 @@ async def test_concurrent_checkpoint_runtime_overrides_buildtime() -> None: assert baseline_output is not None - buildtime_checkpoints = await buildtime_storage.list_checkpoints() - runtime_checkpoints = await runtime_storage.list_checkpoints() + buildtime_checkpoints = await buildtime_storage.list_checkpoints(wf.name) + runtime_checkpoints = await runtime_storage.list_checkpoints(wf.name) assert len(runtime_checkpoints) > 0, "Runtime storage should have checkpoints" assert len(buildtime_checkpoints) == 0, "Build-time storage should have no checkpoints when overridden" diff --git a/python/packages/orchestrations/tests/test_group_chat.py b/python/packages/orchestrations/tests/test_group_chat.py index 718b8eb3a7..74a6789cfc 100644 --- a/python/packages/orchestrations/tests/test_group_chat.py +++ b/python/packages/orchestrations/tests/test_group_chat.py @@ -623,7 +623,7 @@ async def test_group_chat_checkpoint_runtime_only() -> None: assert baseline_output is not None - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(wf.name) assert len(checkpoints) > 0, "Runtime-only checkpointing should have created checkpoints" @@ -659,8 +659,8 @@ async def test_group_chat_checkpoint_runtime_overrides_buildtime() -> None: assert baseline_output is not None - buildtime_checkpoints = await buildtime_storage.list_checkpoints() - runtime_checkpoints = await runtime_storage.list_checkpoints() + buildtime_checkpoints = await buildtime_storage.list_checkpoints(wf.name) + runtime_checkpoints = await runtime_storage.list_checkpoints(wf.name) assert len(runtime_checkpoints) > 0, "Runtime storage should have checkpoints" assert len(buildtime_checkpoints) == 0, "Build-time storage should have no checkpoints when overridden" @@ -913,7 +913,7 @@ def create_beta() -> 
StubAgent: assert outputs, "Should have workflow output" - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(workflow.name) assert checkpoints, "Checkpoints should be created during workflow execution" diff --git a/python/packages/orchestrations/tests/test_handoff.py b/python/packages/orchestrations/tests/test_handoff.py index 7b382d3511..126082b11b 100644 --- a/python/packages/orchestrations/tests/test_handoff.py +++ b/python/packages/orchestrations/tests/test_handoff.py @@ -675,7 +675,7 @@ def create_specialist() -> MockHandoffAgent: assert outputs, "Should have workflow output after termination condition is met" # List checkpoints - just verify they were created - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(workflow.name) assert checkpoints, "Checkpoints should be created during workflow execution" diff --git a/python/packages/orchestrations/tests/test_magentic.py b/python/packages/orchestrations/tests/test_magentic.py index 5846b56ae4..3ea8e1bc79 100644 --- a/python/packages/orchestrations/tests/test_magentic.py +++ b/python/packages/orchestrations/tests/test_magentic.py @@ -362,7 +362,7 @@ async def test_magentic_checkpoint_resume_round_trip(): assert req_event is not None assert isinstance(req_event.data, MagenticPlanReviewRequest) - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(wf.name) assert checkpoints checkpoints.sort(key=lambda cp: cp.timestamp) resume_checkpoint = checkpoints[-1] @@ -605,8 +605,9 @@ async def test_agent_executor_invoke_with_assistants_client_messages(): async def _collect_checkpoints( storage: InMemoryCheckpointStorage, + workflow_name: str, ) -> list[WorkflowCheckpoint]: - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(workflow_name) assert checkpoints checkpoints.sort(key=lambda cp: cp.timestamp) return checkpoints @@ -619,12 +620,13 @@ async def 
test_magentic_checkpoint_resume_inner_loop_superstep(): participants=[StubThreadAgent()], checkpoint_storage=storage, manager=InvokeOnceManager() ).build() - async for event in workflow.run("inner-loop task", stream=True): - if event.type == "output": - break + async for _ in workflow.run("inner-loop task", stream=True): + continue - checkpoints = await _collect_checkpoints(storage) - inner_loop_checkpoint = next(cp for cp in checkpoints if cp.metadata.get("superstep") == 1) # type: ignore[reportUnknownMemberType] + checkpoints = await _collect_checkpoints(storage, workflow.name) + # The first checkpoint is after the manager has run. + # The second checkpoint is after the participant has run. + inner_loop_checkpoint = checkpoints[1] resumed = MagenticBuilder( participants=[StubThreadAgent()], checkpoint_storage=storage, manager=InvokeOnceManager() @@ -651,7 +653,7 @@ async def test_magentic_checkpoint_resume_from_saved_state(): if event.type == "output": break - checkpoints = await _collect_checkpoints(storage) + checkpoints = await _collect_checkpoints(storage, workflow.name) # Verify we can resume from the last saved checkpoint resumed_state = checkpoints[-1] # Use the last checkpoint @@ -688,7 +690,7 @@ async def test_magentic_checkpoint_resume_rejects_participant_renames(): assert req_event is not None assert isinstance(req_event.data, MagenticPlanReviewRequest) - checkpoints = await _collect_checkpoints(storage) + checkpoints = await _collect_checkpoints(storage, workflow.name) target_checkpoint = checkpoints[-1] renamed_workflow = MagenticBuilder( @@ -772,7 +774,7 @@ async def test_magentic_checkpoint_runtime_only() -> None: assert baseline_output is not None - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(wf.name) assert len(checkpoints) > 0, "Runtime-only checkpointing should have created checkpoints" @@ -806,8 +808,8 @@ async def test_magentic_checkpoint_runtime_overrides_buildtime() -> None: assert 
baseline_output is not None - buildtime_checkpoints = await buildtime_storage.list_checkpoints() - runtime_checkpoints = await runtime_storage.list_checkpoints() + buildtime_checkpoints = await buildtime_storage.list_checkpoints(wf.name) + runtime_checkpoints = await runtime_storage.list_checkpoints(wf.name) assert len(runtime_checkpoints) > 0, "Runtime storage should have checkpoints" assert len(buildtime_checkpoints) == 0, "Build-time storage should have no checkpoints when overridden" @@ -856,13 +858,13 @@ async def test_magentic_checkpoint_restore_no_duplicate_history(): break # Get checkpoint - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(wf.name) assert len(checkpoints) > 0, "Should have created checkpoints" latest_checkpoint = checkpoints[-1] # Load checkpoint and verify no duplicates in state - checkpoint_data = await storage.load_checkpoint(latest_checkpoint.checkpoint_id) + checkpoint_data = await storage.load(latest_checkpoint.checkpoint_id) assert checkpoint_data is not None # Check the magentic_context in the checkpoint @@ -1001,7 +1003,7 @@ def create_agent() -> StubAgent: assert outputs, "Should have workflow output" - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(workflow.name) assert checkpoints, "Checkpoints should be created during workflow execution" diff --git a/python/packages/orchestrations/tests/test_sequential.py b/python/packages/orchestrations/tests/test_sequential.py index cb6f3b0872..921889fed6 100644 --- a/python/packages/orchestrations/tests/test_sequential.py +++ b/python/packages/orchestrations/tests/test_sequential.py @@ -224,14 +224,10 @@ async def test_sequential_checkpoint_resume_round_trip() -> None: assert baseline_output is not None - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(wf.name) assert checkpoints checkpoints.sort(key=lambda cp: cp.timestamp) - - resume_checkpoint = next( - 
(cp for cp in checkpoints if (cp.metadata or {}).get("checkpoint_type") == "superstep"), - checkpoints[-1], - ) + resume_checkpoint = checkpoints[0] resumed_agents = (_EchoAgent(id="agent1", name="A1"), _EchoAgent(id="agent2", name="A2")) wf_resume = SequentialBuilder(participants=list(resumed_agents), checkpoint_storage=storage).build() @@ -267,14 +263,10 @@ async def test_sequential_checkpoint_runtime_only() -> None: assert baseline_output is not None - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(wf.name) assert checkpoints checkpoints.sort(key=lambda cp: cp.timestamp) - - resume_checkpoint = next( - (cp for cp in checkpoints if (cp.metadata or {}).get("checkpoint_type") == "superstep"), - checkpoints[-1], - ) + resume_checkpoint = checkpoints[0] resumed_agents = (_EchoAgent(id="agent1", name="A1"), _EchoAgent(id="agent2", name="A2")) wf_resume = SequentialBuilder(participants=list(resumed_agents)).build() @@ -318,8 +310,8 @@ async def test_sequential_checkpoint_runtime_overrides_buildtime() -> None: assert baseline_output is not None - buildtime_checkpoints = await buildtime_storage.list_checkpoints() - runtime_checkpoints = await runtime_storage.list_checkpoints() + buildtime_checkpoints = await buildtime_storage.list_checkpoints(wf.name) + runtime_checkpoints = await runtime_storage.list_checkpoints(wf.name) assert len(runtime_checkpoints) > 0, "Runtime storage should have checkpoints" assert len(buildtime_checkpoints) == 0, "Build-time storage should have no checkpoints when overridden" @@ -346,14 +338,10 @@ def create_agent2() -> _EchoAgent: assert baseline_output is not None - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(wf.name) assert checkpoints checkpoints.sort(key=lambda cp: cp.timestamp) - - resume_checkpoint = next( - (cp for cp in checkpoints if (cp.metadata or {}).get("checkpoint_type") == "superstep"), - checkpoints[-1], - ) + resume_checkpoint = 
checkpoints[0] wf_resume = SequentialBuilder( participant_factories=[create_agent1, create_agent2], checkpoint_storage=storage From 1aa2da18ab8a55cff6265d5b5ff1337dbc3fd3b2 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Mon, 9 Feb 2026 14:02:52 -0800 Subject: [PATCH 08/16] Fix samples --- .../agent_framework/_workflows/_checkpoint.py | 13 +- .../agent_framework/_workflows/_workflow.py | 9 + .../_workflows/_workflow_executor.py | 15 - .../core/tests/workflow/test_checkpoint.py | 1370 ++++++++++++++--- .../test_request_info_and_response.py | 86 -- .../test_request_info_event_rehydrate.py | 248 ++- ...ff_with_tool_approval_checkpoint_resume.py | 236 +++ .../orchestrations/magentic_checkpoint.py | 4 +- .../checkpoint_with_human_in_the_loop.py | 6 +- .../checkpoint/checkpoint_with_resume.py | 2 +- ...ff_with_tool_approval_checkpoint_resume.py | 405 ----- .../checkpoint/sub_workflow_checkpoint.py | 2 +- .../workflow_as_agent_checkpoint.py | 6 +- 13 files changed, 1664 insertions(+), 738 deletions(-) create mode 100644 python/samples/getting_started/orchestrations/handoff_with_tool_approval_checkpoint_resume.py delete mode 100644 python/samples/getting_started/workflows/checkpoint/handoff_with_tool_approval_checkpoint_resume.py diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint.py b/python/packages/core/agent_framework/_workflows/_checkpoint.py index af73a7d854..eea8079d42 100644 --- a/python/packages/core/agent_framework/_workflows/_checkpoint.py +++ b/python/packages/core/agent_framework/_workflows/_checkpoint.py @@ -8,7 +8,7 @@ import os import uuid from collections.abc import Mapping -from dataclasses import asdict, dataclass, field +from dataclasses import dataclass, field, fields from datetime import datetime, timezone from pathlib import Path from typing import TYPE_CHECKING, Any, Protocol, TypeAlias @@ -87,7 +87,14 @@ class WorkflowCheckpoint: version: str = "1.0" def to_dict(self) -> dict[str, Any]: - return asdict(self) + """Convert the 
WorkflowCheckpoint to a dictionary. + + Notes: + 1. This method does not recursively convert nested dataclasses to dicts. + 2. This is a shallow conversion. The resulting dict will contain the same + references to nested objects as the original dataclass. + """ + return {f.name: getattr(self, f.name) for f in fields(self)} @classmethod def from_dict(cls, data: Mapping[str, Any]) -> WorkflowCheckpoint: @@ -244,7 +251,7 @@ async def save(self, checkpoint: WorkflowCheckpoint) -> CheckpointID: from ._checkpoint_encoding import encode_checkpoint_value file_path = self.storage_path / f"{checkpoint.checkpoint_id}.json" - checkpoint_dict = asdict(checkpoint) + checkpoint_dict = checkpoint.to_dict() encoded_checkpoint = encode_checkpoint_value(checkpoint_dict) def _write_atomic() -> None: diff --git a/python/packages/core/agent_framework/_workflows/_workflow.py b/python/packages/core/agent_framework/_workflows/_workflow.py index a043a785f9..72c1945a9c 100644 --- a/python/packages/core/agent_framework/_workflows/_workflow.py +++ b/python/packages/core/agent_framework/_workflows/_workflow.py @@ -567,6 +567,15 @@ async def _run_core( ): if event.type == "output" and not self._should_yield_output_event(event): continue + if event.type == "request_info" and event.request_id in (responses or {}): + # Don't yield request_info events for which we have responses to send - + # these are considered "handled". This prevents the caller from seeing + # events for requests they are already responding to. + # This usually happens when responses are provided with a checkpoint + # (restore then send), because the request_info events are stored in the + # checkpoint and would be emitted on restoration by the runner regardless + # of if a response is provided or not. 
+ continue yield event async def _run_cleanup(self, checkpoint_storage: CheckpointStorage | None) -> None: diff --git a/python/packages/core/agent_framework/_workflows/_workflow_executor.py b/python/packages/core/agent_framework/_workflows/_workflow_executor.py index 88ce8936ac..37481a03b0 100644 --- a/python/packages/core/agent_framework/_workflows/_workflow_executor.py +++ b/python/packages/core/agent_framework/_workflows/_workflow_executor.py @@ -653,21 +653,6 @@ async def _handle_response( try: # Resume the sub-workflow with all collected responses result = await self.workflow.run(responses=responses_to_send) - # Remove handled requests from result. The result may contain the original - # RequestInfoEvents that were already handled. This is due to checkpointing - # and rehydration of the workflow that re-adds the RequestInfoEvents to the - # workflow's _runner_context thus the event queue. When the workflow is resumed, - # those events will be emitted at the very beginning of the superstep, prior to - # processing messages/responses, creating the illusion that the workflow is - # requesting the same information again. 
- for request_id in responses_to_send: - event_to_remove = next( - (event for event in result if event.type == "request_info" and event.request_id == request_id), - None, - ) - if event_to_remove: - result.remove(event_to_remove) - # Process the workflow result using shared logic await self._process_workflow_result(result, execution_context, ctx) finally: diff --git a/python/packages/core/tests/workflow/test_checkpoint.py b/python/packages/core/tests/workflow/test_checkpoint.py index cccd8907f3..e39f2c0862 100644 --- a/python/packages/core/tests/workflow/test_checkpoint.py +++ b/python/packages/core/tests/workflow/test_checkpoint.py @@ -2,6 +2,7 @@ import json import tempfile +from dataclasses import dataclass from datetime import datetime, timezone from pathlib import Path @@ -12,7 +13,47 @@ InMemoryCheckpointStorage, WorkflowCheckpoint, WorkflowCheckpointException, + WorkflowEvent, ) +from agent_framework._workflows._runner_context import Message + + +# Module-level dataclasses for pickle serialization in roundtrip tests +@dataclass +class _TestToolApprovalRequest: + """Request data for tool approval in tests.""" + + tool_name: str + arguments: dict + timestamp: datetime + + +@dataclass +class _TestExecutorState: + """Executor state for tests.""" + + counter: int + history: list[str] + + +@dataclass +class _TestApprovalRequest: + """Approval request data for tests.""" + + action: str + params: tuple + + +@dataclass +class _TestCustomData: + """Custom data for tests.""" + + name: str + value: int + tags: list[str] + + +# region test WorkflowCheckpoint def test_workflow_checkpoint_default_values(): @@ -57,6 +98,64 @@ def test_workflow_checkpoint_custom_values(): assert checkpoint.version == "2.0" +def test_workflow_checkpoint_to_dict(): + checkpoint = WorkflowCheckpoint( + checkpoint_id="test-id", + workflow_name="test-workflow", + graph_signature_hash="test-hash", + messages={"executor1": [{"data": "test"}]}, + state={"key": "value"}, + iteration_count=5, + ) + + 
result = checkpoint.to_dict() + + assert result["checkpoint_id"] == "test-id" + assert result["workflow_name"] == "test-workflow" + assert result["graph_signature_hash"] == "test-hash" + assert result["messages"] == {"executor1": [{"data": "test"}]} + assert result["state"] == {"key": "value"} + assert result["iteration_count"] == 5 + + +def test_workflow_checkpoint_previous_checkpoint_id(): + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + previous_checkpoint_id="previous-id-123", + ) + + assert checkpoint.previous_checkpoint_id == "previous-id-123" + + +# endregion + +# region InMemoryCheckpointStorage + + +def test_checkpoint_storage_protocol_compliance(): + # This test ensures both implementations have all required methods + memory_storage = InMemoryCheckpointStorage() + + with tempfile.TemporaryDirectory() as temp_dir: + file_storage = FileCheckpointStorage(temp_dir) + + for storage in [memory_storage, file_storage]: + # Test that all protocol methods exist and are callable + assert hasattr(storage, "save") + assert callable(storage.save) + assert hasattr(storage, "load") + assert callable(storage.load) + assert hasattr(storage, "list_checkpoints") + assert callable(storage.list_checkpoints) + assert hasattr(storage, "delete") + assert callable(storage.delete) + assert hasattr(storage, "list_checkpoint_ids") + assert callable(storage.list_checkpoint_ids) + assert hasattr(storage, "get_latest") + assert callable(storage.get_latest) + + async def test_memory_checkpoint_storage_save_and_load(): storage = InMemoryCheckpointStorage() checkpoint = WorkflowCheckpoint( @@ -151,247 +250,683 @@ async def test_memory_checkpoint_storage_delete(): assert result is False -async def test_file_checkpoint_storage_save_and_load(): - with tempfile.TemporaryDirectory() as temp_dir: - storage = FileCheckpointStorage(temp_dir) - checkpoint = WorkflowCheckpoint( - workflow_name="test-workflow", - 
graph_signature_hash="test-hash", - messages={"executor1": [{"data": "hello", "source_id": "test", "target_id": None}]}, - state={"key": "value"}, - pending_request_info_events={"req123": {"data": "test"}}, - ) +async def test_memory_checkpoint_storage_get_latest(): + import asyncio - # Save checkpoint - saved_id = await storage.save(checkpoint) - assert saved_id == checkpoint.checkpoint_id + storage = InMemoryCheckpointStorage() - # Verify file was created - file_path = Path(temp_dir) / f"{checkpoint.checkpoint_id}.json" - assert file_path.exists() + # Create checkpoints with small delays to ensure different timestamps + checkpoint1 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-1") + await asyncio.sleep(0.01) + checkpoint2 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-2") + await asyncio.sleep(0.01) + checkpoint3 = WorkflowCheckpoint(workflow_name="workflow-2", graph_signature_hash="hash-3") - # Load checkpoint - loaded_checkpoint = await storage.load(checkpoint.checkpoint_id) - assert loaded_checkpoint is not None - assert loaded_checkpoint.checkpoint_id == checkpoint.checkpoint_id - assert loaded_checkpoint.workflow_name == checkpoint.workflow_name - assert loaded_checkpoint.graph_signature_hash == checkpoint.graph_signature_hash - assert loaded_checkpoint.messages == checkpoint.messages - assert loaded_checkpoint.state == checkpoint.state - assert loaded_checkpoint.pending_request_info_events == checkpoint.pending_request_info_events + await storage.save(checkpoint1) + await storage.save(checkpoint2) + await storage.save(checkpoint3) + # Test get_latest for workflow-1 + latest = await storage.get_latest("workflow-1") + assert latest is not None + assert latest.checkpoint_id == checkpoint2.checkpoint_id -async def test_file_checkpoint_storage_load_nonexistent(): - with tempfile.TemporaryDirectory() as temp_dir: - storage = FileCheckpointStorage(temp_dir) + # Test get_latest for workflow-2 + latest2 = 
await storage.get_latest("workflow-2") + assert latest2 is not None + assert latest2.checkpoint_id == checkpoint3.checkpoint_id - with pytest.raises(WorkflowCheckpointException): - await storage.load("nonexistent-id") + # Test get_latest for non-existent workflow + latest_none = await storage.get_latest("nonexistent-workflow") + assert latest_none is None -async def test_file_checkpoint_storage_list(): - with tempfile.TemporaryDirectory() as temp_dir: - storage = FileCheckpointStorage(temp_dir) +async def test_workflow_checkpoint_chaining_via_previous_checkpoint_id(): + """Test that consecutive checkpoints created by a workflow are properly chained via previous_checkpoint_id.""" + from typing_extensions import Never - # Create checkpoints for different workflows - checkpoint1 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-1") - checkpoint2 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-2") - checkpoint3 = WorkflowCheckpoint(workflow_name="workflow-2", graph_signature_hash="hash-3") + from agent_framework import WorkflowBuilder, WorkflowContext, handler + from agent_framework._workflows._executor import Executor - await storage.save(checkpoint1) - await storage.save(checkpoint2) - await storage.save(checkpoint3) + class StartExecutor(Executor): + @handler + async def run(self, message: str, ctx: WorkflowContext[str]) -> None: + await ctx.send_message(message, target_id="middle") - # Test list_ids for workflow-1 - workflow1_checkpoint_ids = await storage.list_checkpoint_ids("workflow-1") - assert len(workflow1_checkpoint_ids) == 2 - assert checkpoint1.checkpoint_id in workflow1_checkpoint_ids - assert checkpoint2.checkpoint_id in workflow1_checkpoint_ids + class MiddleExecutor(Executor): + @handler + async def process(self, message: str, ctx: WorkflowContext[str]) -> None: + await ctx.send_message(message + "-processed", target_id="finish") - # Test list for workflow-1 (returns objects) - workflow1_checkpoints 
= await storage.list_checkpoints("workflow-1") - assert len(workflow1_checkpoints) == 2 - assert all(isinstance(cp, WorkflowCheckpoint) for cp in workflow1_checkpoints) - checkpoint_ids = {cp.checkpoint_id for cp in workflow1_checkpoints} - assert checkpoint_ids == {checkpoint1.checkpoint_id, checkpoint2.checkpoint_id} + class FinishExecutor(Executor): + @handler + async def finish(self, message: str, ctx: WorkflowContext[Never, str]) -> None: + await ctx.yield_output(message + "-done") - # Test list_ids for workflow-2 - workflow2_checkpoint_ids = await storage.list_checkpoint_ids("workflow-2") - assert len(workflow2_checkpoint_ids) == 1 - assert checkpoint3.checkpoint_id in workflow2_checkpoint_ids + storage = InMemoryCheckpointStorage() - # Test list for workflow-2 (returns objects) - workflow2_checkpoints = await storage.list_checkpoints("workflow-2") - assert len(workflow2_checkpoints) == 1 - assert workflow2_checkpoints[0].checkpoint_id == checkpoint3.checkpoint_id + start = StartExecutor(id="start") + middle = MiddleExecutor(id="middle") + finish = FinishExecutor(id="finish") + workflow = ( + WorkflowBuilder(max_iterations=10, start_executor=start, checkpoint_storage=storage) + .add_edge(start, middle) + .add_edge(middle, finish) + .build() + ) -async def test_file_checkpoint_storage_delete(): - with tempfile.TemporaryDirectory() as temp_dir: - storage = FileCheckpointStorage(temp_dir) - checkpoint = WorkflowCheckpoint(workflow_name="test-workflow", graph_signature_hash="test-hash") + # Run workflow - this creates checkpoints at each superstep + _ = [event async for event in workflow.run("hello", stream=True)] - # Save checkpoint - await storage.save(checkpoint) - file_path = Path(temp_dir) / f"{checkpoint.checkpoint_id}.json" - assert file_path.exists() + # Get all checkpoints sorted by timestamp + checkpoints = sorted(await storage.list_checkpoints(workflow.name), key=lambda c: c.timestamp) - # Delete checkpoint - result = await 
storage.delete(checkpoint.checkpoint_id) - assert result is True - assert not file_path.exists() + # Should have multiple checkpoints (one initial + one per superstep) + assert len(checkpoints) >= 2, f"Expected at least 2 checkpoints, got {len(checkpoints)}" - # Try to delete again - result = await storage.delete(checkpoint.checkpoint_id) - assert result is False + # Verify chaining: first checkpoint has no previous + assert checkpoints[0].previous_checkpoint_id is None + # Subsequent checkpoints should chain to the previous one + for i in range(1, len(checkpoints)): + assert checkpoints[i].previous_checkpoint_id == checkpoints[i - 1].checkpoint_id, ( + f"Checkpoint {i} should chain to checkpoint {i - 1}" + ) -async def test_file_checkpoint_storage_directory_creation(): - with tempfile.TemporaryDirectory() as temp_dir: - nested_path = Path(temp_dir) / "nested" / "checkpoint" / "storage" - storage = FileCheckpointStorage(nested_path) - # Directory should be created - assert nested_path.exists() - assert nested_path.is_dir() +async def test_memory_checkpoint_storage_roundtrip_json_native_types(): + """Test that JSON-native types (str, int, float, bool, None) roundtrip correctly.""" + storage = InMemoryCheckpointStorage() - # Should be able to save checkpoints - checkpoint = WorkflowCheckpoint(workflow_name="test-workflow", graph_signature_hash="test-hash") - await storage.save(checkpoint) + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + state={ + "string": "hello world", + "integer": 42, + "negative_int": -100, + "float": 3.14159, + "negative_float": -2.71828, + "bool_true": True, + "bool_false": False, + "null_value": None, + "zero": 0, + "empty_string": "", + }, + ) - file_path = nested_path / f"{checkpoint.checkpoint_id}.json" - assert file_path.exists() + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + assert loaded.state == checkpoint.state -async def 
test_file_checkpoint_storage_corrupted_file(): - with tempfile.TemporaryDirectory() as temp_dir: - storage = FileCheckpointStorage(temp_dir) - # Create a corrupted JSON file - corrupted_file = Path(temp_dir) / "corrupted.json" - with open(corrupted_file, "w") as f: # noqa: ASYNC230 - f.write("{ invalid json }") +async def test_memory_checkpoint_storage_roundtrip_datetime(): + """Test that datetime objects roundtrip correctly.""" + storage = InMemoryCheckpointStorage() - # list should handle the corrupted file gracefully - checkpoints = await storage.list_checkpoints("any-workflow") - assert checkpoints == [] + now = datetime.now(timezone.utc) + specific_datetime = datetime(2025, 6, 15, 10, 30, 45, 123456, tzinfo=timezone.utc) + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + state={ + "current_time": now, + "specific_time": specific_datetime, + "nested": {"created_at": now, "updated_at": specific_datetime}, + }, + ) -async def test_file_checkpoint_storage_json_serialization(): - with tempfile.TemporaryDirectory() as temp_dir: - storage = FileCheckpointStorage(temp_dir) + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) - # Create checkpoint with complex nested data - checkpoint = WorkflowCheckpoint( - workflow_name="test-workflow", - graph_signature_hash="test-hash", - messages={"executor1": [{"data": {"nested": {"value": 42}}, "source_id": "test", "target_id": None}]}, - state={"list": [1, 2, 3], "dict": {"a": "b", "c": {"d": "e"}}, "bool": True, "null": None}, - pending_request_info_events={"req123": {"data": "test"}}, - ) + assert loaded.state["current_time"] == now + assert loaded.state["specific_time"] == specific_datetime + assert loaded.state["nested"]["created_at"] == now + assert loaded.state["nested"]["updated_at"] == specific_datetime - # Save and load - await storage.save(checkpoint) - loaded = await storage.load(checkpoint.checkpoint_id) - assert loaded is not 
None - assert loaded.messages == checkpoint.messages - assert loaded.state == checkpoint.state +async def test_memory_checkpoint_storage_roundtrip_dataclass(): + """Test that dataclass objects roundtrip correctly.""" + storage = InMemoryCheckpointStorage() - # Verify the JSON file is properly formatted - file_path = Path(temp_dir) / f"{checkpoint.checkpoint_id}.json" - with open(file_path) as f: # noqa: ASYNC230 - data = json.load(f) + custom_obj = _TestCustomData(name="test", value=42, tags=["a", "b", "c"]) - assert data["messages"]["executor1"][0]["data"]["nested"]["value"] == 42 - assert data["state"]["list"] == [1, 2, 3] - assert data["state"]["bool"] is True - assert data["state"]["null"] is None - assert data["pending_request_info_events"]["req123"]["data"] == "test" + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + state={ + "custom_data": custom_obj, + "nested": {"inner_data": custom_obj}, + }, + ) + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) -def test_checkpoint_storage_protocol_compliance(): - # This test ensures both implementations have all required methods - memory_storage = InMemoryCheckpointStorage() + assert loaded.state["custom_data"] == custom_obj + assert loaded.state["custom_data"].name == "test" + assert loaded.state["custom_data"].value == 42 + assert loaded.state["custom_data"].tags == ["a", "b", "c"] + assert loaded.state["nested"]["inner_data"] == custom_obj + assert isinstance(loaded.state["custom_data"], _TestCustomData) - with tempfile.TemporaryDirectory() as temp_dir: - file_storage = FileCheckpointStorage(temp_dir) - for storage in [memory_storage, file_storage]: - # Test that all protocol methods exist and are callable - assert hasattr(storage, "save") - assert callable(storage.save) - assert hasattr(storage, "load") - assert callable(storage.load) - assert hasattr(storage, "list_checkpoints") - assert callable(storage.list_checkpoints) 
- assert hasattr(storage, "delete") - assert callable(storage.delete) - assert hasattr(storage, "list_checkpoint_ids") - assert callable(storage.list_checkpoint_ids) - assert hasattr(storage, "get_latest") - assert callable(storage.get_latest) +async def test_memory_checkpoint_storage_roundtrip_tuple_and_set(): + """Test that tuples and frozensets roundtrip correctly (type preserved in memory).""" + storage = InMemoryCheckpointStorage() + original_tuple = (1, "two", 3.0, None) + original_frozenset = frozenset({1, 2, 3}) -def test_workflow_checkpoint_to_dict(): checkpoint = WorkflowCheckpoint( - checkpoint_id="test-id", workflow_name="test-workflow", graph_signature_hash="test-hash", - messages={"executor1": [{"data": "test"}]}, - state={"key": "value"}, - iteration_count=5, + state={ + "my_tuple": original_tuple, + "my_frozenset": original_frozenset, + "nested_tuple": {"inner": (10, 20, 30)}, + }, ) - result = checkpoint.to_dict() + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) - assert result["checkpoint_id"] == "test-id" - assert result["workflow_name"] == "test-workflow" - assert result["graph_signature_hash"] == "test-hash" - assert result["messages"] == {"executor1": [{"data": "test"}]} - assert result["state"] == {"key": "value"} - assert result["iteration_count"] == 5 + # In-memory storage preserves exact types (no JSON serialization) + assert loaded.state["my_tuple"] == original_tuple + assert isinstance(loaded.state["my_tuple"], tuple) + assert loaded.state["my_frozenset"] == original_frozenset + assert isinstance(loaded.state["my_frozenset"], frozenset) + assert loaded.state["nested_tuple"]["inner"] == (10, 20, 30) + assert isinstance(loaded.state["nested_tuple"]["inner"], tuple) -def test_workflow_checkpoint_previous_checkpoint_id(): +async def test_memory_checkpoint_storage_roundtrip_complex_nested_structures(): + """Test complex nested structures with mixed types roundtrip correctly.""" + storage = 
InMemoryCheckpointStorage() + + # Create complex nested structure mixing JSON-native and non-native types + complex_state = { + "level1": { + "level2": { + "level3": { + "deep_string": "hello", + "deep_int": 123, + "deep_datetime": datetime(2025, 1, 1, tzinfo=timezone.utc), + "deep_tuple": (1, 2, 3), + } + }, + "list_of_dicts": [ + {"a": 1, "b": datetime(2025, 2, 1, tzinfo=timezone.utc)}, + {"c": 2, "d": (4, 5, 6)}, + ], + }, + "mixed_list": [ + "string", + 42, + 3.14, + True, + None, + datetime(2025, 3, 1, tzinfo=timezone.utc), + (7, 8, 9), + ], + } + checkpoint = WorkflowCheckpoint( workflow_name="test-workflow", graph_signature_hash="test-hash", - previous_checkpoint_id="previous-id-123", + state=complex_state, ) - assert checkpoint.previous_checkpoint_id == "previous-id-123" + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + # Verify deep nested values + assert loaded.state["level1"]["level2"]["level3"]["deep_string"] == "hello" + assert loaded.state["level1"]["level2"]["level3"]["deep_int"] == 123 + assert loaded.state["level1"]["level2"]["level3"]["deep_datetime"] == datetime(2025, 1, 1, tzinfo=timezone.utc) + assert loaded.state["level1"]["level2"]["level3"]["deep_tuple"] == (1, 2, 3) + assert isinstance(loaded.state["level1"]["level2"]["level3"]["deep_tuple"], tuple) + + # Verify list of dicts + assert loaded.state["level1"]["list_of_dicts"][0]["a"] == 1 + assert loaded.state["level1"]["list_of_dicts"][0]["b"] == datetime(2025, 2, 1, tzinfo=timezone.utc) + assert loaded.state["level1"]["list_of_dicts"][1]["d"] == (4, 5, 6) + assert isinstance(loaded.state["level1"]["list_of_dicts"][1]["d"], tuple) + + # Verify mixed list with correct types + assert loaded.state["mixed_list"][0] == "string" + assert loaded.state["mixed_list"][1] == 42 + assert loaded.state["mixed_list"][5] == datetime(2025, 3, 1, tzinfo=timezone.utc) + assert loaded.state["mixed_list"][6] == (7, 8, 9) + assert 
isinstance(loaded.state["mixed_list"][6], tuple) + + +async def test_memory_checkpoint_storage_roundtrip_messages_with_complex_data(): + """Test that messages dict with Message objects roundtrips correctly.""" + storage = InMemoryCheckpointStorage() + msg1 = Message( + data={"text": "hello", "timestamp": datetime(2025, 1, 1, tzinfo=timezone.utc)}, + source_id="source", + target_id="target", + ) + msg2 = Message( + data=(1, 2, 3), + source_id="s2", + target_id=None, + ) + msg3 = Message( + data="simple string", + source_id="s3", + target_id="t3", + ) -async def test_memory_checkpoint_storage_get_latest(): - import asyncio + messages = { + "executor1": [msg1, msg2], + "executor2": [msg3], + } + + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + messages=messages, + ) + + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + # Verify messages structure and types + assert len(loaded.messages["executor1"]) == 2 + loaded_msg1 = loaded.messages["executor1"][0] + loaded_msg2 = loaded.messages["executor1"][1] + loaded_msg3 = loaded.messages["executor2"][0] + + # Verify Message type is preserved + assert isinstance(loaded_msg1, Message) + assert isinstance(loaded_msg2, Message) + assert isinstance(loaded_msg3, Message) + + # Verify Message fields + assert loaded_msg1.data["text"] == "hello" + assert loaded_msg1.data["timestamp"] == datetime(2025, 1, 1, tzinfo=timezone.utc) + assert loaded_msg1.source_id == "source" + assert loaded_msg1.target_id == "target" + + assert loaded_msg2.data == (1, 2, 3) + assert isinstance(loaded_msg2.data, tuple) + assert loaded_msg2.source_id == "s2" + assert loaded_msg2.target_id is None + assert loaded_msg3.data == "simple string" + assert loaded_msg3.source_id == "s3" + assert loaded_msg3.target_id == "t3" + + +async def test_memory_checkpoint_storage_roundtrip_pending_request_info_events(): + """Test that pending_request_info_events with WorkflowEvent 
objects roundtrip correctly.""" storage = InMemoryCheckpointStorage() - # Create checkpoints with small delays to ensure different timestamps - checkpoint1 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-1") - await asyncio.sleep(0.01) - checkpoint2 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-2") - await asyncio.sleep(0.01) - checkpoint3 = WorkflowCheckpoint(workflow_name="workflow-2", graph_signature_hash="hash-3") + # Create request_info events using the proper WorkflowEvent factory + event1 = WorkflowEvent.request_info( + request_id="req123", + source_executor_id="executor1", + request_data="What is your name?", + response_type=str, + ) + event2 = WorkflowEvent.request_info( + request_id="req456", + source_executor_id="executor2", + request_data=_TestToolApprovalRequest( + tool_name="search", + arguments={"query": "test"}, + timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + ), + response_type=bool, + ) - await storage.save(checkpoint1) - await storage.save(checkpoint2) - await storage.save(checkpoint3) + pending_events = { + "req123": event1, + "req456": event2, + } - # Test get_latest for workflow-1 - latest = await storage.get_latest("workflow-1") - assert latest is not None - assert latest.checkpoint_id == checkpoint2.checkpoint_id + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + pending_request_info_events=pending_events, + ) - # Test get_latest for workflow-2 - latest2 = await storage.get_latest("workflow-2") - assert latest2 is not None - assert latest2.checkpoint_id == checkpoint3.checkpoint_id + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + # Verify WorkflowEvent type is preserved + loaded_event1 = loaded.pending_request_info_events["req123"] + loaded_event2 = loaded.pending_request_info_events["req456"] + + assert isinstance(loaded_event1, WorkflowEvent) + assert isinstance(loaded_event2, 
WorkflowEvent) + + # Verify event1 fields + assert loaded_event1.type == "request_info" + assert loaded_event1.request_id == "req123" + assert loaded_event1.source_executor_id == "executor1" + assert loaded_event1.data == "What is your name?" + assert loaded_event1.response_type is str + + # Verify event2 fields with complex data + assert loaded_event2.type == "request_info" + assert loaded_event2.request_id == "req456" + assert loaded_event2.source_executor_id == "executor2" + assert isinstance(loaded_event2.data, _TestToolApprovalRequest) + assert loaded_event2.data.tool_name == "search" + assert loaded_event2.data.arguments == {"query": "test"} + assert loaded_event2.data.timestamp == datetime(2025, 1, 1, tzinfo=timezone.utc) + assert loaded_event2.response_type is bool + + +async def test_memory_checkpoint_storage_roundtrip_full_checkpoint(): + """Test complete WorkflowCheckpoint roundtrip with all fields populated using proper types.""" + storage = InMemoryCheckpointStorage() - # Test get_latest for non-existent workflow - latest_none = await storage.get_latest("nonexistent-workflow") - assert latest_none is None + # Create proper Message objects + msg1 = Message(data="msg1", source_id="s", target_id="t") + msg2 = Message(data=datetime(2025, 1, 1, tzinfo=timezone.utc), source_id="a", target_id="b") + + # Create proper WorkflowEvent for pending request + pending_event = WorkflowEvent.request_info( + request_id="req1", + source_executor_id="exec1", + request_data=_TestApprovalRequest(action="approve", params=(1, 2, 3)), + response_type=bool, + ) + + checkpoint = WorkflowCheckpoint( + checkpoint_id="full-test-checkpoint", + workflow_name="comprehensive-test", + graph_signature_hash="hash-abc123", + previous_checkpoint_id="previous-checkpoint-id", + timestamp=datetime(2025, 6, 15, 12, 0, 0, tzinfo=timezone.utc).isoformat(), + messages={ + "exec1": [msg1], + "exec2": [msg2], + }, + state={ + "user_data": {"name": "test", "created": datetime(2025, 1, 1, 
tzinfo=timezone.utc)}, + "_executor_state": { + "exec1": _TestExecutorState(counter=5, history=["a", "b", "c"]), + }, + }, + pending_request_info_events={ + "req1": pending_event, + }, + iteration_count=10, + metadata={ + "superstep": 5, + "started_at": datetime(2025, 6, 15, 11, 0, 0, tzinfo=timezone.utc), + }, + version="1.0", + ) + + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + # Verify all scalar fields + assert loaded.checkpoint_id == checkpoint.checkpoint_id + assert loaded.workflow_name == checkpoint.workflow_name + assert loaded.graph_signature_hash == checkpoint.graph_signature_hash + assert loaded.previous_checkpoint_id == checkpoint.previous_checkpoint_id + assert loaded.timestamp == checkpoint.timestamp + assert loaded.iteration_count == checkpoint.iteration_count + assert loaded.version == checkpoint.version + + # Verify complex nested state data + assert loaded.state["user_data"]["created"] == datetime(2025, 1, 1, tzinfo=timezone.utc) + assert loaded.state["_executor_state"]["exec1"].counter == 5 + assert loaded.state["_executor_state"]["exec1"].history == ["a", "b", "c"] + assert isinstance(loaded.state["_executor_state"]["exec1"], _TestExecutorState) + + # Verify messages are proper Message objects + loaded_msg1 = loaded.messages["exec1"][0] + loaded_msg2 = loaded.messages["exec2"][0] + assert isinstance(loaded_msg1, Message) + assert isinstance(loaded_msg2, Message) + assert loaded_msg1.data == "msg1" + assert loaded_msg1.source_id == "s" + assert loaded_msg2.data == datetime(2025, 1, 1, tzinfo=timezone.utc) + + # Verify pending events are proper WorkflowEvent objects + loaded_event = loaded.pending_request_info_events["req1"] + assert isinstance(loaded_event, WorkflowEvent) + assert loaded_event.type == "request_info" + assert loaded_event.request_id == "req1" + assert isinstance(loaded_event.data, _TestApprovalRequest) + assert loaded_event.data.params == (1, 2, 3) + + # Verify metadata + assert 
loaded.metadata["superstep"] == 5 + assert loaded.metadata["started_at"] == datetime(2025, 6, 15, 11, 0, 0, tzinfo=timezone.utc) + + +async def test_memory_checkpoint_storage_roundtrip_bytes(): + """Test that bytes objects roundtrip correctly.""" + storage = InMemoryCheckpointStorage() + + binary_data = b"\x00\x01\x02\xff\xfe\xfd" + unicode_bytes = "Hello 世界".encode("utf-8") + + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + state={ + "binary_data": binary_data, + "unicode_bytes": unicode_bytes, + "nested": {"inner_bytes": binary_data}, + }, + ) + + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + assert loaded.state["binary_data"] == binary_data + assert loaded.state["unicode_bytes"] == unicode_bytes + assert loaded.state["nested"]["inner_bytes"] == binary_data + assert isinstance(loaded.state["binary_data"], bytes) + + +async def test_memory_checkpoint_storage_roundtrip_empty_collections(): + """Test that empty collections roundtrip correctly (types preserved in memory).""" + storage = InMemoryCheckpointStorage() + + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + state={ + "empty_dict": {}, + "empty_list": [], + "empty_tuple": (), + "nested_empty": {"inner_dict": {}, "inner_list": []}, + }, + messages={}, + pending_request_info_events={}, + ) + + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + assert loaded.state["empty_dict"] == {} + assert loaded.state["empty_list"] == [] + # In-memory storage preserves exact types (no JSON serialization) + assert loaded.state["empty_tuple"] == () + assert isinstance(loaded.state["empty_tuple"], tuple) + assert loaded.state["nested_empty"]["inner_dict"] == {} + assert loaded.messages == {} + assert loaded.pending_request_info_events == {} + + +# endregion + +# region FileCheckpointStorage + + +async def 
test_file_checkpoint_storage_save_and_load(): + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + messages={"executor1": [{"data": "hello", "source_id": "test", "target_id": None}]}, + state={"key": "value"}, + pending_request_info_events={"req123": {"data": "test"}}, + ) + + # Save checkpoint + saved_id = await storage.save(checkpoint) + assert saved_id == checkpoint.checkpoint_id + + # Verify file was created + file_path = Path(temp_dir) / f"{checkpoint.checkpoint_id}.json" + assert file_path.exists() + + # Load checkpoint + loaded_checkpoint = await storage.load(checkpoint.checkpoint_id) + assert loaded_checkpoint is not None + assert loaded_checkpoint.checkpoint_id == checkpoint.checkpoint_id + assert loaded_checkpoint.workflow_name == checkpoint.workflow_name + assert loaded_checkpoint.graph_signature_hash == checkpoint.graph_signature_hash + assert loaded_checkpoint.messages == checkpoint.messages + assert loaded_checkpoint.state == checkpoint.state + assert loaded_checkpoint.pending_request_info_events == checkpoint.pending_request_info_events + + +async def test_file_checkpoint_storage_load_nonexistent(): + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + with pytest.raises(WorkflowCheckpointException): + await storage.load("nonexistent-id") + + +async def test_file_checkpoint_storage_list(): + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + # Create checkpoints for different workflows + checkpoint1 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-1") + checkpoint2 = WorkflowCheckpoint(workflow_name="workflow-1", graph_signature_hash="hash-2") + checkpoint3 = WorkflowCheckpoint(workflow_name="workflow-2", graph_signature_hash="hash-3") + + await storage.save(checkpoint1) + await 
storage.save(checkpoint2) + await storage.save(checkpoint3) + + # Test list_ids for workflow-1 + workflow1_checkpoint_ids = await storage.list_checkpoint_ids("workflow-1") + assert len(workflow1_checkpoint_ids) == 2 + assert checkpoint1.checkpoint_id in workflow1_checkpoint_ids + assert checkpoint2.checkpoint_id in workflow1_checkpoint_ids + + # Test list for workflow-1 (returns objects) + workflow1_checkpoints = await storage.list_checkpoints("workflow-1") + assert len(workflow1_checkpoints) == 2 + assert all(isinstance(cp, WorkflowCheckpoint) for cp in workflow1_checkpoints) + checkpoint_ids = {cp.checkpoint_id for cp in workflow1_checkpoints} + assert checkpoint_ids == {checkpoint1.checkpoint_id, checkpoint2.checkpoint_id} + + # Test list_ids for workflow-2 + workflow2_checkpoint_ids = await storage.list_checkpoint_ids("workflow-2") + assert len(workflow2_checkpoint_ids) == 1 + assert checkpoint3.checkpoint_id in workflow2_checkpoint_ids + + # Test list for workflow-2 (returns objects) + workflow2_checkpoints = await storage.list_checkpoints("workflow-2") + assert len(workflow2_checkpoints) == 1 + assert workflow2_checkpoints[0].checkpoint_id == checkpoint3.checkpoint_id + + +async def test_file_checkpoint_storage_delete(): + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + checkpoint = WorkflowCheckpoint(workflow_name="test-workflow", graph_signature_hash="test-hash") + + # Save checkpoint + await storage.save(checkpoint) + file_path = Path(temp_dir) / f"{checkpoint.checkpoint_id}.json" + assert file_path.exists() + + # Delete checkpoint + result = await storage.delete(checkpoint.checkpoint_id) + assert result is True + assert not file_path.exists() + + # Try to delete again + result = await storage.delete(checkpoint.checkpoint_id) + assert result is False + + +async def test_file_checkpoint_storage_directory_creation(): + with tempfile.TemporaryDirectory() as temp_dir: + nested_path = Path(temp_dir) / "nested" / 
"checkpoint" / "storage" + storage = FileCheckpointStorage(nested_path) + + # Directory should be created + assert nested_path.exists() + assert nested_path.is_dir() + + # Should be able to save checkpoints + checkpoint = WorkflowCheckpoint(workflow_name="test-workflow", graph_signature_hash="test-hash") + await storage.save(checkpoint) + + file_path = nested_path / f"{checkpoint.checkpoint_id}.json" + assert file_path.exists() + + +async def test_file_checkpoint_storage_corrupted_file(): + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + # Create a corrupted JSON file + corrupted_file = Path(temp_dir) / "corrupted.json" + with open(corrupted_file, "w") as f: # noqa: ASYNC230 + f.write("{ invalid json }") + + # list should handle the corrupted file gracefully + checkpoints = await storage.list_checkpoints("any-workflow") + assert checkpoints == [] + + +async def test_file_checkpoint_storage_json_serialization(): + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + # Create checkpoint with complex nested data + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + messages={"executor1": [{"data": {"nested": {"value": 42}}, "source_id": "test", "target_id": None}]}, + state={"list": [1, 2, 3], "dict": {"a": "b", "c": {"d": "e"}}, "bool": True, "null": None}, + pending_request_info_events={"req123": {"data": "test"}}, + ) + + # Save and load + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + assert loaded is not None + assert loaded.messages == checkpoint.messages + assert loaded.state == checkpoint.state + + # Verify the JSON file is properly formatted + file_path = Path(temp_dir) / f"{checkpoint.checkpoint_id}.json" + with open(file_path) as f: # noqa: ASYNC230 + data = json.load(f) + + assert data["messages"]["executor1"][0]["data"]["nested"]["value"] == 42 + assert 
data["state"]["list"] == [1, 2, 3] + assert data["state"]["bool"] is True + assert data["state"]["null"] is None + assert data["pending_request_info_events"]["req123"]["data"] == "test" async def test_file_checkpoint_storage_get_latest(): @@ -454,55 +989,456 @@ async def test_file_checkpoint_storage_list_ids_empty(): assert checkpoint_ids == [] -async def test_workflow_checkpoint_chaining_via_previous_checkpoint_id(): - """Test that consecutive checkpoints created by a workflow are properly chained via previous_checkpoint_id.""" - from typing_extensions import Never +async def test_file_checkpoint_storage_roundtrip_json_native_types(): + """Test that JSON-native types (str, int, float, bool, None) roundtrip correctly.""" + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) - from agent_framework import WorkflowBuilder, WorkflowContext, handler - from agent_framework._workflows._executor import Executor + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + state={ + "string": "hello world", + "integer": 42, + "negative_int": -100, + "float": 3.14159, + "negative_float": -2.71828, + "bool_true": True, + "bool_false": False, + "null_value": None, + "zero": 0, + "empty_string": "", + }, + ) - class StartExecutor(Executor): - @handler - async def run(self, message: str, ctx: WorkflowContext[str]) -> None: - await ctx.send_message(message, target_id="middle") + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) - class MiddleExecutor(Executor): - @handler - async def process(self, message: str, ctx: WorkflowContext[str]) -> None: - await ctx.send_message(message + "-processed", target_id="finish") + assert loaded.state == checkpoint.state - class FinishExecutor(Executor): - @handler - async def finish(self, message: str, ctx: WorkflowContext[Never, str]) -> None: - await ctx.yield_output(message + "-done") - storage = 
InMemoryCheckpointStorage() +async def test_file_checkpoint_storage_roundtrip_datetime(): + """Test that datetime objects roundtrip correctly via pickle encoding.""" + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) - start = StartExecutor(id="start") - middle = MiddleExecutor(id="middle") - finish = FinishExecutor(id="finish") + now = datetime.now(timezone.utc) + specific_datetime = datetime(2025, 6, 15, 10, 30, 45, 123456, tzinfo=timezone.utc) - workflow = ( - WorkflowBuilder(max_iterations=10, start_executor=start, checkpoint_storage=storage) - .add_edge(start, middle) - .add_edge(middle, finish) - .build() - ) + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + state={ + "current_time": now, + "specific_time": specific_datetime, + "nested": {"created_at": now, "updated_at": specific_datetime}, + }, + ) - # Run workflow - this creates checkpoints at each superstep - _ = [event async for event in workflow.run("hello", stream=True)] + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) - # Get all checkpoints sorted by timestamp - checkpoints = sorted(await storage.list_checkpoints(workflow.name), key=lambda c: c.timestamp) + assert loaded.state["current_time"] == now + assert loaded.state["specific_time"] == specific_datetime + assert loaded.state["nested"]["created_at"] == now + assert loaded.state["nested"]["updated_at"] == specific_datetime - # Should have multiple checkpoints (one initial + one per superstep) - assert len(checkpoints) >= 2, f"Expected at least 2 checkpoints, got {len(checkpoints)}" - # Verify chaining: first checkpoint has no previous - assert checkpoints[0].previous_checkpoint_id is None +async def test_file_checkpoint_storage_roundtrip_dataclass(): + """Test that dataclass objects roundtrip correctly via pickle encoding.""" + with tempfile.TemporaryDirectory() as temp_dir: + storage = 
FileCheckpointStorage(temp_dir) - # Subsequent checkpoints should chain to the previous one - for i in range(1, len(checkpoints)): - assert checkpoints[i].previous_checkpoint_id == checkpoints[i - 1].checkpoint_id, ( - f"Checkpoint {i} should chain to checkpoint {i - 1}" + custom_obj = _TestCustomData(name="test", value=42, tags=["a", "b", "c"]) + + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + state={ + "custom_data": custom_obj, + "nested": {"inner_data": custom_obj}, + }, + ) + + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + assert loaded.state["custom_data"] == custom_obj + assert loaded.state["custom_data"].name == "test" + assert loaded.state["custom_data"].value == 42 + assert loaded.state["custom_data"].tags == ["a", "b", "c"] + assert loaded.state["nested"]["inner_data"] == custom_obj + assert isinstance(loaded.state["custom_data"], _TestCustomData) + + +async def test_file_checkpoint_storage_roundtrip_tuple_and_set(): + """Test tuple/frozenset encoding behavior. + + Note: Tuples containing only JSON-native types become lists in JSON encoding. + Frozensets get pickled since they're not JSON-serializable collections. + For type-preserving tuple storage, wrap them in a dataclass or other non-JSON-native type. 
+ """ + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + original_tuple = (1, "two", 3.0, None) + original_frozenset = frozenset({1, 2, 3}) + + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + state={ + "my_tuple": original_tuple, + "my_frozenset": original_frozenset, + "nested_tuple": {"inner": (10, 20, 30)}, + }, ) + + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + # Tuples containing JSON-native values become lists (JSON doesn't have tuple type) + assert loaded.state["my_tuple"] == [1, "two", 3.0, None] + assert isinstance(loaded.state["my_tuple"], list) + + # Frozensets are pickled and preserve their type + assert loaded.state["my_frozenset"] == original_frozenset + assert isinstance(loaded.state["my_frozenset"], frozenset) + + # Nested tuples also become lists + assert loaded.state["nested_tuple"]["inner"] == [10, 20, 30] + + +async def test_file_checkpoint_storage_roundtrip_complex_nested_structures(): + """Test complex nested structures with mixed types roundtrip correctly.""" + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + # Create complex nested structure mixing JSON-native and non-native types + complex_state = { + "level1": { + "level2": { + "level3": { + "deep_string": "hello", + "deep_int": 123, + "deep_datetime": datetime(2025, 1, 1, tzinfo=timezone.utc), + "deep_tuple": (1, 2, 3), + } + }, + "list_of_dicts": [ + {"a": 1, "b": datetime(2025, 2, 1, tzinfo=timezone.utc)}, + {"c": 2, "d": (4, 5, 6)}, + ], + }, + "mixed_list": [ + "string", + 42, + 3.14, + True, + None, + datetime(2025, 3, 1, tzinfo=timezone.utc), + (7, 8, 9), + ], + } + + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + state=complex_state, + ) + + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + # 
Verify deep nested values + assert loaded.state["level1"]["level2"]["level3"]["deep_string"] == "hello" + assert loaded.state["level1"]["level2"]["level3"]["deep_int"] == 123 + assert loaded.state["level1"]["level2"]["level3"]["deep_datetime"] == datetime(2025, 1, 1, tzinfo=timezone.utc) + # Tuples containing JSON-native values become lists + assert loaded.state["level1"]["level2"]["level3"]["deep_tuple"] == [1, 2, 3] + + # Verify list of dicts + assert loaded.state["level1"]["list_of_dicts"][0]["a"] == 1 + assert loaded.state["level1"]["list_of_dicts"][0]["b"] == datetime(2025, 2, 1, tzinfo=timezone.utc) + # Tuples containing JSON-native values become lists + assert loaded.state["level1"]["list_of_dicts"][1]["d"] == [4, 5, 6] + + # Verify mixed list with correct types + assert loaded.state["mixed_list"][0] == "string" + assert loaded.state["mixed_list"][1] == 42 + assert loaded.state["mixed_list"][5] == datetime(2025, 3, 1, tzinfo=timezone.utc) + # Tuples containing JSON-native values become lists + assert loaded.state["mixed_list"][6] == [7, 8, 9] + assert isinstance(loaded.state["mixed_list"][6], list) + + +async def test_file_checkpoint_storage_roundtrip_messages_with_complex_data(): + """Test that messages dict with Message objects roundtrips correctly.""" + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + msg1 = Message( + data={"text": "hello", "timestamp": datetime(2025, 1, 1, tzinfo=timezone.utc)}, + source_id="source", + target_id="target", + ) + msg2 = Message( + data=(1, 2, 3), + source_id="s2", + target_id=None, + ) + msg3 = Message( + data="simple string", + source_id="s3", + target_id="t3", + ) + + messages = { + "executor1": [msg1, msg2], + "executor2": [msg3], + } + + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + messages=messages, + ) + + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + # Verify 
messages structure and types + assert len(loaded.messages["executor1"]) == 2 + loaded_msg1 = loaded.messages["executor1"][0] + loaded_msg2 = loaded.messages["executor1"][1] + loaded_msg3 = loaded.messages["executor2"][0] + + # Verify Message type is preserved + assert isinstance(loaded_msg1, Message) + assert isinstance(loaded_msg2, Message) + assert isinstance(loaded_msg3, Message) + + # Verify Message fields + assert loaded_msg1.data["text"] == "hello" + assert loaded_msg1.data["timestamp"] == datetime(2025, 1, 1, tzinfo=timezone.utc) + assert loaded_msg1.source_id == "source" + assert loaded_msg1.target_id == "target" + + assert loaded_msg2.data == (1, 2, 3) + assert isinstance(loaded_msg2.data, tuple) + assert loaded_msg2.source_id == "s2" + assert loaded_msg2.target_id is None + + assert loaded_msg3.data == "simple string" + assert loaded_msg3.source_id == "s3" + assert loaded_msg3.target_id == "t3" + + +async def test_file_checkpoint_storage_roundtrip_pending_request_info_events(): + """Test that pending_request_info_events with WorkflowEvent objects roundtrip correctly.""" + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + # Create request_info events using the proper WorkflowEvent factory + event1 = WorkflowEvent.request_info( + request_id="req123", + source_executor_id="executor1", + request_data="What is your name?", + response_type=str, + ) + event2 = WorkflowEvent.request_info( + request_id="req456", + source_executor_id="executor2", + request_data=_TestToolApprovalRequest( + tool_name="search", + arguments={"query": "test"}, + timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + ), + response_type=bool, + ) + + pending_events = { + "req123": event1, + "req456": event2, + } + + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + pending_request_info_events=pending_events, + ) + + await storage.save(checkpoint) + loaded = await 
storage.load(checkpoint.checkpoint_id) + + # Verify WorkflowEvent type is preserved + loaded_event1 = loaded.pending_request_info_events["req123"] + loaded_event2 = loaded.pending_request_info_events["req456"] + + assert isinstance(loaded_event1, WorkflowEvent) + assert isinstance(loaded_event2, WorkflowEvent) + + # Verify event1 fields + assert loaded_event1.type == "request_info" + assert loaded_event1.request_id == "req123" + assert loaded_event1.source_executor_id == "executor1" + assert loaded_event1.data == "What is your name?" + assert loaded_event1.response_type is str + + # Verify event2 fields with complex data + assert loaded_event2.type == "request_info" + assert loaded_event2.request_id == "req456" + assert loaded_event2.source_executor_id == "executor2" + assert isinstance(loaded_event2.data, _TestToolApprovalRequest) + assert loaded_event2.data.tool_name == "search" + assert loaded_event2.data.arguments == {"query": "test"} + assert loaded_event2.data.timestamp == datetime(2025, 1, 1, tzinfo=timezone.utc) + assert loaded_event2.response_type is bool + + +async def test_file_checkpoint_storage_roundtrip_full_checkpoint(): + """Test complete WorkflowCheckpoint roundtrip with all fields populated using proper types.""" + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + # Create proper Message objects + msg1 = Message(data="msg1", source_id="s", target_id="t") + msg2 = Message(data=datetime(2025, 1, 1, tzinfo=timezone.utc), source_id="a", target_id="b") + + # Create proper WorkflowEvent for pending request + pending_event = WorkflowEvent.request_info( + request_id="req1", + source_executor_id="exec1", + request_data=_TestApprovalRequest(action="approve", params=(1, 2, 3)), + response_type=bool, + ) + + checkpoint = WorkflowCheckpoint( + checkpoint_id="full-test-checkpoint", + workflow_name="comprehensive-test", + graph_signature_hash="hash-abc123", + previous_checkpoint_id="previous-checkpoint-id", + 
timestamp=datetime(2025, 6, 15, 12, 0, 0, tzinfo=timezone.utc).isoformat(), + messages={ + "exec1": [msg1], + "exec2": [msg2], + }, + state={ + "user_data": {"name": "test", "created": datetime(2025, 1, 1, tzinfo=timezone.utc)}, + "_executor_state": { + "exec1": _TestExecutorState(counter=5, history=["a", "b", "c"]), + }, + }, + pending_request_info_events={ + "req1": pending_event, + }, + iteration_count=10, + metadata={ + "superstep": 5, + "started_at": datetime(2025, 6, 15, 11, 0, 0, tzinfo=timezone.utc), + }, + version="1.0", + ) + + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + # Verify all scalar fields + assert loaded.checkpoint_id == checkpoint.checkpoint_id + assert loaded.workflow_name == checkpoint.workflow_name + assert loaded.graph_signature_hash == checkpoint.graph_signature_hash + assert loaded.previous_checkpoint_id == checkpoint.previous_checkpoint_id + assert loaded.timestamp == checkpoint.timestamp + assert loaded.iteration_count == checkpoint.iteration_count + assert loaded.version == checkpoint.version + + # Verify complex nested state data + assert loaded.state["user_data"]["created"] == datetime(2025, 1, 1, tzinfo=timezone.utc) + assert loaded.state["_executor_state"]["exec1"].counter == 5 + assert loaded.state["_executor_state"]["exec1"].history == ["a", "b", "c"] + assert isinstance(loaded.state["_executor_state"]["exec1"], _TestExecutorState) + + # Verify messages are proper Message objects + loaded_msg1 = loaded.messages["exec1"][0] + loaded_msg2 = loaded.messages["exec2"][0] + assert isinstance(loaded_msg1, Message) + assert isinstance(loaded_msg2, Message) + assert loaded_msg1.data == "msg1" + assert loaded_msg1.source_id == "s" + assert loaded_msg2.data == datetime(2025, 1, 1, tzinfo=timezone.utc) + + # Verify pending events are proper WorkflowEvent objects + loaded_event = loaded.pending_request_info_events["req1"] + assert isinstance(loaded_event, WorkflowEvent) + assert loaded_event.type 
== "request_info" + assert loaded_event.request_id == "req1" + assert isinstance(loaded_event.data, _TestApprovalRequest) + assert loaded_event.data.params == (1, 2, 3) + + # Verify metadata + assert loaded.metadata["superstep"] == 5 + assert loaded.metadata["started_at"] == datetime(2025, 6, 15, 11, 0, 0, tzinfo=timezone.utc) + + +async def test_file_checkpoint_storage_roundtrip_bytes(): + """Test that bytes objects roundtrip correctly via pickle encoding.""" + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + binary_data = b"\x00\x01\x02\xff\xfe\xfd" + unicode_bytes = "Hello 世界".encode("utf-8") + + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + state={ + "binary_data": binary_data, + "unicode_bytes": unicode_bytes, + "nested": {"inner_bytes": binary_data}, + }, + ) + + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + assert loaded.state["binary_data"] == binary_data + assert loaded.state["unicode_bytes"] == unicode_bytes + assert loaded.state["nested"]["inner_bytes"] == binary_data + assert isinstance(loaded.state["binary_data"], bytes) + + +async def test_file_checkpoint_storage_roundtrip_empty_collections(): + """Test that empty collections roundtrip correctly. + + Note: Empty tuples become empty lists (JSON doesn't have tuple type). 
+ """ + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + checkpoint = WorkflowCheckpoint( + workflow_name="test-workflow", + graph_signature_hash="test-hash", + state={ + "empty_dict": {}, + "empty_list": [], + "empty_tuple": (), + "nested_empty": {"inner_dict": {}, "inner_list": []}, + }, + messages={}, + pending_request_info_events={}, + ) + + await storage.save(checkpoint) + loaded = await storage.load(checkpoint.checkpoint_id) + + assert loaded.state["empty_dict"] == {} + assert loaded.state["empty_list"] == [] + # Empty tuples become empty lists (JSON doesn't have tuple type) + assert loaded.state["empty_tuple"] == [] + assert isinstance(loaded.state["empty_tuple"], list) + assert loaded.state["nested_empty"]["inner_dict"] == {} + assert loaded.messages == {} + assert loaded.pending_request_info_events == {} + + +# endregion diff --git a/python/packages/core/tests/workflow/test_request_info_and_response.py b/python/packages/core/tests/workflow/test_request_info_and_response.py index d883ac1b27..05a7ed1ec5 100644 --- a/python/packages/core/tests/workflow/test_request_info_and_response.py +++ b/python/packages/core/tests/workflow/test_request_info_and_response.py @@ -3,7 +3,6 @@ from dataclasses import dataclass from agent_framework import ( - FileCheckpointStorage, WorkflowBuilder, WorkflowContext, WorkflowEvent, @@ -323,88 +322,3 @@ async def test_invalid_calculation_input(self): assert completed # Should not have any calculations performed due to invalid input assert len(executor.calculations_performed) == 0 - - async def test_checkpoint_with_pending_request_info_events(self): - """Test that request info events are properly serialized in checkpoints and can be restored.""" - import tempfile - - with tempfile.TemporaryDirectory() as temp_dir: - # Use file-based storage to test full serialization - storage = FileCheckpointStorage(temp_dir) - - # Create workflow with checkpointing enabled - executor = 
ApprovalRequiredExecutor(id="approval_executor") - workflow = WorkflowBuilder(start_executor=executor, checkpoint_storage=storage).build() - - # Step 1: Run workflow to completion to ensure checkpoints are created - request_info_event: WorkflowEvent | None = None - async for event in workflow.run("checkpoint test operation", stream=True): - if event.type == "request_info": - request_info_event = event - - # Verify request was emitted - assert request_info_event is not None - assert isinstance(request_info_event.data, UserApprovalRequest) - assert request_info_event.data.prompt == "Please approve the operation: checkpoint test operation" - assert request_info_event.source_executor_id == "approval_executor" - - # Step 2: List checkpoints to find the one with our pending request - checkpoints = await storage.list_checkpoints(workflow.name) - assert len(checkpoints) > 0, "No checkpoints were created during workflow execution" - - # Find the checkpoint with our pending request - checkpoint_with_request = None - for checkpoint in checkpoints: - if request_info_event.request_id in checkpoint.pending_request_info_events: - checkpoint_with_request = checkpoint - break - - assert checkpoint_with_request is not None, "No checkpoint found with pending request info event" - - # Step 3: Verify the pending request info event was properly serialized - serialized_event = checkpoint_with_request.pending_request_info_events[request_info_event.request_id] - assert serialized_event.data - assert serialized_event.request_type is UserApprovalRequest - assert serialized_event.request_id == request_info_event.request_id - assert serialized_event.source_executor_id == "approval_executor" - - # Step 4: Create a fresh workflow and restore from checkpoint - new_executor = ApprovalRequiredExecutor(id="approval_executor") - restored_workflow = WorkflowBuilder(start_executor=new_executor, checkpoint_storage=storage).build() - - # Step 5: Resume from checkpoint and verify the request can be 
continued - completed = False - restored_request_event: WorkflowEvent | None = None - async for event in restored_workflow.run(checkpoint_id=checkpoint_with_request.checkpoint_id, stream=True): - # Should re-emit the pending request info event - if event.type == "request_info" and event.request_id == request_info_event.request_id: - restored_request_event = event - elif event.type == "status" and event.state == WorkflowRunState.IDLE_WITH_PENDING_REQUESTS: - completed = True - - assert completed, "Workflow should reach idle with pending requests state after restoration" - assert restored_request_event is not None, "Restored request info event should be emitted" - - # Verify the restored event matches the original - assert restored_request_event.source_executor_id == request_info_event.source_executor_id - assert isinstance(restored_request_event.data, UserApprovalRequest) - assert restored_request_event.data.prompt == request_info_event.data.prompt - assert restored_request_event.data.context == request_info_event.data.context - - # Step 6: Provide response to the restored request and complete the workflow - final_completed = False - async for event in restored_workflow.run( - stream=True, - responses={ - request_info_event.request_id: True # Approve the request - }, - ): - if event.type == "status" and event.state == WorkflowRunState.IDLE: - final_completed = True - - assert final_completed, "Workflow should complete after providing response to restored request" - - # Step 7: Verify the executor state was properly restored and response was processed - assert new_executor.approval_received is True - expected_result = "Operation approved: Please approve the operation: checkpoint test operation" - assert new_executor.final_result == expected_result diff --git a/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py b/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py index 7ed504ef0c..c2925ccc51 100644 --- 
a/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py +++ b/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py @@ -4,7 +4,13 @@ from dataclasses import dataclass, field from datetime import datetime, timezone -from agent_framework import InMemoryCheckpointStorage, InProcRunnerContext +from agent_framework import ( + FileCheckpointStorage, + InMemoryCheckpointStorage, + InProcRunnerContext, + WorkflowBuilder, + WorkflowRunState, +) from agent_framework._workflows._checkpoint_encoding import ( _PICKLE_MARKER, encode_checkpoint_value, @@ -12,6 +18,13 @@ from agent_framework._workflows._events import WorkflowEvent from agent_framework._workflows._state import State +from .test_request_info_and_response import ( + ApprovalRequiredExecutor, + CalculationRequest, + MultiRequestExecutor, + UserApprovalRequest, +) + @dataclass class MockRequest: ... @@ -109,3 +122,236 @@ async def test_request_info_event_serializes_non_json_payloads() -> None: rehydrated_2 = pending["req-2"] assert isinstance(rehydrated_2.data, SlottedApproval) assert rehydrated_2.data.note == "slot-based" + + +async def test_checkpoint_with_pending_request_info_events(): + """Test that request info events are properly serialized in checkpoints and can be restored.""" + import tempfile + + with tempfile.TemporaryDirectory() as temp_dir: + # Use file-based storage to test full serialization + storage = FileCheckpointStorage(temp_dir) + + # Create workflow with checkpointing enabled + executor = ApprovalRequiredExecutor(id="approval_executor") + workflow = WorkflowBuilder(start_executor=executor, checkpoint_storage=storage).build() + + # Step 1: Run workflow to completion to ensure checkpoints are created + request_info_event: WorkflowEvent | None = None + async for event in workflow.run("checkpoint test operation", stream=True): + if event.type == "request_info": + request_info_event = event + + # Verify request was emitted + assert request_info_event is not None + 
assert isinstance(request_info_event.data, UserApprovalRequest) + assert request_info_event.data.prompt == "Please approve the operation: checkpoint test operation" + assert request_info_event.source_executor_id == "approval_executor" + + # Step 2: List checkpoints to find the one with our pending request + checkpoints = await storage.list_checkpoints(workflow.name) + assert len(checkpoints) > 0, "No checkpoints were created during workflow execution" + + # Find the checkpoint with our pending request + checkpoint_with_request = None + for checkpoint in checkpoints: + if request_info_event.request_id in checkpoint.pending_request_info_events: + checkpoint_with_request = checkpoint + break + + assert checkpoint_with_request is not None, "No checkpoint found with pending request info event" + + # Step 3: Verify the pending request info event was properly serialized + serialized_event = checkpoint_with_request.pending_request_info_events[request_info_event.request_id] + assert serialized_event.data + assert serialized_event.request_type is UserApprovalRequest + assert serialized_event.request_id == request_info_event.request_id + assert serialized_event.source_executor_id == "approval_executor" + + # Step 4: Create a fresh workflow and restore from checkpoint + new_executor = ApprovalRequiredExecutor(id="approval_executor") + restored_workflow = WorkflowBuilder(start_executor=new_executor, checkpoint_storage=storage).build() + + # Step 5: Resume from checkpoint and verify the request can be continued + completed = False + restored_request_event: WorkflowEvent | None = None + async for event in restored_workflow.run(checkpoint_id=checkpoint_with_request.checkpoint_id, stream=True): + # Should re-emit the pending request info event + if event.type == "request_info" and event.request_id == request_info_event.request_id: + restored_request_event = event + elif event.type == "status" and event.state == WorkflowRunState.IDLE_WITH_PENDING_REQUESTS: + completed = True + + 
assert completed, "Workflow should reach idle with pending requests state after restoration" + assert restored_request_event is not None, "Restored request info event should be emitted" + + # Verify the restored event matches the original + assert restored_request_event.source_executor_id == request_info_event.source_executor_id + assert isinstance(restored_request_event.data, UserApprovalRequest) + assert restored_request_event.data.prompt == request_info_event.data.prompt + assert restored_request_event.data.context == request_info_event.data.context + + # Step 6: Provide response to the restored request and complete the workflow + final_completed = False + async for event in restored_workflow.run( + stream=True, + responses={ + request_info_event.request_id: True # Approve the request + }, + ): + if event.type == "status" and event.state == WorkflowRunState.IDLE: + final_completed = True + + assert final_completed, "Workflow should complete after providing response to restored request" + + # Step 7: Verify the executor state was properly restored and response was processed + assert new_executor.approval_received is True + expected_result = "Operation approved: Please approve the operation: checkpoint test operation" + assert new_executor.final_result == expected_result + + +async def test_checkpoint_restore_with_responses_does_not_reemit_handled_requests(): + """Test that request_info events are not re-emitted when responses are provided with checkpoint restore. + + When calling run(checkpoint_id=..., responses=...), the workflow restores from a checkpoint + that contains pending request_info events. Because responses are provided for those events, + they should NOT be re-emitted in the event stream - they are considered "handled". + + Note: The workflow's internal state tracking still sees the request_info events (before filtering), + so the final status may be IDLE_WITH_PENDING_REQUESTS even though the requests were handled. 
+ The key behavior we're testing is that the CALLER doesn't see the request_info events. + """ + import tempfile + + with tempfile.TemporaryDirectory() as temp_dir: + # Use file-based storage to test full serialization + storage = FileCheckpointStorage(temp_dir) + + # Create workflow with checkpointing enabled + executor = ApprovalRequiredExecutor(id="approval_executor") + workflow = WorkflowBuilder(start_executor=executor, checkpoint_storage=storage).build() + + # Step 1: Run workflow until it emits a request_info event + request_info_event: WorkflowEvent | None = None + async for event in workflow.run("test pending request suppression", stream=True): + if event.type == "request_info": + request_info_event = event + + assert request_info_event is not None + request_id = request_info_event.request_id + + # Step 2: Find the checkpoint with the pending request + checkpoints = await storage.list_checkpoints(workflow.name) + checkpoint_with_request = None + for checkpoint in checkpoints: + if request_id in checkpoint.pending_request_info_events: + checkpoint_with_request = checkpoint + break + + assert checkpoint_with_request is not None + + # Step 3: Create a fresh workflow and restore from checkpoint WITH responses in one call + new_executor = ApprovalRequiredExecutor(id="approval_executor") + restored_workflow = WorkflowBuilder(start_executor=new_executor, checkpoint_storage=storage).build() + + # Track all emitted events + emitted_events: list[WorkflowEvent] = [] + async for event in restored_workflow.run( + checkpoint_id=checkpoint_with_request.checkpoint_id, + responses={request_id: True}, # Provide response for the pending request + stream=True, + ): + emitted_events.append(event) + + # Step 4: Verify the request_info event was NOT re-emitted to the caller + reemitted_request_info_events = [ + e for e in emitted_events if e.type == "request_info" and e.request_id == request_id + ] + assert len(reemitted_request_info_events) == 0, ( + f"request_info event should 
NOT be re-emitted when response is provided. " + f"Found {len(reemitted_request_info_events)} request_info events with request_id={request_id}" + ) + + # Step 5: Verify the response was processed by checking executor state + assert new_executor.approval_received is True, "Response should have been processed by the executor" + assert new_executor.final_result == ( + "Operation approved: Please approve the operation: test pending request suppression" + ) + + +async def test_checkpoint_restore_with_partial_responses_reemits_unhandled_requests(self): + """Test that only unhandled request_info events are re-emitted when partial responses are provided. + + When calling run(checkpoint_id=..., responses=...) with responses for only some of the + pending requests, only the unhandled request_info events should be re-emitted. + """ + import tempfile + + with tempfile.TemporaryDirectory() as temp_dir: + storage = FileCheckpointStorage(temp_dir) + + # Create workflow with multiple requests + executor = MultiRequestExecutor(id="multi_executor") + workflow = WorkflowBuilder(start_executor=executor, checkpoint_storage=storage).build() + + # Step 1: Run workflow until it emits multiple request_info events + request_events: list[WorkflowEvent] = [] + async for event in workflow.run("start batch", stream=True): + if event.type == "request_info": + request_events.append(event) + + assert len(request_events) == 2 + + # Find the approval and calculation requests + approval_event = next((e for e in request_events if isinstance(e.data, UserApprovalRequest)), None) + calc_event = next((e for e in request_events if isinstance(e.data, CalculationRequest)), None) + assert approval_event is not None + assert calc_event is not None + + # Step 2: Find the checkpoint with pending requests + checkpoints = await storage.list_checkpoints(workflow.name) + checkpoint_with_requests = None + for checkpoint in checkpoints: + has_approval = approval_event.request_id in 
checkpoint.pending_request_info_events + has_calc = calc_event.request_id in checkpoint.pending_request_info_events + if has_approval and has_calc: + checkpoint_with_requests = checkpoint + break + + assert checkpoint_with_requests is not None + + # Step 3: Restore from checkpoint with ONLY the approval response (not the calculation) + new_executor = MultiRequestExecutor(id="multi_executor") + restored_workflow = WorkflowBuilder(start_executor=new_executor, checkpoint_storage=storage).build() + + emitted_events: list[WorkflowEvent] = [] + async for event in restored_workflow.run( + checkpoint_id=checkpoint_with_requests.checkpoint_id, + responses={approval_event.request_id: True}, # Only respond to approval + stream=True, + ): + emitted_events.append(event) + + # Step 4: Verify the approval request_info was NOT re-emitted + reemitted_approval_events = [ + e for e in emitted_events if e.type == "request_info" and e.request_id == approval_event.request_id + ] + assert len(reemitted_approval_events) == 0, ( + "Approval request_info should NOT be re-emitted since response was provided" + ) + + # Step 5: Verify the calculation request_info WAS re-emitted (no response provided) + reemitted_calc_events = [ + e for e in emitted_events if e.type == "request_info" and e.request_id == calc_event.request_id + ] + assert len(reemitted_calc_events) == 1, ( + "Calculation request_info SHOULD be re-emitted since no response was provided" + ) + + # Step 6: Verify workflow is in IDLE_WITH_PENDING_REQUESTS state (calc still pending) + status_events = [e for e in emitted_events if e.type == "status"] + final_status = status_events[-1] if status_events else None + assert final_status is not None + assert final_status.state == WorkflowRunState.IDLE_WITH_PENDING_REQUESTS, ( + f"Workflow should be IDLE_WITH_PENDING_REQUESTS, got {final_status.state}" + ) diff --git a/python/samples/getting_started/orchestrations/handoff_with_tool_approval_checkpoint_resume.py 
b/python/samples/getting_started/orchestrations/handoff_with_tool_approval_checkpoint_resume.py new file mode 100644 index 0000000000..2cc78dd00b --- /dev/null +++ b/python/samples/getting_started/orchestrations/handoff_with_tool_approval_checkpoint_resume.py @@ -0,0 +1,236 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import json +from pathlib import Path +from typing import Any + +from agent_framework import ( + ChatAgent, + Content, + FileCheckpointStorage, + Workflow, + tool, +) +from agent_framework.azure import AzureOpenAIChatClient +from agent_framework.orchestrations import HandoffAgentUserRequest, HandoffBuilder +from azure.identity import AzureCliCredential + +""" +Sample: Handoff Workflow with Tool Approvals + Checkpoint Resume + +Demonstrates resuming a handoff workflow from a checkpoint while handling both +HandoffAgentUserRequest prompts and function approval request Content for tool calls +(e.g., submit_refund). + +Scenario: +1. User starts a conversation with the workflow. +2. Agents may emit user input requests or tool approval requests. +3. Workflow writes a checkpoint capturing pending requests and pauses. +4. Process can exit/restart. +5. On resume: Restore checkpoint, inspect pending requests, then provide responses. +6. Workflow continues from the saved state. + +Pattern: +- workflow.run(checkpoint_id=..., stream=True) to restore checkpoint and discover pending requests. +- workflow.run(stream=True, responses=responses) to supply human replies and approvals. + (Two steps are needed here because the sample must inspect request types before building responses. + When response payloads are already known, use the single-call form: + workflow.run(stream=True, checkpoint_id=..., responses=responses).) + +Prerequisites: +- Azure CLI authentication (az login). +- Environment variables configured for AzureOpenAIChatClient. 
+""" + +CHECKPOINT_DIR = Path(__file__).parent / "tmp" / "handoff_checkpoints" +CHECKPOINT_DIR.mkdir(parents=True, exist_ok=True) + + +@tool(approval_mode="always_require") +def submit_refund(refund_description: str, amount: str, order_id: str) -> str: + """Capture a refund request for manual review before processing.""" + return f"refund recorded for order {order_id} (amount: {amount}) with details: {refund_description}" + + +def create_agents(client: AzureOpenAIChatClient) -> tuple[ChatAgent, ChatAgent, ChatAgent]: + """Create a simple handoff scenario: triage, refund, and order specialists.""" + + triage = client.as_agent( + name="triage_agent", + instructions=( + "You are a customer service triage agent. Listen to customer issues and determine " + "if they need refund help or order tracking. Use handoff_to_refund_agent or " + "handoff_to_order_agent to transfer them." + ), + ) + + refund = client.as_agent( + name="refund_agent", + instructions=( + "You are a refund specialist. Help customers with refund requests. " + "Be empathetic and ask for order numbers if not provided. " + "When the user confirms they want a refund and supplies order details, call submit_refund " + "to record the request before continuing." + ), + tools=[submit_refund], + ) + + order = client.as_agent( + name="order_agent", + instructions=( + "You are an order tracking specialist. Help customers track their orders. " + "Ask for order numbers and provide shipping updates." 
+ ), + ) + + return triage, refund, order + + +def create_workflow(checkpoint_storage: FileCheckpointStorage) -> Workflow: + """Build the handoff workflow with checkpointing enabled.""" + + client = AzureOpenAIChatClient(credential=AzureCliCredential()) + triage, refund, order = create_agents(client) + + # checkpoint_storage: Enable checkpointing for resume + # termination_condition: Terminate after 5 user messages for this demo + return ( + HandoffBuilder( + name="checkpoint_handoff_demo", + participants=[triage, refund, order], + checkpoint_storage=checkpoint_storage, + termination_condition=lambda conv: sum(1 for msg in conv if msg.role == "user") >= 5, + ) + .with_start_agent(triage) + .build() + ) + + +def print_handoff_agent_user_request(request: HandoffAgentUserRequest, request_id: str) -> None: + """Log pending handoff request details for debugging.""" + print(f"\n{'=' * 60}") + print("User input needed") + print(f"Request ID: {request_id}") + print(f"Awaiting agent: {request.agent_response.agent_id}") + + response = request.agent_response + if not response.messages: + print("(No agent messages)") + return + + for message in response.messages: + if not message.text: + continue + speaker = message.author_name or message.role + print(f"{speaker}: {message.text}") + + print(f"{'=' * 60}\n") + + +def print_function_approval_request(request: Content, request_id: str) -> None: + """Log pending tool approval details for debugging.""" + args = request.function_call.parse_arguments() or {} # type: ignore + print(f"\n{'=' * 60}") + print("Tool approval required") + print(f"Request ID: {request_id}") + print(f"Function: {request.function_call.name}") # type: ignore + print(f"Arguments:\n{json.dumps(args, indent=2)}") + print(f"{'=' * 60}\n") + + +async def get_latest_checkpoint_id(storage: FileCheckpointStorage, workflow_name: str) -> str: + """Helper to get the latest checkpoint ID for a workflow.""" + checkpoints = await storage.list_checkpoints(workflow_name) + if 
not checkpoints: + raise RuntimeError("No checkpoints found.") + checkpoints.sort(key=lambda cp: cp.timestamp, reverse=True) + return checkpoints[0].checkpoint_id + + +async def main() -> None: + """ + Demonstrate the checkpoint-based pause/resume pattern for handoff workflows. + + This sample shows: + 1. Starting a workflow and getting a HandoffAgentUserRequest + 2. Pausing (checkpoint is saved automatically) + 3. Resuming from checkpoint with a user response or tool approval + 4. Continuing the conversation until completion + """ + # Clean up old checkpoints + for file in CHECKPOINT_DIR.glob("*.json"): + file.unlink() + for file in CHECKPOINT_DIR.glob("*.json.tmp"): + file.unlink() + + storage = FileCheckpointStorage(storage_path=CHECKPOINT_DIR) + workflow = create_workflow(checkpoint_storage=storage) + + # Scripted human input for demo purposes + handoff_responses = [ + ( + "The headphones in order 12345 arrived cracked. " + "Please submit the refund for $89.99 and send a replacement to my original address." + ), + "Yes, that covers the damage and refund request.", + "That's everything I needed for the refund.", + "Thanks for handling the refund.", + ] + + print("=" * 60) + print("HANDOFF WORKFLOW CHECKPOINT DEMO") + print("=" * 60) + + # Scenario: User needs help with a damaged order + initial_request = "Hi, my order 12345 arrived damaged. I need a refund." 
+ + # Phase 1: Initial run - workflow will pause when it needs user input + results = await workflow.run(message=initial_request) + request_events = results.get_request_info_events() + if not request_events: + print("Workflow completed without needing user input") + return + + print("=" * 60) + print("WORKFLOW PAUSED with pending requests") + print("=" * 60) + + # Phase 2: Running until no more user input is needed + # This creates a new workflow instance to simulate a fresh process start, + # but points it to the same checkpoint storage + while request_events: + print("=" * 60) + print("Simulating process restart...") + print("=" * 60) + + workflow = create_workflow(checkpoint_storage=storage) + + responses: dict[str, Any] = {} + for request_event in request_events: + print(f"Pending request ID: {request_event.request_id}, Type: {type(request_event.data)}") + if isinstance(request_event.data, HandoffAgentUserRequest): + print_handoff_agent_user_request(request_event.data, request_event.request_id) + response = handoff_responses.pop(0) + print(f"Responding with: {response}") + responses[request_event.request_id] = HandoffAgentUserRequest.create_response(response) + elif isinstance(request_event.data, Content) and request_event.data.type == "function_approval_request": + print_function_approval_request(request_event.data, request_event.request_id) + print("Approving tool call...") + responses[request_event.request_id] = request_event.data.to_function_approval_response(approved=True) + else: + # This sample only expects HandoffAgentUserRequest and function approval requests + raise ValueError(f"Unsupported request type: {type(request_event.data)}") + + checkpoint_id = await get_latest_checkpoint_id(storage, workflow.name) + + results = await workflow.run(responses=responses, checkpoint_id=checkpoint_id) + request_events = results.get_request_info_events() + + print("\n" + "=" * 60) + print("DEMO COMPLETE") + print("=" * 60) + + +if __name__ == "__main__": + 
asyncio.run(main()) diff --git a/python/samples/getting_started/orchestrations/magentic_checkpoint.py b/python/samples/getting_started/orchestrations/magentic_checkpoint.py index 08e26909e0..aaf5ad9c93 100644 --- a/python/samples/getting_started/orchestrations/magentic_checkpoint.py +++ b/python/samples/getting_started/orchestrations/magentic_checkpoint.py @@ -115,7 +115,7 @@ async def main() -> None: print("No plan review request emitted; nothing to resume.") return - checkpoints = await checkpoint_storage.list_checkpoints(workflow.id) + checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) if not checkpoints: print("No checkpoints persisted.") return @@ -180,7 +180,7 @@ async def main() -> None: def _pending_message_count(cp: WorkflowCheckpoint) -> int: return sum(len(msg_list) for msg_list in cp.messages.values() if isinstance(msg_list, list)) - all_checkpoints = await checkpoint_storage.list_checkpoints(resume_checkpoint.workflow_id) + all_checkpoints = await checkpoint_storage.list_checkpoints(resume_checkpoint.workflow_name) later_checkpoints_with_messages = [ cp for cp in all_checkpoints diff --git a/python/samples/getting_started/workflows/checkpoint/checkpoint_with_human_in_the_loop.py b/python/samples/getting_started/workflows/checkpoint/checkpoint_with_human_in_the_loop.py index b6c80d4203..dff3597f5b 100644 --- a/python/samples/getting_started/workflows/checkpoint/checkpoint_with_human_in_the_loop.py +++ b/python/samples/getting_started/workflows/checkpoint/checkpoint_with_human_in_the_loop.py @@ -177,9 +177,7 @@ def create_workflow(checkpoint_storage: FileCheckpointStorage) -> Workflow: # module docstring. Because `WorkflowBuilder` is declarative, reading these # edges is often the quickest way to understand execution order. 
workflow_builder = ( - WorkflowBuilder( - max_iterations=6, start_executor="prepare_brief", checkpoint_storage=checkpoint_storage - ) + WorkflowBuilder(max_iterations=6, start_executor="prepare_brief", checkpoint_storage=checkpoint_storage) .register_agent( lambda: AzureOpenAIChatClient(credential=AzureCliCredential()).as_agent( instructions="Write concise, warm release notes that sound human and helpful.", @@ -285,7 +283,7 @@ async def main() -> None: result = await run_interactive_session(workflow, initial_message=brief) print(f"Workflow completed with: {result}") - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(workflow.name) if not checkpoints: print("No checkpoints recorded.") return diff --git a/python/samples/getting_started/workflows/checkpoint/checkpoint_with_resume.py b/python/samples/getting_started/workflows/checkpoint/checkpoint_with_resume.py index 7d453b6126..b9eb1cf78e 100644 --- a/python/samples/getting_started/workflows/checkpoint/checkpoint_with_resume.py +++ b/python/samples/getting_started/workflows/checkpoint/checkpoint_with_resume.py @@ -140,7 +140,7 @@ async def main(): break # Find the latest checkpoint to resume from - all_checkpoints = await checkpoint_storage.list_checkpoints() + all_checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) if not all_checkpoints: raise RuntimeError("No checkpoints available to resume from.") latest_checkpoint = all_checkpoints[-1] diff --git a/python/samples/getting_started/workflows/checkpoint/handoff_with_tool_approval_checkpoint_resume.py b/python/samples/getting_started/workflows/checkpoint/handoff_with_tool_approval_checkpoint_resume.py deleted file mode 100644 index 99875c94c6..0000000000 --- a/python/samples/getting_started/workflows/checkpoint/handoff_with_tool_approval_checkpoint_resume.py +++ /dev/null @@ -1,405 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -import json -import logging -from pathlib import Path -from typing import cast - -from agent_framework import ( - AgentResponse, - ChatAgent, - ChatMessage, - Content, - FileCheckpointStorage, - Workflow, - WorkflowEvent, - tool, -) -from agent_framework.azure import AzureOpenAIChatClient -from agent_framework.orchestrations import HandoffAgentUserRequest, HandoffBuilder -from azure.identity import AzureCliCredential - -""" -Sample: Handoff Workflow with Tool Approvals + Checkpoint Resume - -Demonstrates resuming a handoff workflow from a checkpoint while handling both -HandoffAgentUserRequest prompts and function approval request Content for tool calls -(e.g., submit_refund). - -Scenario: -1. User starts a conversation with the workflow. -2. Agents may emit user input requests or tool approval requests. -3. Workflow writes a checkpoint capturing pending requests and pauses. -4. Process can exit/restart. -5. On resume: Restore checkpoint, inspect pending requests, then provide responses. -6. Workflow continues from the saved state. - -Pattern: -- workflow.run(checkpoint_id=..., stream=True) to restore checkpoint and discover pending requests. -- workflow.run(stream=True, responses=responses) to supply human replies and approvals. - (Two steps are needed here because the sample must inspect request types before building responses. - When response payloads are already known, use the single-call form: - workflow.run(stream=True, checkpoint_id=..., responses=responses).) - -Prerequisites: -- Azure CLI authentication (az login). -- Environment variables configured for AzureOpenAIChatClient. 
-""" - -CHECKPOINT_DIR = Path(__file__).parent / "tmp" / "handoff_checkpoints" -CHECKPOINT_DIR.mkdir(parents=True, exist_ok=True) - - -@tool(approval_mode="always_require") -def submit_refund(refund_description: str, amount: str, order_id: str) -> str: - """Capture a refund request for manual review before processing.""" - return f"refund recorded for order {order_id} (amount: {amount}) with details: {refund_description}" - - -def create_agents(client: AzureOpenAIChatClient) -> tuple[ChatAgent, ChatAgent, ChatAgent]: - """Create a simple handoff scenario: triage, refund, and order specialists.""" - - triage = client.as_agent( - name="triage_agent", - instructions=( - "You are a customer service triage agent. Listen to customer issues and determine " - "if they need refund help or order tracking. Use handoff_to_refund_agent or " - "handoff_to_order_agent to transfer them." - ), - ) - - refund = client.as_agent( - name="refund_agent", - instructions=( - "You are a refund specialist. Help customers with refund requests. " - "Be empathetic and ask for order numbers if not provided. " - "When the user confirms they want a refund and supplies order details, call submit_refund " - "to record the request before continuing." - ), - tools=[submit_refund], - ) - - order = client.as_agent( - name="order_agent", - instructions=( - "You are an order tracking specialist. Help customers track their orders. " - "Ask for order numbers and provide shipping updates." 
- ), - ) - - return triage, refund, order - - -def create_workflow(checkpoint_storage: FileCheckpointStorage) -> tuple[Workflow, ChatAgent, ChatAgent, ChatAgent]: - """Build the handoff workflow with checkpointing enabled.""" - - client = AzureOpenAIChatClient(credential=AzureCliCredential()) - triage, refund, order = create_agents(client) - - # checkpoint_storage: Enable checkpointing for resume - # termination_condition: Terminate after 5 user messages for this demo - workflow = ( - HandoffBuilder( - name="checkpoint_handoff_demo", - participants=[triage, refund, order], - checkpoint_storage=checkpoint_storage, - termination_condition=lambda conv: sum(1 for msg in conv if msg.role == "user") >= 5, - ) - .with_start_agent(triage) - .build() - ) - - return workflow, triage, refund, order - - -def _print_handoff_agent_user_request(response: AgentResponse) -> None: - """Display the agent's response messages when requesting user input.""" - if not response.messages: - print("(No agent messages)") - return - - print("\n[Agent is requesting your input...]") - for message in response.messages: - if not message.text: - continue - speaker = message.author_name or message.role - print(f" {speaker}: {message.text}") - - -def _print_handoff_request(request: HandoffAgentUserRequest, request_id: str) -> None: - """Log pending handoff request details for debugging.""" - print(f"\n{'=' * 60}") - print("WORKFLOW PAUSED - User input needed") - print(f"Request ID: {request_id}") - print(f"Awaiting agent: {request.agent_response.agent_id}") - - _print_handoff_agent_user_request(request.agent_response) - - print(f"{'=' * 60}\n") - - -def _print_function_approval_request(request: Content, request_id: str) -> None: - """Log pending tool approval details for debugging.""" - args = request.function_call.parse_arguments() or {} # type: ignore - print(f"\n{'=' * 60}") - print("WORKFLOW PAUSED - Tool approval required") - print(f"Request ID: {request_id}") - print(f"Function: 
{request.function_call.name}") # type: ignore - print(f"Arguments:\n{json.dumps(args, indent=2)}") - print(f"{'=' * 60}\n") - - -def _build_responses_for_requests( - pending_requests: list[WorkflowEvent], - *, - user_response: str | None, - approve_tools: bool | None, -) -> dict[str, object]: - """Create response payloads for each pending request.""" - responses: dict[str, object] = {} - for request in pending_requests: - if isinstance(request.data, HandoffAgentUserRequest) and request.request_id: - if user_response is None: - raise ValueError("User response is required for HandoffAgentUserRequest") - responses[request.request_id] = user_response - elif ( - isinstance(request.data, Content) - and request.data.type == "function_approval_request" - and request.request_id - ): - if approve_tools is None: - raise ValueError("Approval decision is required for function approval request") - responses[request.request_id] = request.data.to_function_approval_response(approved=approve_tools) - else: - raise ValueError(f"Unsupported request type: {type(request.data)}") - return responses - - -async def run_until_user_input_needed( - workflow: Workflow, - initial_message: str | None = None, - checkpoint_id: str | None = None, -) -> tuple[list[WorkflowEvent], str | None]: - """ - Run the workflow until it needs user input or approval, or completes. 
- - Returns: - Tuple of (pending_requests, checkpoint_id_to_use_for_resume) - """ - pending_requests: list[WorkflowEvent] = [] - latest_checkpoint_id: str | None = checkpoint_id - - if initial_message: - print(f"\nStarting workflow with: {initial_message}\n") - event_stream = workflow.run(message=initial_message, stream=True) # type: ignore[attr-defined] - elif checkpoint_id: - print(f"\nResuming workflow from checkpoint: {checkpoint_id}\n") - event_stream = workflow.run(checkpoint_id=checkpoint_id, stream=True) # type: ignore[attr-defined] - else: - raise ValueError("Must provide either initial_message or checkpoint_id") - - async for event in event_stream: - if event.type == "status": - print(f"[Status] {event.state}") - - elif event.type == "request_info": - pending_requests.append(event) - if isinstance(event.data, HandoffAgentUserRequest): - _print_handoff_request(event.data, event.request_id) - elif isinstance(event.data, Content) and event.data.type == "function_approval_request": - _print_function_approval_request(event.data, event.request_id) - - elif event.type == "output": - print("\n[Workflow Completed]") - if event.data: - print(f"Final conversation length: {len(event.data)} messages") - return [], None - - # Workflow paused with pending requests - # The latest checkpoint was created at the end of the last superstep - # We'll use the checkpoint storage to find it - return pending_requests, latest_checkpoint_id - - -async def resume_with_responses( - workflow: Workflow, - checkpoint_storage: FileCheckpointStorage, - user_response: str | None = None, - approve_tools: bool | None = None, -) -> tuple[list[WorkflowEvent], str | None]: - """ - Resume from checkpoint and send responses. - - Step 1: Restore checkpoint to discover pending request types. - Step 2: Build typed responses and send via workflow.run(responses=...). 
- - When response payloads are already known, these can be combined into a single - workflow.run(stream=True, checkpoint_id=..., responses=...) call. - """ - print(f"\n{'=' * 60}") - print("RESUMING WORKFLOW WITH HUMAN INPUT") - if user_response is not None: - print(f"User says: {user_response}") - if approve_tools is not None: - print(f"Approve tools: {approve_tools}") - print(f"{'=' * 60}\n") - - # Get the latest checkpoint - checkpoints = await checkpoint_storage.list_checkpoints() - if not checkpoints: - raise RuntimeError("No checkpoints found to resume from") - - # Sort by timestamp to get latest - checkpoints.sort(key=lambda cp: cp.timestamp, reverse=True) - latest_checkpoint = checkpoints[0] - - print(f"Restoring checkpoint {latest_checkpoint.checkpoint_id}") - - # First, restore checkpoint to discover pending requests - restored_requests: list[WorkflowEvent] = [] - async for event in workflow.run(checkpoint_id=latest_checkpoint.checkpoint_id, stream=True): # type: ignore[attr-defined] - if event.type == "request_info": - restored_requests.append(event) - if isinstance(event.data, HandoffAgentUserRequest): - _print_handoff_request(event.data, event.request_id) - elif isinstance(event.data, Content) and event.data.type == "function_approval_request": - _print_function_approval_request(event.data, event.request_id) - - if not restored_requests: - raise RuntimeError("No pending requests found after checkpoint restoration") - - responses = _build_responses_for_requests( - restored_requests, - user_response=user_response, - approve_tools=approve_tools, - ) - print(f"Sending responses for {len(responses)} request(s)") - - new_pending_requests: list[WorkflowEvent] = [] - - async for event in workflow.run(stream=True, responses=responses): - if event.type == "status": - print(f"[Status] {event.state}") - - elif event.type == "output": - print("\n[Workflow Output Event - Conversation Update]") - if event.data and isinstance(event.data, list) and all(isinstance(msg, 
ChatMessage) for msg in event.data): # type: ignore - # Now safe to cast event.data to list[ChatMessage] - conversation = cast(list[ChatMessage], event.data) # type: ignore - for msg in conversation[-3:]: # Show last 3 messages - author = msg.author_name or msg.role - text = msg.text[:100] + "..." if len(msg.text) > 100 else msg.text - print(f" {author}: {text}") - - elif event.type == "request_info": - new_pending_requests.append(event) - if isinstance(event.data, HandoffAgentUserRequest): - _print_handoff_request(event.data, event.request_id) - elif isinstance(event.data, Content) and event.data.type == "function_approval_request": - _print_function_approval_request(event.data, event.request_id) - - return new_pending_requests, latest_checkpoint.checkpoint_id - - -async def main() -> None: - """ - Demonstrate the checkpoint-based pause/resume pattern for handoff workflows. - - This sample shows: - 1. Starting a workflow and getting a HandoffAgentUserRequest - 2. Pausing (checkpoint is saved automatically) - 3. Resuming from checkpoint with a user response or tool approval - 4. Continuing the conversation until completion - """ - - # Enable INFO logging to see workflow progress - logging.basicConfig( - level=logging.INFO, - format="[%(levelname)s] %(name)s: %(message)s", - ) - - # Clean up old checkpoints - for file in CHECKPOINT_DIR.glob("*.json"): - file.unlink() - for file in CHECKPOINT_DIR.glob("*.json.tmp"): - file.unlink() - - storage = FileCheckpointStorage(storage_path=CHECKPOINT_DIR) - workflow, _, _, _ = create_workflow(checkpoint_storage=storage) - - print("=" * 60) - print("HANDOFF WORKFLOW CHECKPOINT DEMO") - print("=" * 60) - - # Scenario: User needs help with a damaged order - initial_request = "Hi, my order 12345 arrived damaged. I need a refund." 
- - # Phase 1: Initial run - workflow will pause when it needs user input - pending_requests, _ = await run_until_user_input_needed( - workflow, - initial_message=initial_request, - ) - - if not pending_requests: - print("Workflow completed without needing user input") - return - - print("\n>>> Workflow paused. You could exit the process here.") - print(f">>> Checkpoint was saved. Pending requests: {len(pending_requests)}") - - # Scripted human input for demo purposes - handoff_responses = [ - ( - "The headphones in order 12345 arrived cracked. " - "Please submit the refund for $89.99 and send a replacement to my original address." - ), - "Yes, that covers the damage and refund request.", - "That's everything I needed for the refund.", - "Thanks for handling the refund.", - ] - approval_decisions = [True, True, True] - handoff_index = 0 - approval_index = 0 - - while pending_requests: - print("\n>>> Simulating process restart...\n") - workflow_step, _, _, _ = create_workflow(checkpoint_storage=storage) - - needs_user_input = any(isinstance(req.data, HandoffAgentUserRequest) for req in pending_requests) - needs_tool_approval = any( - isinstance(req.data, Content) and req.data.type == "function_approval_request" for req in pending_requests - ) - - user_response = None - if needs_user_input: - if handoff_index < len(handoff_responses): - user_response = handoff_responses[handoff_index] - handoff_index += 1 - else: - user_response = handoff_responses[-1] - print(f">>> Responding to handoff request with: {user_response}") - - approval_response = None - if needs_tool_approval: - if approval_index < len(approval_decisions): - approval_response = approval_decisions[approval_index] - approval_index += 1 - else: - approval_response = approval_decisions[-1] - print(">>> Approving pending tool calls from the agent.") - - pending_requests, _ = await resume_with_responses( - workflow_step, - storage, - user_response=user_response, - approve_tools=approval_response, - ) - - 
print("\n" + "=" * 60) - print("DEMO COMPLETE") - print("=" * 60) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started/workflows/checkpoint/sub_workflow_checkpoint.py b/python/samples/getting_started/workflows/checkpoint/sub_workflow_checkpoint.py index c975a10ae1..9027304514 100644 --- a/python/samples/getting_started/workflows/checkpoint/sub_workflow_checkpoint.py +++ b/python/samples/getting_started/workflows/checkpoint/sub_workflow_checkpoint.py @@ -345,7 +345,7 @@ async def main() -> None: if request_id is None: raise RuntimeError("Sub-workflow completed without requesting review.") - checkpoints = await storage.list_checkpoints(workflow.id) + checkpoints = await storage.list_checkpoints(workflow.name) if not checkpoints: raise RuntimeError("No checkpoints found.") diff --git a/python/samples/getting_started/workflows/checkpoint/workflow_as_agent_checkpoint.py b/python/samples/getting_started/workflows/checkpoint/workflow_as_agent_checkpoint.py index 18a0cf9258..1cd874ed14 100644 --- a/python/samples/getting_started/workflows/checkpoint/workflow_as_agent_checkpoint.py +++ b/python/samples/getting_started/workflows/checkpoint/workflow_as_agent_checkpoint.py @@ -73,7 +73,7 @@ def create_reviewer() -> ChatAgent: print(f"[{speaker}]: {msg.text}") # Show checkpoints that were created - checkpoints = await checkpoint_storage.list_checkpoints(workflow.id) + checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) print(f"\nCheckpoints created: {len(checkpoints)}") for i, cp in enumerate(checkpoints[:5], 1): print(f" {i}. 
{cp.checkpoint_id}") @@ -115,7 +115,7 @@ def create_assistant() -> ChatAgent: print(f"[assistant]: {response2.messages[0].text}") # Show accumulated state - checkpoints = await checkpoint_storage.list_checkpoints(workflow.id) + checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) print(f"\nTotal checkpoints across both turns: {len(checkpoints)}") if thread.message_store: @@ -153,7 +153,7 @@ def create_assistant() -> ChatAgent: print() # Newline after streaming - checkpoints = await checkpoint_storage.list_checkpoints(workflow.id) + checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) print(f"\nCheckpoints created during stream: {len(checkpoints)}") From 0a830cf0c8c67321f965f94b21986d0244b7b1d6 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Mon, 9 Feb 2026 14:30:36 -0800 Subject: [PATCH 09/16] fix unit tests --- .../core/tests/workflow/test_checkpoint.py | 4 +- .../test_request_info_event_rehydrate.py | 2 +- python/uv.lock | 234 ++++++++++-------- 3 files changed, 127 insertions(+), 113 deletions(-) diff --git a/python/packages/core/tests/workflow/test_checkpoint.py b/python/packages/core/tests/workflow/test_checkpoint.py index e39f2c0862..342cf9f022 100644 --- a/python/packages/core/tests/workflow/test_checkpoint.py +++ b/python/packages/core/tests/workflow/test_checkpoint.py @@ -714,7 +714,7 @@ async def test_memory_checkpoint_storage_roundtrip_bytes(): storage = InMemoryCheckpointStorage() binary_data = b"\x00\x01\x02\xff\xfe\xfd" - unicode_bytes = "Hello 世界".encode("utf-8") + unicode_bytes = "Hello 世界".encode() checkpoint = WorkflowCheckpoint( workflow_name="test-workflow", @@ -1386,7 +1386,7 @@ async def test_file_checkpoint_storage_roundtrip_bytes(): storage = FileCheckpointStorage(temp_dir) binary_data = b"\x00\x01\x02\xff\xfe\xfd" - unicode_bytes = "Hello 世界".encode("utf-8") + unicode_bytes = "Hello 世界".encode() checkpoint = WorkflowCheckpoint( workflow_name="test-workflow", diff --git 
a/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py b/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py index c2925ccc51..0bd71e2d5d 100644 --- a/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py +++ b/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py @@ -279,7 +279,7 @@ async def test_checkpoint_restore_with_responses_does_not_reemit_handled_request ) -async def test_checkpoint_restore_with_partial_responses_reemits_unhandled_requests(self): +async def test_checkpoint_restore_with_partial_responses_reemits_unhandled_requests(): """Test that only unhandled request_info events are re-emitted when partial responses are provided. When calling run(checkpoint_id=..., responses=...) with responses for only some of the diff --git a/python/uv.lock b/python/uv.lock index e762f40433..9cd2ceb656 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -1542,101 +1542,115 @@ wheels = [ [[package]] name = "coverage" -version = "7.13.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/43/3e4ac666cc35f231fa70c94e9f38459299de1a152813f9d2f60fc5f3ecaf/coverage-7.13.3.tar.gz", hash = "sha256:f7f6182d3dfb8802c1747eacbfe611b669455b69b7c037484bb1efbbb56711ac", size = 826832, upload-time = "2026-02-03T14:02:30.944Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/07/1c8099563a8a6c389a31c2d0aa1497cee86d6248bb4b9ba5e779215db9f9/coverage-7.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0b4f345f7265cdbdb5ec2521ffff15fa49de6d6c39abf89fc7ad68aa9e3a55f0", size = 219143, upload-time = "2026-02-03T13:59:40.459Z" }, - { url = "https://files.pythonhosted.org/packages/69/39/a892d44af7aa092cab70e0cc5cdbba18eeccfe1d6930695dab1742eef9e9/coverage-7.13.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:96c3be8bae9d0333e403cc1a8eb078a7f928b5650bae94a18fb4820cc993fb9b", size = 219663, upload-time = "2026-02-03T13:59:41.951Z" }, 
- { url = "https://files.pythonhosted.org/packages/9a/25/9669dcf4c2bb4c3861469e6db20e52e8c11908cf53c14ec9b12e9fd4d602/coverage-7.13.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d6f4a21328ea49d38565b55599e1c02834e76583a6953e5586d65cb1efebd8f8", size = 246424, upload-time = "2026-02-03T13:59:43.418Z" }, - { url = "https://files.pythonhosted.org/packages/f3/68/d9766c4e298aca62ea5d9543e1dd1e4e1439d7284815244d8b7db1840bfb/coverage-7.13.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fc970575799a9d17d5c3fafd83a0f6ccf5d5117cdc9ad6fbd791e9ead82418b0", size = 248228, upload-time = "2026-02-03T13:59:44.816Z" }, - { url = "https://files.pythonhosted.org/packages/f0/e2/eea6cb4a4bd443741adf008d4cccec83a1f75401df59b6559aca2bdd9710/coverage-7.13.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:87ff33b652b3556b05e204ae20793d1f872161b0fa5ec8a9ac76f8430e152ed6", size = 250103, upload-time = "2026-02-03T13:59:46.271Z" }, - { url = "https://files.pythonhosted.org/packages/db/77/664280ecd666c2191610842177e2fab9e5dbdeef97178e2078fed46a3d2c/coverage-7.13.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7df8759ee57b9f3f7b66799b7660c282f4375bef620ade1686d6a7b03699e75f", size = 247107, upload-time = "2026-02-03T13:59:48.53Z" }, - { url = "https://files.pythonhosted.org/packages/2b/df/2a672eab99e0d0eba52d8a63e47dc92245eee26954d1b2d3c8f7d372151f/coverage-7.13.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f45c9bcb16bee25a798ccba8a2f6a1251b19de6a0d617bb365d7d2f386c4e20e", size = 248143, upload-time = "2026-02-03T13:59:50.027Z" }, - { url = "https://files.pythonhosted.org/packages/a5/dc/a104e7a87c13e57a358b8b9199a8955676e1703bb372d79722b54978ae45/coverage-7.13.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:318b2e4753cbf611061e01b6cc81477e1cdfeb69c36c4a14e6595e674caadb56", size = 246148, upload-time = 
"2026-02-03T13:59:52.025Z" }, - { url = "https://files.pythonhosted.org/packages/2b/89/e113d3a58dc20b03b7e59aed1e53ebc9ca6167f961876443e002b10e3ae9/coverage-7.13.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:24db3959de8ee394eeeca89ccb8ba25305c2da9a668dd44173394cbd5aa0777f", size = 246414, upload-time = "2026-02-03T13:59:53.859Z" }, - { url = "https://files.pythonhosted.org/packages/3f/60/a3fd0a6e8d89b488396019a2268b6a1f25ab56d6d18f3be50f35d77b47dc/coverage-7.13.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:be14d0622125edef21b3a4d8cd2d138c4872bf6e38adc90fd92385e3312f406a", size = 247023, upload-time = "2026-02-03T13:59:55.454Z" }, - { url = "https://files.pythonhosted.org/packages/19/fa/de4840bb939dbb22ba0648a6d8069fa91c9cf3b3fca8b0d1df461e885b3d/coverage-7.13.3-cp310-cp310-win32.whl", hash = "sha256:53be4aab8ddef18beb6188f3a3fdbf4d1af2277d098d4e618be3a8e6c88e74be", size = 221751, upload-time = "2026-02-03T13:59:57.383Z" }, - { url = "https://files.pythonhosted.org/packages/de/87/233ff8b7ef62fb63f58c78623b50bef69681111e0c4d43504f422d88cda4/coverage-7.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:bfeee64ad8b4aae3233abb77eb6b52b51b05fa89da9645518671b9939a78732b", size = 222686, upload-time = "2026-02-03T13:59:58.825Z" }, - { url = "https://files.pythonhosted.org/packages/ec/09/1ac74e37cf45f17eb41e11a21854f7f92a4c2d6c6098ef4a1becb0c6d8d3/coverage-7.13.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5907605ee20e126eeee2abe14aae137043c2c8af2fa9b38d2ab3b7a6b8137f73", size = 219276, upload-time = "2026-02-03T14:00:00.296Z" }, - { url = "https://files.pythonhosted.org/packages/2e/cb/71908b08b21beb2c437d0d5870c4ec129c570ca1b386a8427fcdb11cf89c/coverage-7.13.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a88705500988c8acad8b8fd86c2a933d3aa96bec1ddc4bc5cb256360db7bbd00", size = 219776, upload-time = "2026-02-03T14:00:02.414Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/85/c4f3dd69232887666a2c0394d4be21c60ea934d404db068e6c96aa59cd87/coverage-7.13.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bbb5aa9016c4c29e3432e087aa29ebee3f8fda089cfbfb4e6d64bd292dcd1c2", size = 250196, upload-time = "2026-02-03T14:00:04.197Z" }, - { url = "https://files.pythonhosted.org/packages/9c/cc/560ad6f12010344d0778e268df5ba9aa990aacccc310d478bf82bf3d302c/coverage-7.13.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0c2be202a83dde768937a61cdc5d06bf9fb204048ca199d93479488e6247656c", size = 252111, upload-time = "2026-02-03T14:00:05.639Z" }, - { url = "https://files.pythonhosted.org/packages/f0/66/3193985fb2c58e91f94cfbe9e21a6fdf941e9301fe2be9e92c072e9c8f8c/coverage-7.13.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f45e32ef383ce56e0ca099b2e02fcdf7950be4b1b56afaab27b4ad790befe5b", size = 254217, upload-time = "2026-02-03T14:00:07.738Z" }, - { url = "https://files.pythonhosted.org/packages/c5/78/f0f91556bf1faa416792e537c523c5ef9db9b1d32a50572c102b3d7c45b3/coverage-7.13.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6ed2e787249b922a93cd95c671cc9f4c9797a106e81b455c83a9ddb9d34590c0", size = 250318, upload-time = "2026-02-03T14:00:09.224Z" }, - { url = "https://files.pythonhosted.org/packages/6f/aa/fc654e45e837d137b2c1f3a2cc09b4aea1e8b015acd2f774fa0f3d2ddeba/coverage-7.13.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:05dd25b21afffe545e808265897c35f32d3e4437663923e0d256d9ab5031fb14", size = 251909, upload-time = "2026-02-03T14:00:10.712Z" }, - { url = "https://files.pythonhosted.org/packages/73/4d/ab53063992add8a9ca0463c9d92cce5994a29e17affd1c2daa091b922a93/coverage-7.13.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:46d29926349b5c4f1ea4fca95e8c892835515f3600995a383fa9a923b5739ea4", size = 249971, upload-time = 
"2026-02-03T14:00:12.402Z" }, - { url = "https://files.pythonhosted.org/packages/29/25/83694b81e46fcff9899694a1b6f57573429cdd82b57932f09a698f03eea5/coverage-7.13.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:fae6a21537519c2af00245e834e5bf2884699cc7c1055738fd0f9dc37a3644ad", size = 249692, upload-time = "2026-02-03T14:00:13.868Z" }, - { url = "https://files.pythonhosted.org/packages/d4/ef/d68fc304301f4cb4bf6aefa0045310520789ca38dabdfba9dbecd3f37919/coverage-7.13.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c672d4e2f0575a4ca2bf2aa0c5ced5188220ab806c1bb6d7179f70a11a017222", size = 250597, upload-time = "2026-02-03T14:00:15.461Z" }, - { url = "https://files.pythonhosted.org/packages/8d/85/240ad396f914df361d0f71e912ddcedb48130c71b88dc4193fe3c0306f00/coverage-7.13.3-cp311-cp311-win32.whl", hash = "sha256:fcda51c918c7a13ad93b5f89a58d56e3a072c9e0ba5c231b0ed81404bf2648fb", size = 221773, upload-time = "2026-02-03T14:00:17.462Z" }, - { url = "https://files.pythonhosted.org/packages/2f/71/165b3a6d3d052704a9ab52d11ea64ef3426745de517dda44d872716213a7/coverage-7.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:d1a049b5c51b3b679928dd35e47c4a2235e0b6128b479a7596d0ef5b42fa6301", size = 222711, upload-time = "2026-02-03T14:00:19.449Z" }, - { url = "https://files.pythonhosted.org/packages/51/d0/0ddc9c5934cdd52639c5df1f1eb0fdab51bb52348f3a8d1c7db9c600d93a/coverage-7.13.3-cp311-cp311-win_arm64.whl", hash = "sha256:79f2670c7e772f4917895c3d89aad59e01f3dbe68a4ed2d0373b431fad1dcfba", size = 221377, upload-time = "2026-02-03T14:00:20.968Z" }, - { url = "https://files.pythonhosted.org/packages/94/44/330f8e83b143f6668778ed61d17ece9dc48459e9e74669177de02f45fec5/coverage-7.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ed48b4170caa2c4420e0cd27dc977caaffc7eecc317355751df8373dddcef595", size = 219441, upload-time = "2026-02-03T14:00:22.585Z" }, - { url = 
"https://files.pythonhosted.org/packages/08/e7/29db05693562c2e65bdf6910c0af2fd6f9325b8f43caf7a258413f369e30/coverage-7.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8f2adf4bcffbbec41f366f2e6dffb9d24e8172d16e91da5799c9b7ed6b5716e6", size = 219801, upload-time = "2026-02-03T14:00:24.186Z" }, - { url = "https://files.pythonhosted.org/packages/90/ae/7f8a78249b02b0818db46220795f8ac8312ea4abd1d37d79ea81db5cae81/coverage-7.13.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01119735c690786b6966a1e9f098da4cd7ca9174c4cfe076d04e653105488395", size = 251306, upload-time = "2026-02-03T14:00:25.798Z" }, - { url = "https://files.pythonhosted.org/packages/62/71/a18a53d1808e09b2e9ebd6b47dad5e92daf4c38b0686b4c4d1b2f3e42b7f/coverage-7.13.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8bb09e83c603f152d855f666d70a71765ca8e67332e5829e62cb9466c176af23", size = 254051, upload-time = "2026-02-03T14:00:27.474Z" }, - { url = "https://files.pythonhosted.org/packages/4a/0a/eb30f6455d04c5a3396d0696cad2df0269ae7444bb322f86ffe3376f7bf9/coverage-7.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b607a40cba795cfac6d130220d25962931ce101f2f478a29822b19755377fb34", size = 255160, upload-time = "2026-02-03T14:00:29.024Z" }, - { url = "https://files.pythonhosted.org/packages/7b/7e/a45baac86274ce3ed842dbb84f14560c673ad30535f397d89164ec56c5df/coverage-7.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:44f14a62f5da2e9aedf9080e01d2cda61df39197d48e323538ec037336d68da8", size = 251709, upload-time = "2026-02-03T14:00:30.641Z" }, - { url = "https://files.pythonhosted.org/packages/c0/df/dd0dc12f30da11349993f3e218901fdf82f45ee44773596050c8f5a1fb25/coverage-7.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:debf29e0b157769843dff0981cc76f79e0ed04e36bb773c6cac5f6029054bd8a", size = 253083, upload-time = 
"2026-02-03T14:00:32.14Z" }, - { url = "https://files.pythonhosted.org/packages/ab/32/fc764c8389a8ce95cb90eb97af4c32f392ab0ac23ec57cadeefb887188d3/coverage-7.13.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:824bb95cd71604031ae9a48edb91fd6effde669522f960375668ed21b36e3ec4", size = 251227, upload-time = "2026-02-03T14:00:34.721Z" }, - { url = "https://files.pythonhosted.org/packages/dd/ca/d025e9da8f06f24c34d2da9873957cfc5f7e0d67802c3e34d0caa8452130/coverage-7.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8f1010029a5b52dc427c8e2a8dbddb2303ddd180b806687d1acd1bb1d06649e7", size = 250794, upload-time = "2026-02-03T14:00:36.278Z" }, - { url = "https://files.pythonhosted.org/packages/45/c7/76bf35d5d488ec8f68682eb8e7671acc50a6d2d1c1182de1d2b6d4ffad3b/coverage-7.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cd5dee4fd7659d8306ffa79eeaaafd91fa30a302dac3af723b9b469e549247e0", size = 252671, upload-time = "2026-02-03T14:00:38.368Z" }, - { url = "https://files.pythonhosted.org/packages/bf/10/1921f1a03a7c209e1cb374f81a6b9b68b03cdb3ecc3433c189bc90e2a3d5/coverage-7.13.3-cp312-cp312-win32.whl", hash = "sha256:f7f153d0184d45f3873b3ad3ad22694fd73aadcb8cdbc4337ab4b41ea6b4dff1", size = 221986, upload-time = "2026-02-03T14:00:40.442Z" }, - { url = "https://files.pythonhosted.org/packages/3c/7c/f5d93297f8e125a80c15545edc754d93e0ed8ba255b65e609b185296af01/coverage-7.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:03a6e5e1e50819d6d7436f5bc40c92ded7e484e400716886ac921e35c133149d", size = 222793, upload-time = "2026-02-03T14:00:42.106Z" }, - { url = "https://files.pythonhosted.org/packages/43/59/c86b84170015b4555ebabca8649bdf9f4a1f737a73168088385ed0f947c4/coverage-7.13.3-cp312-cp312-win_arm64.whl", hash = "sha256:51c4c42c0e7d09a822b08b6cf79b3c4db8333fffde7450da946719ba0d45730f", size = 221410, upload-time = "2026-02-03T14:00:43.726Z" }, - { url = 
"https://files.pythonhosted.org/packages/81/f3/4c333da7b373e8c8bfb62517e8174a01dcc373d7a9083698e3b39d50d59c/coverage-7.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:853c3d3c79ff0db65797aad79dee6be020efd218ac4510f15a205f1e8d13ce25", size = 219468, upload-time = "2026-02-03T14:00:45.829Z" }, - { url = "https://files.pythonhosted.org/packages/d6/31/0714337b7d23630c8de2f4d56acf43c65f8728a45ed529b34410683f7217/coverage-7.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f75695e157c83d374f88dcc646a60cb94173304a9258b2e74ba5a66b7614a51a", size = 219839, upload-time = "2026-02-03T14:00:47.407Z" }, - { url = "https://files.pythonhosted.org/packages/12/99/bd6f2a2738144c98945666f90cae446ed870cecf0421c767475fcf42cdbe/coverage-7.13.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2d098709621d0819039f3f1e471ee554f55a0b2ac0d816883c765b14129b5627", size = 250828, upload-time = "2026-02-03T14:00:49.029Z" }, - { url = "https://files.pythonhosted.org/packages/6f/99/97b600225fbf631e6f5bfd3ad5bcaf87fbb9e34ff87492e5a572ff01bbe2/coverage-7.13.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:16d23d6579cf80a474ad160ca14d8b319abaa6db62759d6eef53b2fc979b58c8", size = 253432, upload-time = "2026-02-03T14:00:50.655Z" }, - { url = "https://files.pythonhosted.org/packages/5f/5c/abe2b3490bda26bd4f5e3e799be0bdf00bd81edebedc2c9da8d3ef288fa8/coverage-7.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:00d34b29a59d2076e6f318b30a00a69bf63687e30cd882984ed444e753990cc1", size = 254672, upload-time = "2026-02-03T14:00:52.757Z" }, - { url = "https://files.pythonhosted.org/packages/31/ba/5d1957c76b40daff53971fe0adb84d9c2162b614280031d1d0653dd010c1/coverage-7.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ab6d72bffac9deb6e6cb0f61042e748de3f9f8e98afb0375a8e64b0b6e11746b", size = 251050, upload-time = 
"2026-02-03T14:00:54.332Z" }, - { url = "https://files.pythonhosted.org/packages/69/dc/dffdf3bfe9d32090f047d3c3085378558cb4eb6778cda7de414ad74581ed/coverage-7.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e129328ad1258e49cae0123a3b5fcb93d6c2fa90d540f0b4c7cdcdc019aaa3dc", size = 252801, upload-time = "2026-02-03T14:00:56.121Z" }, - { url = "https://files.pythonhosted.org/packages/87/51/cdf6198b0f2746e04511a30dc9185d7b8cdd895276c07bdb538e37f1cd50/coverage-7.13.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2213a8d88ed35459bda71597599d4eec7c2ebad201c88f0bfc2c26fd9b0dd2ea", size = 250763, upload-time = "2026-02-03T14:00:58.719Z" }, - { url = "https://files.pythonhosted.org/packages/d7/1a/596b7d62218c1d69f2475b69cc6b211e33c83c902f38ee6ae9766dd422da/coverage-7.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:00dd3f02de6d5f5c9c3d95e3e036c3c2e2a669f8bf2d3ceb92505c4ce7838f67", size = 250587, upload-time = "2026-02-03T14:01:01.197Z" }, - { url = "https://files.pythonhosted.org/packages/f7/46/52330d5841ff660f22c130b75f5e1dd3e352c8e7baef5e5fef6b14e3e991/coverage-7.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f9bada7bc660d20b23d7d312ebe29e927b655cf414dadcdb6335a2075695bd86", size = 252358, upload-time = "2026-02-03T14:01:02.824Z" }, - { url = "https://files.pythonhosted.org/packages/36/8a/e69a5be51923097ba7d5cff9724466e74fe486e9232020ba97c809a8b42b/coverage-7.13.3-cp313-cp313-win32.whl", hash = "sha256:75b3c0300f3fa15809bd62d9ca8b170eb21fcf0100eb4b4154d6dc8b3a5bbd43", size = 222007, upload-time = "2026-02-03T14:01:04.876Z" }, - { url = "https://files.pythonhosted.org/packages/0a/09/a5a069bcee0d613bdd48ee7637fa73bc09e7ed4342b26890f2df97cc9682/coverage-7.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:a2f7589c6132c44c53f6e705e1a6677e2b7821378c22f7703b2cf5388d0d4587", size = 222812, upload-time = "2026-02-03T14:01:07.296Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/4f/d62ad7dfe32f9e3d4a10c178bb6f98b10b083d6e0530ca202b399371f6c1/coverage-7.13.3-cp313-cp313-win_arm64.whl", hash = "sha256:123ceaf2b9d8c614f01110f908a341e05b1b305d6b2ada98763b9a5a59756051", size = 221433, upload-time = "2026-02-03T14:01:09.156Z" }, - { url = "https://files.pythonhosted.org/packages/04/b2/4876c46d723d80b9c5b695f1a11bf5f7c3dabf540ec00d6edc076ff025e6/coverage-7.13.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:cc7fd0f726795420f3678ac82ff882c7fc33770bd0074463b5aef7293285ace9", size = 220162, upload-time = "2026-02-03T14:01:11.409Z" }, - { url = "https://files.pythonhosted.org/packages/fc/04/9942b64a0e0bdda2c109f56bda42b2a59d9d3df4c94b85a323c1cae9fc77/coverage-7.13.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d358dc408edc28730aed5477a69338e444e62fba0b7e9e4a131c505fadad691e", size = 220510, upload-time = "2026-02-03T14:01:13.038Z" }, - { url = "https://files.pythonhosted.org/packages/5a/82/5cfe1e81eae525b74669f9795f37eb3edd4679b873d79d1e6c1c14ee6c1c/coverage-7.13.3-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5d67b9ed6f7b5527b209b24b3df9f2e5bf0198c1bbf99c6971b0e2dcb7e2a107", size = 261801, upload-time = "2026-02-03T14:01:14.674Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ec/a553d7f742fd2cd12e36a16a7b4b3582d5934b496ef2b5ea8abeb10903d4/coverage-7.13.3-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:59224bfb2e9b37c1335ae35d00daa3a5b4e0b1a20f530be208fff1ecfa436f43", size = 263882, upload-time = "2026-02-03T14:01:16.343Z" }, - { url = "https://files.pythonhosted.org/packages/e1/58/8f54a2a93e3d675635bc406de1c9ac8d551312142ff52c9d71b5e533ad45/coverage-7.13.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9306b5299e31e31e0d3b908c66bcb6e7e3ddca143dea0266e9ce6c667346d3", size = 266306, upload-time = "2026-02-03T14:01:18.02Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/be/e593399fd6ea1f00aee79ebd7cc401021f218d34e96682a92e1bae092ff6/coverage-7.13.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:343aaeb5f8bb7bcd38620fd7bc56e6ee8207847d8c6103a1e7b72322d381ba4a", size = 261051, upload-time = "2026-02-03T14:01:19.757Z" }, - { url = "https://files.pythonhosted.org/packages/5c/e5/e9e0f6138b21bcdebccac36fbfde9cf15eb1bbcea9f5b1f35cd1f465fb91/coverage-7.13.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2182129f4c101272ff5f2f18038d7b698db1bf8e7aa9e615cb48440899ad32e", size = 263868, upload-time = "2026-02-03T14:01:21.487Z" }, - { url = "https://files.pythonhosted.org/packages/9a/bf/de72cfebb69756f2d4a2dde35efcc33c47d85cd3ebdf844b3914aac2ef28/coverage-7.13.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:94d2ac94bd0cc57c5626f52f8c2fffed1444b5ae8c9fc68320306cc2b255e155", size = 261498, upload-time = "2026-02-03T14:01:23.097Z" }, - { url = "https://files.pythonhosted.org/packages/f2/91/4a2d313a70fc2e98ca53afd1c8ce67a89b1944cd996589a5b1fe7fbb3e5c/coverage-7.13.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:65436cde5ecabe26fb2f0bf598962f0a054d3f23ad529361326ac002c61a2a1e", size = 260394, upload-time = "2026-02-03T14:01:24.949Z" }, - { url = "https://files.pythonhosted.org/packages/40/83/25113af7cf6941e779eb7ed8de2a677865b859a07ccee9146d4cc06a03e3/coverage-7.13.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:db83b77f97129813dbd463a67e5335adc6a6a91db652cc085d60c2d512746f96", size = 262579, upload-time = "2026-02-03T14:01:26.703Z" }, - { url = "https://files.pythonhosted.org/packages/1e/19/a5f2b96262977e82fb9aabbe19b4d83561f5d063f18dde3e72f34ffc3b2f/coverage-7.13.3-cp313-cp313t-win32.whl", hash = "sha256:dfb428e41377e6b9ba1b0a32df6db5409cb089a0ed1d0a672dc4953ec110d84f", size = 222679, upload-time = "2026-02-03T14:01:28.553Z" }, - { url = 
"https://files.pythonhosted.org/packages/81/82/ef1747b88c87a5c7d7edc3704799ebd650189a9158e680a063308b6125ef/coverage-7.13.3-cp313-cp313t-win_amd64.whl", hash = "sha256:5badd7e596e6b0c89aa8ec6d37f4473e4357f982ce57f9a2942b0221cd9cf60c", size = 223740, upload-time = "2026-02-03T14:01:30.776Z" }, - { url = "https://files.pythonhosted.org/packages/1c/4c/a67c7bb5b560241c22736a9cb2f14c5034149ffae18630323fde787339e4/coverage-7.13.3-cp313-cp313t-win_arm64.whl", hash = "sha256:989aa158c0eb19d83c76c26f4ba00dbb272485c56e452010a3450bdbc9daafd9", size = 221996, upload-time = "2026-02-03T14:01:32.495Z" }, - { url = "https://files.pythonhosted.org/packages/5e/b3/677bb43427fed9298905106f39c6520ac75f746f81b8f01104526a8026e4/coverage-7.13.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c6f6169bbdbdb85aab8ac0392d776948907267fcc91deeacf6f9d55f7a83ae3b", size = 219513, upload-time = "2026-02-03T14:01:34.29Z" }, - { url = "https://files.pythonhosted.org/packages/42/53/290046e3bbf8986cdb7366a42dab3440b9983711eaff044a51b11006c67b/coverage-7.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2f5e731627a3d5ef11a2a35aa0c6f7c435867c7ccbc391268eb4f2ca5dbdcc10", size = 219850, upload-time = "2026-02-03T14:01:35.984Z" }, - { url = "https://files.pythonhosted.org/packages/ea/2b/ab41f10345ba2e49d5e299be8663be2b7db33e77ac1b85cd0af985ea6406/coverage-7.13.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9db3a3285d91c0b70fab9f39f0a4aa37d375873677efe4e71e58d8321e8c5d39", size = 250886, upload-time = "2026-02-03T14:01:38.287Z" }, - { url = "https://files.pythonhosted.org/packages/72/2d/b3f6913ee5a1d5cdd04106f257e5fac5d048992ffc2d9995d07b0f17739f/coverage-7.13.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:06e49c5897cb12e3f7ecdc111d44e97c4f6d0557b81a7a0204ed70a8b038f86f", size = 253393, upload-time = "2026-02-03T14:01:40.118Z" }, - { url = 
"https://files.pythonhosted.org/packages/f0/f6/b1f48810ffc6accf49a35b9943636560768f0812330f7456aa87dc39aff5/coverage-7.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb25061a66802df9fc13a9ba1967d25faa4dae0418db469264fd9860a921dde4", size = 254740, upload-time = "2026-02-03T14:01:42.413Z" }, - { url = "https://files.pythonhosted.org/packages/57/d0/e59c54f9be0b61808f6bc4c8c4346bd79f02dd6bbc3f476ef26124661f20/coverage-7.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:99fee45adbb1caeb914da16f70e557fb7ff6ddc9e4b14de665bd41af631367ef", size = 250905, upload-time = "2026-02-03T14:01:44.163Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f7/5291bcdf498bafbee3796bb32ef6966e9915aebd4d0954123c8eae921c32/coverage-7.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:318002f1fd819bdc1651c619268aa5bc853c35fa5cc6d1e8c96bd9cd6c828b75", size = 252753, upload-time = "2026-02-03T14:01:45.974Z" }, - { url = "https://files.pythonhosted.org/packages/a0/a9/1dcafa918c281554dae6e10ece88c1add82db685be123e1b05c2056ff3fb/coverage-7.13.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:71295f2d1d170b9977dc386d46a7a1b7cbb30e5405492529b4c930113a33f895", size = 250716, upload-time = "2026-02-03T14:01:48.844Z" }, - { url = "https://files.pythonhosted.org/packages/44/bb/4ea4eabcce8c4f6235df6e059fbc5db49107b24c4bdffc44aee81aeca5a8/coverage-7.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:5b1ad2e0dc672625c44bc4fe34514602a9fd8b10d52ddc414dc585f74453516c", size = 250530, upload-time = "2026-02-03T14:01:50.793Z" }, - { url = "https://files.pythonhosted.org/packages/6d/31/4a6c9e6a71367e6f923b27b528448c37f4e959b7e4029330523014691007/coverage-7.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b2beb64c145593a50d90db5c7178f55daeae129123b0d265bdb3cbec83e5194a", size = 252186, upload-time = "2026-02-03T14:01:52.607Z" }, - { url = 
"https://files.pythonhosted.org/packages/27/92/e1451ef6390a4f655dc42da35d9971212f7abbbcad0bdb7af4407897eb76/coverage-7.13.3-cp314-cp314-win32.whl", hash = "sha256:3d1aed4f4e837a832df2f3b4f68a690eede0de4560a2dbc214ea0bc55aabcdb4", size = 222253, upload-time = "2026-02-03T14:01:55.071Z" }, - { url = "https://files.pythonhosted.org/packages/8a/98/78885a861a88de020c32a2693487c37d15a9873372953f0c3c159d575a43/coverage-7.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9f9efbbaf79f935d5fbe3ad814825cbce4f6cdb3054384cb49f0c0f496125fa0", size = 223069, upload-time = "2026-02-03T14:01:56.95Z" }, - { url = "https://files.pythonhosted.org/packages/eb/fb/3784753a48da58a5337972abf7ca58b1fb0f1bda21bc7b4fae992fd28e47/coverage-7.13.3-cp314-cp314-win_arm64.whl", hash = "sha256:31b6e889c53d4e6687ca63706148049494aace140cffece1c4dc6acadb70a7b3", size = 221633, upload-time = "2026-02-03T14:01:58.758Z" }, - { url = "https://files.pythonhosted.org/packages/40/f9/75b732d9674d32cdbffe801ed5f770786dd1c97eecedef2125b0d25102dc/coverage-7.13.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c5e9787cec750793a19a28df7edd85ac4e49d3fb91721afcdc3b86f6c08d9aa8", size = 220243, upload-time = "2026-02-03T14:02:01.109Z" }, - { url = "https://files.pythonhosted.org/packages/cf/7e/2868ec95de5a65703e6f0c87407ea822d1feb3619600fbc3c1c4fa986090/coverage-7.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e5b86db331c682fd0e4be7098e6acee5e8a293f824d41487c667a93705d415ca", size = 220515, upload-time = "2026-02-03T14:02:02.862Z" }, - { url = "https://files.pythonhosted.org/packages/7d/eb/9f0d349652fced20bcaea0f67fc5777bd097c92369f267975732f3dc5f45/coverage-7.13.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:edc7754932682d52cf6e7a71806e529ecd5ce660e630e8bd1d37109a2e5f63ba", size = 261874, upload-time = "2026-02-03T14:02:04.727Z" }, - { url = 
"https://files.pythonhosted.org/packages/ee/a5/6619bc4a6c7b139b16818149a3e74ab2e21599ff9a7b6811b6afde99f8ec/coverage-7.13.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3a16d6398666510a6886f67f43d9537bfd0e13aca299688a19daa84f543122f", size = 264004, upload-time = "2026-02-03T14:02:06.634Z" }, - { url = "https://files.pythonhosted.org/packages/29/b7/90aa3fc645a50c6f07881fca4fd0ba21e3bfb6ce3a7078424ea3a35c74c9/coverage-7.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:303d38b19626c1981e1bb067a9928236d88eb0e4479b18a74812f05a82071508", size = 266408, upload-time = "2026-02-03T14:02:09.037Z" }, - { url = "https://files.pythonhosted.org/packages/62/55/08bb2a1e4dcbae384e638f0effef486ba5987b06700e481691891427d879/coverage-7.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:284e06eadfe15ddfee2f4ee56631f164ef897a7d7d5a15bca5f0bb88889fc5ba", size = 260977, upload-time = "2026-02-03T14:02:11.755Z" }, - { url = "https://files.pythonhosted.org/packages/9b/76/8bd4ae055a42d8fb5dd2230e5cf36ff2e05f85f2427e91b11a27fea52ed7/coverage-7.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d401f0864a1d3198422816878e4e84ca89ec1c1bf166ecc0ae01380a39b888cd", size = 263868, upload-time = "2026-02-03T14:02:13.565Z" }, - { url = "https://files.pythonhosted.org/packages/e3/f9/ba000560f11e9e32ec03df5aa8477242c2d95b379c99ac9a7b2e7fbacb1a/coverage-7.13.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3f379b02c18a64de78c4ccdddf1c81c2c5ae1956c72dacb9133d7dd7809794ab", size = 261474, upload-time = "2026-02-03T14:02:16.069Z" }, - { url = "https://files.pythonhosted.org/packages/90/4b/4de4de8f9ca7af4733bfcf4baa440121b7dbb3856daf8428ce91481ff63b/coverage-7.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:7a482f2da9086971efb12daca1d6547007ede3674ea06e16d7663414445c683e", size = 260317, upload-time = "2026-02-03T14:02:17.996Z" }, - { url 
= "https://files.pythonhosted.org/packages/05/71/5cd8436e2c21410ff70be81f738c0dddea91bcc3189b1517d26e0102ccb3/coverage-7.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:562136b0d401992118d9b49fbee5454e16f95f85b120a4226a04d816e33fe024", size = 262635, upload-time = "2026-02-03T14:02:20.405Z" }, - { url = "https://files.pythonhosted.org/packages/e7/f8/2834bb45bdd70b55a33ec354b8b5f6062fc90e5bb787e14385903a979503/coverage-7.13.3-cp314-cp314t-win32.whl", hash = "sha256:ca46e5c3be3b195098dd88711890b8011a9fa4feca942292bb84714ce5eab5d3", size = 223035, upload-time = "2026-02-03T14:02:22.323Z" }, - { url = "https://files.pythonhosted.org/packages/26/75/f8290f0073c00d9ae14056d2b84ab92dff21d5370e464cb6cb06f52bf580/coverage-7.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:06d316dbb3d9fd44cca05b2dbcfbef22948493d63a1f28e828d43e6cc505fed8", size = 224142, upload-time = "2026-02-03T14:02:24.143Z" }, - { url = "https://files.pythonhosted.org/packages/03/01/43ac78dfea8946c4a9161bbc034b5549115cb2b56781a4b574927f0d141a/coverage-7.13.3-cp314-cp314t-win_arm64.whl", hash = "sha256:299d66e9218193f9dc6e4880629ed7c4cd23486005166247c283fb98531656c3", size = 222166, upload-time = "2026-02-03T14:02:26.005Z" }, - { url = "https://files.pythonhosted.org/packages/7d/fb/70af542d2d938c778c9373ce253aa4116dbe7c0a5672f78b2b2ae0e1b94b/coverage-7.13.3-py3-none-any.whl", hash = "sha256:90a8af9dba6429b2573199622d72e0ebf024d6276f16abce394ad4d181bb0910", size = 211237, upload-time = "2026-02-03T14:02:27.986Z" }, +version = "7.13.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/44/d4/7827d9ffa34d5d4d752eec907022aa417120936282fc488306f5da08c292/coverage-7.13.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fc31c787a84f8cd6027eba44010517020e0d18487064cd3d8968941856d1415", size = 219152, upload-time = "2026-02-09T12:56:11.974Z" }, + { url = "https://files.pythonhosted.org/packages/35/b0/d69df26607c64043292644dbb9dc54b0856fabaa2cbb1eeee3331cc9e280/coverage-7.13.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a32ebc02a1805adf637fc8dec324b5cdacd2e493515424f70ee33799573d661b", size = 219667, upload-time = "2026-02-09T12:56:13.33Z" }, + { url = "https://files.pythonhosted.org/packages/82/a4/c1523f7c9e47b2271dbf8c2a097e7a1f89ef0d66f5840bb59b7e8814157b/coverage-7.13.4-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e24f9156097ff9dc286f2f913df3a7f63c0e333dcafa3c196f2c18b4175ca09a", size = 246425, upload-time = "2026-02-09T12:56:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/f8/02/aa7ec01d1a5023c4b680ab7257f9bfde9defe8fdddfe40be096ac19e8177/coverage-7.13.4-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8041b6c5bfdc03257666e9881d33b1abc88daccaf73f7b6340fb7946655cd10f", size = 248229, upload-time = "2026-02-09T12:56:16.31Z" }, + { url = "https://files.pythonhosted.org/packages/35/98/85aba0aed5126d896162087ef3f0e789a225697245256fc6181b95f47207/coverage-7.13.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2a09cfa6a5862bc2fc6ca7c3def5b2926194a56b8ab78ffcf617d28911123012", size = 250106, upload-time = "2026-02-09T12:56:18.024Z" }, + { url = "https://files.pythonhosted.org/packages/96/72/1db59bd67494bc162e3e4cd5fbc7edba2c7026b22f7c8ef1496d58c2b94c/coverage-7.13.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:296f8b0af861d3970c2a4d8c91d48eb4dd4771bcef9baedec6a9b515d7de3def", size = 252021, 
upload-time = "2026-02-09T12:56:19.272Z" }, + { url = "https://files.pythonhosted.org/packages/9d/97/72899c59c7066961de6e3daa142d459d47d104956db43e057e034f015c8a/coverage-7.13.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e101609bcbbfb04605ea1027b10dc3735c094d12d40826a60f897b98b1c30256", size = 247114, upload-time = "2026-02-09T12:56:21.051Z" }, + { url = "https://files.pythonhosted.org/packages/39/1f/f1885573b5970235e908da4389176936c8933e86cb316b9620aab1585fa2/coverage-7.13.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aa3feb8db2e87ff5e6d00d7e1480ae241876286691265657b500886c98f38bda", size = 248143, upload-time = "2026-02-09T12:56:22.585Z" }, + { url = "https://files.pythonhosted.org/packages/a8/cf/e80390c5b7480b722fa3e994f8202807799b85bc562aa4f1dde209fbb7be/coverage-7.13.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4fc7fa81bbaf5a02801b65346c8b3e657f1d93763e58c0abdf7c992addd81a92", size = 246152, upload-time = "2026-02-09T12:56:23.748Z" }, + { url = "https://files.pythonhosted.org/packages/44/bf/f89a8350d85572f95412debb0fb9bb4795b1d5b5232bd652923c759e787b/coverage-7.13.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:33901f604424145c6e9c2398684b92e176c0b12df77d52db81c20abd48c3794c", size = 249959, upload-time = "2026-02-09T12:56:25.209Z" }, + { url = "https://files.pythonhosted.org/packages/f7/6e/612a02aece8178c818df273e8d1642190c4875402ca2ba74514394b27aba/coverage-7.13.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:bb28c0f2cf2782508a40cec377935829d5fcc3ad9a3681375af4e84eb34b6b58", size = 246416, upload-time = "2026-02-09T12:56:26.475Z" }, + { url = "https://files.pythonhosted.org/packages/cb/98/b5afc39af67c2fa6786b03c3a7091fc300947387ce8914b096db8a73d67a/coverage-7.13.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9d107aff57a83222ddbd8d9ee705ede2af2cc926608b57abed8ef96b50b7e8f9", size = 247025, upload-time = "2026-02-09T12:56:27.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/30/2bba8ef0682d5bd210c38fe497e12a06c9f8d663f7025e9f5c2c31ce847d/coverage-7.13.4-cp310-cp310-win32.whl", hash = "sha256:a6f94a7d00eb18f1b6d403c91a88fd58cfc92d4b16080dfdb774afc8294469bf", size = 221758, upload-time = "2026-02-09T12:56:29.051Z" }, + { url = "https://files.pythonhosted.org/packages/78/13/331f94934cf6c092b8ea59ff868eb587bc8fe0893f02c55bc6c0183a192e/coverage-7.13.4-cp310-cp310-win_amd64.whl", hash = "sha256:2cb0f1e000ebc419632bbe04366a8990b6e32c4e0b51543a6484ffe15eaeda95", size = 222693, upload-time = "2026-02-09T12:56:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/b4/ad/b59e5b451cf7172b8d1043dc0fa718f23aab379bc1521ee13d4bd9bfa960/coverage-7.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d490ba50c3f35dd7c17953c68f3270e7ccd1c6642e2d2afe2d8e720b98f5a053", size = 219278, upload-time = "2026-02-09T12:56:31.673Z" }, + { url = "https://files.pythonhosted.org/packages/f1/17/0cb7ca3de72e5f4ef2ec2fa0089beafbcaaaead1844e8b8a63d35173d77d/coverage-7.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19bc3c88078789f8ef36acb014d7241961dbf883fd2533d18cb1e7a5b4e28b11", size = 219783, upload-time = "2026-02-09T12:56:33.104Z" }, + { url = "https://files.pythonhosted.org/packages/ab/63/325d8e5b11e0eaf6d0f6a44fad444ae58820929a9b0de943fa377fe73e85/coverage-7.13.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3998e5a32e62fdf410c0dbd3115df86297995d6e3429af80b8798aad894ca7aa", size = 250200, upload-time = "2026-02-09T12:56:34.474Z" }, + { url = "https://files.pythonhosted.org/packages/76/53/c16972708cbb79f2942922571a687c52bd109a7bd51175aeb7558dff2236/coverage-7.13.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8e264226ec98e01a8e1054314af91ee6cde0eacac4f465cc93b03dbe0bce2fd7", size = 252114, upload-time = "2026-02-09T12:56:35.749Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/c2/7ab36d8b8cc412bec9ea2d07c83c48930eb4ba649634ba00cb7e4e0f9017/coverage-7.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3aa4e7b9e416774b21797365b358a6e827ffadaaca81b69ee02946852449f00", size = 254220, upload-time = "2026-02-09T12:56:37.796Z" }, + { url = "https://files.pythonhosted.org/packages/d6/4d/cf52c9a3322c89a0e6febdfbc83bb45c0ed3c64ad14081b9503adee702e7/coverage-7.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:71ca20079dd8f27fcf808817e281e90220475cd75115162218d0e27549f95fef", size = 256164, upload-time = "2026-02-09T12:56:39.016Z" }, + { url = "https://files.pythonhosted.org/packages/78/e9/eb1dd17bd6de8289df3580e967e78294f352a5df8a57ff4671ee5fc3dcd0/coverage-7.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e2f25215f1a359ab17320b47bcdaca3e6e6356652e8256f2441e4ef972052903", size = 250325, upload-time = "2026-02-09T12:56:40.668Z" }, + { url = "https://files.pythonhosted.org/packages/71/07/8c1542aa873728f72267c07278c5cc0ec91356daf974df21335ccdb46368/coverage-7.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d65b2d373032411e86960604dc4edac91fdfb5dca539461cf2cbe78327d1e64f", size = 251913, upload-time = "2026-02-09T12:56:41.97Z" }, + { url = "https://files.pythonhosted.org/packages/74/d7/c62e2c5e4483a748e27868e4c32ad3daa9bdddbba58e1bc7a15e252baa74/coverage-7.13.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94eb63f9b363180aff17de3e7c8760c3ba94664ea2695c52f10111244d16a299", size = 249974, upload-time = "2026-02-09T12:56:43.323Z" }, + { url = "https://files.pythonhosted.org/packages/98/9f/4c5c015a6e98ced54efd0f5cf8d31b88e5504ecb6857585fc0161bb1e600/coverage-7.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e856bf6616714c3a9fbc270ab54103f4e685ba236fa98c054e8f87f266c93505", size = 253741, upload-time = "2026-02-09T12:56:45.155Z" }, + { url 
= "https://files.pythonhosted.org/packages/bd/59/0f4eef89b9f0fcd9633b5d350016f54126ab49426a70ff4c4e87446cabdc/coverage-7.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:65dfcbe305c3dfe658492df2d85259e0d79ead4177f9ae724b6fb245198f55d6", size = 249695, upload-time = "2026-02-09T12:56:46.636Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2c/b7476f938deb07166f3eb281a385c262675d688ff4659ad56c6c6b8e2e70/coverage-7.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b507778ae8a4c915436ed5c2e05b4a6cecfa70f734e19c22a005152a11c7b6a9", size = 250599, upload-time = "2026-02-09T12:56:48.13Z" }, + { url = "https://files.pythonhosted.org/packages/b8/34/c3420709d9846ee3785b9f2831b4d94f276f38884032dca1457fa83f7476/coverage-7.13.4-cp311-cp311-win32.whl", hash = "sha256:784fc3cf8be001197b652d51d3fd259b1e2262888693a4636e18879f613a62a9", size = 221780, upload-time = "2026-02-09T12:56:50.479Z" }, + { url = "https://files.pythonhosted.org/packages/61/08/3d9c8613079d2b11c185b865de9a4c1a68850cfda2b357fae365cf609f29/coverage-7.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:2421d591f8ca05b308cf0092807308b2facbefe54af7c02ac22548b88b95c98f", size = 222715, upload-time = "2026-02-09T12:56:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/18/1a/54c3c80b2f056164cc0a6cdcb040733760c7c4be9d780fe655f356f433e4/coverage-7.13.4-cp311-cp311-win_arm64.whl", hash = "sha256:79e73a76b854d9c6088fe5d8b2ebe745f8681c55f7397c3c0a016192d681045f", size = 221385, upload-time = "2026-02-09T12:56:53.194Z" }, + { url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" }, + { url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a0/2ea570925524ef4e00bb6c82649f5682a77fac5ab910a65c9284de422600/coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3", size = 254052, upload-time = "2026-02-09T12:56:59.754Z" }, + { url = "https://files.pythonhosted.org/packages/e8/ac/45dc2e19a1939098d783c846e130b8f862fbb50d09e0af663988f2f21973/coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa", size = 255165, upload-time = "2026-02-09T12:57:01.287Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4d/26d236ff35abc3b5e63540d3386e4c3b192168c1d96da5cb2f43c640970f/coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3", size = 257432, upload-time = "2026-02-09T12:57:02.637Z" }, + { url = "https://files.pythonhosted.org/packages/ec/55/14a966c757d1348b2e19caf699415a2a4c4f7feaa4bbc6326a51f5c7dd1b/coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a", size = 251716, upload-time = "2026-02-09T12:57:04.056Z" }, + { url = "https://files.pythonhosted.org/packages/77/33/50116647905837c66d28b2af1321b845d5f5d19be9655cb84d4a0ea806b4/coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7", size = 253089, upload-time = "2026-02-09T12:57:05.503Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b4/8efb11a46e3665d92635a56e4f2d4529de6d33f2cb38afd47d779d15fc99/coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc", size = 251232, upload-time = "2026-02-09T12:57:06.879Z" }, + { url = "https://files.pythonhosted.org/packages/51/24/8cd73dd399b812cc76bb0ac260e671c4163093441847ffe058ac9fda1e32/coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47", size = 255299, upload-time = "2026-02-09T12:57:08.245Z" }, + { url = "https://files.pythonhosted.org/packages/03/94/0a4b12f1d0e029ce1ccc1c800944a9984cbe7d678e470bb6d3c6bc38a0da/coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985", size = 250796, upload-time = "2026-02-09T12:57:10.142Z" }, + { url = "https://files.pythonhosted.org/packages/73/44/6002fbf88f6698ca034360ce474c406be6d5a985b3fdb3401128031eef6b/coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0", size = 252673, upload-time = "2026-02-09T12:57:12.197Z" }, + { url = "https://files.pythonhosted.org/packages/de/c6/a0279f7c00e786be75a749a5674e6fa267bcbd8209cd10c9a450c655dfa7/coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246", size = 221990, upload-time = 
"2026-02-09T12:57:14.085Z" }, + { url = "https://files.pythonhosted.org/packages/77/4e/c0a25a425fcf5557d9abd18419c95b63922e897bc86c1f327f155ef234a9/coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126", size = 222800, upload-time = "2026-02-09T12:57:15.944Z" }, + { url = "https://files.pythonhosted.org/packages/47/ac/92da44ad9a6f4e3a7debd178949d6f3769bedca33830ce9b1dcdab589a37/coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d", size = 221415, upload-time = "2026-02-09T12:57:17.497Z" }, + { url = "https://files.pythonhosted.org/packages/db/23/aad45061a31677d68e47499197a131eea55da4875d16c1f42021ab963503/coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9", size = 219474, upload-time = "2026-02-09T12:57:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/a5/70/9b8b67a0945f3dfec1fd896c5cefb7c19d5a3a6d74630b99a895170999ae/coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac", size = 219844, upload-time = "2026-02-09T12:57:20.66Z" }, + { url = "https://files.pythonhosted.org/packages/97/fd/7e859f8fab324cef6c4ad7cff156ca7c489fef9179d5749b0c8d321281c2/coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea", size = 250832, upload-time = "2026-02-09T12:57:22.007Z" }, + { url = "https://files.pythonhosted.org/packages/e4/dc/b2442d10020c2f52617828862d8b6ee337859cd8f3a1f13d607dddda9cf7/coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b", size = 253434, upload-time = "2026-02-09T12:57:23.339Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/88/6728a7ad17428b18d836540630487231f5470fb82454871149502f5e5aa2/coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525", size = 254676, upload-time = "2026-02-09T12:57:24.774Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bc/21244b1b8cedf0dff0a2b53b208015fe798d5f2a8d5348dbfece04224fff/coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242", size = 256807, upload-time = "2026-02-09T12:57:26.125Z" }, + { url = "https://files.pythonhosted.org/packages/97/a0/ddba7ed3251cff51006737a727d84e05b61517d1784a9988a846ba508877/coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148", size = 251058, upload-time = "2026-02-09T12:57:27.614Z" }, + { url = "https://files.pythonhosted.org/packages/9b/55/e289addf7ff54d3a540526f33751951bf0878f3809b47f6dfb3def69c6f7/coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a", size = 252805, upload-time = "2026-02-09T12:57:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/13/4e/cc276b1fa4a59be56d96f1dabddbdc30f4ba22e3b1cd42504c37b3313255/coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23", size = 250766, upload-time = "2026-02-09T12:57:30.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/1093b8f93018f8b41a8cf29636c9292502f05e4a113d4d107d14a3acd044/coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80", size = 254923, upload-time = "2026-02-09T12:57:31.946Z" }, + { 
url = "https://files.pythonhosted.org/packages/8b/55/ea2796da2d42257f37dbea1aab239ba9263b31bd91d5527cdd6db5efe174/coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea", size = 250591, upload-time = "2026-02-09T12:57:33.842Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/7c4bb72aacf8af5020675aa633e59c1fbe296d22aed191b6a5b711eb2bc7/coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a", size = 252364, upload-time = "2026-02-09T12:57:35.743Z" }, + { url = "https://files.pythonhosted.org/packages/5c/38/a8d2ec0146479c20bbaa7181b5b455a0c41101eed57f10dd19a78ab44c80/coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d", size = 222010, upload-time = "2026-02-09T12:57:37.25Z" }, + { url = "https://files.pythonhosted.org/packages/e2/0c/dbfafbe90a185943dcfbc766fe0e1909f658811492d79b741523a414a6cc/coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd", size = 222818, upload-time = "2026-02-09T12:57:38.734Z" }, + { url = "https://files.pythonhosted.org/packages/04/d1/934918a138c932c90d78301f45f677fb05c39a3112b96fd2c8e60503cdc7/coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af", size = 221438, upload-time = "2026-02-09T12:57:40.223Z" }, + { url = "https://files.pythonhosted.org/packages/52/57/ee93ced533bcb3e6df961c0c6e42da2fc6addae53fb95b94a89b1e33ebd7/coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d", size = 220165, upload-time = "2026-02-09T12:57:41.639Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/e0/969fc285a6fbdda49d91af278488d904dcd7651b2693872f0ff94e40e84a/coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12", size = 220516, upload-time = "2026-02-09T12:57:44.215Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b8/9531944e16267e2735a30a9641ff49671f07e8138ecf1ca13db9fd2560c7/coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b", size = 261804, upload-time = "2026-02-09T12:57:45.989Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f3/e63df6d500314a2a60390d1989240d5f27318a7a68fa30ad3806e2a9323e/coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9", size = 263885, upload-time = "2026-02-09T12:57:47.42Z" }, + { url = "https://files.pythonhosted.org/packages/f3/67/7654810de580e14b37670b60a09c599fa348e48312db5b216d730857ffe6/coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092", size = 266308, upload-time = "2026-02-09T12:57:49.345Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/39d41eca0eab3cc82115953ad41c4e77935286c930e8fad15eaed1389d83/coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9", size = 267452, upload-time = "2026-02-09T12:57:50.811Z" }, + { url = "https://files.pythonhosted.org/packages/50/6d/39c0fbb8fc5cd4d2090811e553c2108cf5112e882f82505ee7495349a6bf/coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26", size = 261057, upload-time = "2026-02-09T12:57:52.447Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a2/60010c669df5fa603bb5a97fb75407e191a846510da70ac657eb696b7fce/coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2", size = 263875, upload-time = "2026-02-09T12:57:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/63b22a6bdbd17f1f96e9ed58604c2a6b0e72a9133e37d663bef185877cf6/coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940", size = 261500, upload-time = "2026-02-09T12:57:56.012Z" }, + { url = "https://files.pythonhosted.org/packages/70/bf/69f86ba1ad85bc3ad240e4c0e57a2e620fbc0e1645a47b5c62f0e941ad7f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c", size = 265212, upload-time = "2026-02-09T12:57:57.5Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f2/5f65a278a8c2148731831574c73e42f57204243d33bedaaf18fa79c5958f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0", size = 260398, upload-time = "2026-02-09T12:57:59.027Z" }, + { url = "https://files.pythonhosted.org/packages/ef/80/6e8280a350ee9fea92f14b8357448a242dcaa243cb2c72ab0ca591f66c8c/coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b", size = 262584, upload-time = "2026-02-09T12:58:01.129Z" }, + { url = "https://files.pythonhosted.org/packages/22/63/01ff182fc95f260b539590fb12c11ad3e21332c15f9799cb5e2386f71d9f/coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9", size = 222688, upload-time = 
"2026-02-09T12:58:02.736Z" }, + { url = "https://files.pythonhosted.org/packages/a9/43/89de4ef5d3cd53b886afa114065f7e9d3707bdb3e5efae13535b46ae483d/coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd", size = 223746, upload-time = "2026-02-09T12:58:05.362Z" }, + { url = "https://files.pythonhosted.org/packages/35/39/7cf0aa9a10d470a5309b38b289b9bb07ddeac5d61af9b664fe9775a4cb3e/coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997", size = 222003, upload-time = "2026-02-09T12:58:06.952Z" }, + { url = "https://files.pythonhosted.org/packages/92/11/a9cf762bb83386467737d32187756a42094927150c3e107df4cb078e8590/coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601", size = 219522, upload-time = "2026-02-09T12:58:08.623Z" }, + { url = "https://files.pythonhosted.org/packages/d3/28/56e6d892b7b052236d67c95f1936b6a7cf7c3e2634bf27610b8cbd7f9c60/coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689", size = 219855, upload-time = "2026-02-09T12:58:10.176Z" }, + { url = "https://files.pythonhosted.org/packages/e5/69/233459ee9eb0c0d10fcc2fe425a029b3fa5ce0f040c966ebce851d030c70/coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c", size = 250887, upload-time = "2026-02-09T12:58:12.503Z" }, + { url = "https://files.pythonhosted.org/packages/06/90/2cdab0974b9b5bbc1623f7876b73603aecac11b8d95b85b5b86b32de5eab/coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129", size = 253396, upload-time = "2026-02-09T12:58:14.615Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/15/ea4da0f85bf7d7b27635039e649e99deb8173fe551096ea15017f7053537/coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552", size = 254745, upload-time = "2026-02-09T12:58:16.162Z" }, + { url = "https://files.pythonhosted.org/packages/99/11/bb356e86920c655ca4d61daee4e2bbc7258f0a37de0be32d233b561134ff/coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a", size = 257055, upload-time = "2026-02-09T12:58:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/0f/9ae1f8cb17029e09da06ca4e28c9e1d5c1c0a511c7074592e37e0836c915/coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356", size = 250911, upload-time = "2026-02-09T12:58:19.495Z" }, + { url = "https://files.pythonhosted.org/packages/89/3a/adfb68558fa815cbc29747b553bc833d2150228f251b127f1ce97e48547c/coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71", size = 252754, upload-time = "2026-02-09T12:58:21.064Z" }, + { url = "https://files.pythonhosted.org/packages/32/b1/540d0c27c4e748bd3cd0bd001076ee416eda993c2bae47a73b7cc9357931/coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5", size = 250720, upload-time = "2026-02-09T12:58:22.622Z" }, + { url = "https://files.pythonhosted.org/packages/c7/95/383609462b3ffb1fe133014a7c84fc0dd01ed55ac6140fa1093b5af7ebb1/coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98", size = 254994, upload-time = "2026-02-09T12:58:24.548Z" }, + { 
url = "https://files.pythonhosted.org/packages/f7/ba/1761138e86c81680bfc3c49579d66312865457f9fe405b033184e5793cb3/coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5", size = 250531, upload-time = "2026-02-09T12:58:26.271Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8e/05900df797a9c11837ab59c4d6fe94094e029582aab75c3309a93e6fb4e3/coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0", size = 252189, upload-time = "2026-02-09T12:58:27.807Z" }, + { url = "https://files.pythonhosted.org/packages/00/bd/29c9f2db9ea4ed2738b8a9508c35626eb205d51af4ab7bf56a21a2e49926/coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb", size = 222258, upload-time = "2026-02-09T12:58:29.441Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4d/1f8e723f6829977410efeb88f73673d794075091c8c7c18848d273dc9d73/coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505", size = 223073, upload-time = "2026-02-09T12:58:31.026Z" }, + { url = "https://files.pythonhosted.org/packages/51/5b/84100025be913b44e082ea32abcf1afbf4e872f5120b7a1cab1d331b1e13/coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2", size = 221638, upload-time = "2026-02-09T12:58:32.599Z" }, + { url = "https://files.pythonhosted.org/packages/a7/e4/c884a405d6ead1370433dad1e3720216b4f9fd8ef5b64bfd984a2a60a11a/coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056", size = 220246, upload-time = "2026-02-09T12:58:34.181Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/5c/4d7ed8b23b233b0fffbc9dfec53c232be2e695468523242ea9fd30f97ad2/coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc", size = 220514, upload-time = "2026-02-09T12:58:35.704Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6f/3284d4203fd2f28edd73034968398cd2d4cb04ab192abc8cff007ea35679/coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9", size = 261877, upload-time = "2026-02-09T12:58:37.864Z" }, + { url = "https://files.pythonhosted.org/packages/09/aa/b672a647bbe1556a85337dc95bfd40d146e9965ead9cc2fe81bde1e5cbce/coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf", size = 264004, upload-time = "2026-02-09T12:58:39.492Z" }, + { url = "https://files.pythonhosted.org/packages/79/a1/aa384dbe9181f98bba87dd23dda436f0c6cf2e148aecbb4e50fc51c1a656/coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55", size = 266408, upload-time = "2026-02-09T12:58:41.852Z" }, + { url = "https://files.pythonhosted.org/packages/53/5e/5150bf17b4019bc600799f376bb9606941e55bd5a775dc1e096b6ffea952/coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72", size = 267544, upload-time = "2026-02-09T12:58:44.093Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/f1de5c675987a4a7a672250d2c5c9d73d289dbf13410f00ed7181d8017dd/coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a", size = 260980, upload-time = "2026-02-09T12:58:45.721Z" }, + { url = "https://files.pythonhosted.org/packages/b3/e3/fe758d01850aa172419a6743fe76ba8b92c29d181d4f676ffe2dae2ba631/coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6", size = 263871, upload-time = "2026-02-09T12:58:47.334Z" }, + { url = "https://files.pythonhosted.org/packages/b6/76/b829869d464115e22499541def9796b25312b8cf235d3bb00b39f1675395/coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3", size = 261472, upload-time = "2026-02-09T12:58:48.995Z" }, + { url = "https://files.pythonhosted.org/packages/14/9e/caedb1679e73e2f6ad240173f55218488bfe043e38da577c4ec977489915/coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750", size = 265210, upload-time = "2026-02-09T12:58:51.178Z" }, + { url = "https://files.pythonhosted.org/packages/3a/10/0dd02cb009b16ede425b49ec344aba13a6ae1dc39600840ea6abcb085ac4/coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39", size = 260319, upload-time = "2026-02-09T12:58:53.081Z" }, + { url = "https://files.pythonhosted.org/packages/92/8e/234d2c927af27c6d7a5ffad5bd2cf31634c46a477b4c7adfbfa66baf7ebb/coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0", size = 262638, upload-time = "2026-02-09T12:58:55.258Z" }, + { url = "https://files.pythonhosted.org/packages/2f/64/e5547c8ff6964e5965c35a480855911b61509cce544f4d442caa759a0702/coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea", size = 223040, upload-time = 
"2026-02-09T12:58:56.936Z" }, + { url = "https://files.pythonhosted.org/packages/c7/96/38086d58a181aac86d503dfa9c47eb20715a79c3e3acbdf786e92e5c09a8/coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932", size = 224148, upload-time = "2026-02-09T12:58:58.645Z" }, + { url = "https://files.pythonhosted.org/packages/ce/72/8d10abd3740a0beb98c305e0c3faf454366221c0f37a8bcf8f60020bb65a/coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b", size = 222172, upload-time = "2026-02-09T12:59:00.396Z" }, + { url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" }, ] [package.optional-dependencies] @@ -1839,7 +1853,7 @@ wheels = [ [[package]] name = "fastapi" -version = "0.128.5" +version = "0.128.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-doc", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1848,9 +1862,9 @@ dependencies = [ { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-inspection", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/02/d4/811e7283aaaa84f1e7bd55fb642b58f8c01895e4884a9b7628cb55e00d63/fastapi-0.128.5.tar.gz", hash = "sha256:a7173579fc162d6471e3c6fbd9a4b7610c7a3b367bcacf6c4f90d5d022cab711", size = 374636, upload-time = "2026-02-08T10:22:30.493Z" } +sdist = { url = "https://files.pythonhosted.org/packages/83/d1/195005b5e45b443e305136df47ee7df4493d782e0c039dd0d97065580324/fastapi-0.128.6.tar.gz", hash = 
"sha256:0cb3946557e792d731b26a42b04912f16367e3c3135ea8290f620e234f2b604f", size = 374757, upload-time = "2026-02-09T17:27:03.541Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/e0/511972dba23ee76c0e9d09d1ae95e916fc8ebce5322b2b8b65a481428b10/fastapi-0.128.5-py3-none-any.whl", hash = "sha256:bceec0de8aa6564599c5bcc0593b0d287703562c848271fca8546fd2c87bf4dd", size = 103677, upload-time = "2026-02-08T10:22:28.919Z" }, + { url = "https://files.pythonhosted.org/packages/24/58/a2c4f6b240eeb148fb88cdac48f50a194aba760c1ca4988c6031c66a20ee/fastapi-0.128.6-py3-none-any.whl", hash = "sha256:bb1c1ef87d6086a7132d0ab60869d6f1ee67283b20fbf84ec0003bd335099509", size = 103674, upload-time = "2026-02-09T17:27:02.355Z" }, ] [[package]] @@ -2945,7 +2959,7 @@ wheels = [ [[package]] name = "langfuse" -version = "3.13.0" +version = "3.14.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backoff", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -2959,9 +2973,9 @@ dependencies = [ { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "wrapt", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/24/d0/744e5613c728427330ac2049da0f54fc313e8bf84622f71b025bfba65496/langfuse-3.13.0.tar.gz", hash = "sha256:dacea8111ca4442e97dbfec4f8d676cf9709b35357a26e468f8887b95de0012f", size = 233420, upload-time = "2026-02-06T19:54:14.415Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/7a/ddd8df7f2c5d8c2112a8b20fa88b2513917e2c25d2ef87034c0927f87596/langfuse-3.14.1.tar.gz", hash = "sha256:404a6104cd29353d7829aa417ec46565b04917e5599afdda96c5b0865f4bc991", size = 234530, upload-time = "2026-02-09T15:37:45.994Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/3d/63/148382e8e79948f7e5c9c137288e504bb88117574eb7e7c886b4fb470b4b/langfuse-3.13.0-py3-none-any.whl", hash = "sha256:71912ddac1cc831a65df895eae538a556f564c094ae51473e747426e9ded1a9d", size = 417626, upload-time = "2026-02-06T19:54:12.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/b9/e8ac3072469737358975da66ec4218dc1cee0051555dd4665b3e34a28420/langfuse-3.14.1-py3-none-any.whl", hash = "sha256:17bed605dbfc9947cbd1738a715f6d27c1b80b6da9f2946586171958fa5820d0", size = 420336, upload-time = "2026-02-09T15:37:44.381Z" }, ] [[package]] @@ -3864,7 +3878,7 @@ wheels = [ [[package]] name = "openai" -version = "2.17.0" +version = "2.18.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3876,9 +3890,9 @@ dependencies = [ { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9c/a2/677f22c4b487effb8a09439fb6134034b5f0a39ca27df8b95fac23a93720/openai-2.17.0.tar.gz", hash = "sha256:47224b74bd20f30c6b0a6a329505243cb2f26d5cf84d9f8d0825ff8b35e9c999", size = 631445, upload-time = "2026-02-05T16:27:40.953Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/cb/f2c9f988a06d1fcdd18ddc010f43ac384219a399eb01765493d6b34b1461/openai-2.18.0.tar.gz", hash = "sha256:5018d3bcb6651c5aac90e6d0bf9da5cde1bdd23749f67b45b37c522b6e6353af", size = 632124, upload-time = "2026-02-09T21:42:18.017Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/97/284535aa75e6e84ab388248b5a323fc296b1f70530130dee37f7f4fbe856/openai-2.17.0-py3-none-any.whl", hash = "sha256:4f393fd886ca35e113aac7ff239bcd578b81d8f104f5aedc7d3693eb2af1d338", size = 1069524, upload-time = 
"2026-02-05T16:27:38.941Z" }, + { url = "https://files.pythonhosted.org/packages/20/5f/8940e0641c223eaf972732b3154f2178a968290f8cb99e8c88582cde60ed/openai-2.18.0-py3-none-any.whl", hash = "sha256:538f97e1c77a00e3a99507688c878cda7e9e63031807ba425c68478854d48b30", size = 1069897, upload-time = "2026-02-09T21:42:16.4Z" }, ] [[package]] @@ -4525,7 +4539,7 @@ wheels = [ [[package]] name = "posthog" -version = "7.8.3" +version = "7.8.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backoff", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -4535,9 +4549,9 @@ dependencies = [ { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/ad/2f116cd9b83dc83ece4328a4efe0bcb80e5c2993837f89a788467d261da8/posthog-7.8.3.tar.gz", hash = "sha256:2b85e818bf818ac2768a890b772b7c12d4f909797226acd9327d66a319dbcf83", size = 167083, upload-time = "2026-02-06T13:16:22.938Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/b3/415f97ca0f0b13ab234ec81e8103d8db27589c0a97c32e939c2e186a689b/posthog-7.8.4.tar.gz", hash = "sha256:b8f2c302b2acb3a4b0067a1eb6b26234e578b663772554c374abb69d86b72525", size = 168300, upload-time = "2026-02-09T21:29:24.968Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/e5/5a4b060cbb9aa9defb8bfd55d15899b3146fece14147f4d66be80e81955a/posthog-7.8.3-py3-none-any.whl", hash = "sha256:1840796e4f7e14dd91ec5fdeb939712c3383fe9e758cfcdeb0317d8f30f7b901", size = 192528, upload-time = "2026-02-06T13:16:21.385Z" }, + { url = "https://files.pythonhosted.org/packages/1a/59/7459759ca3a8abcb06694cafdc213dca3dbc49439832ed5acdb4aa81fcde/posthog-7.8.4-py3-none-any.whl", hash = "sha256:21afb6784ab01b6115b90ad15d9400a476849f26d2d637000332c78b498b9784", 
size = 193841, upload-time = "2026-02-09T21:29:23.85Z" }, ] [[package]] @@ -5319,14 +5333,14 @@ wheels = [ [[package]] name = "redis" -version = "7.1.0" +version = "7.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "async-timeout", marker = "(python_full_version < '3.11.3' and sys_platform == 'darwin') or (python_full_version < '3.11.3' and sys_platform == 'linux') or (python_full_version < '3.11.3' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/43/c8/983d5c6579a411d8a99bc5823cc5712768859b5ce2c8afe1a65b37832c81/redis-7.1.0.tar.gz", hash = "sha256:b1cc3cfa5a2cb9c2ab3ba700864fb0ad75617b41f01352ce5779dabf6d5f9c3c", size = 4796669, upload-time = "2025-11-19T15:54:39.961Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/80/2971931d27651affa88a44c0ad7b8c4a19dc29c998abb20b23868d319b59/redis-7.1.1.tar.gz", hash = "sha256:a2814b2bda15b39dad11391cc48edac4697214a8a5a4bd10abe936ab4892eb43", size = 4800064, upload-time = "2026-02-09T18:39:40.292Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/f0/8956f8a86b20d7bb9d6ac0187cf4cd54d8065bc9a1a09eb8011d4d326596/redis-7.1.0-py3-none-any.whl", hash = "sha256:23c52b208f92b56103e17c5d06bdc1a6c2c0b3106583985a76a18f83b265de2b", size = 354159, upload-time = "2025-11-19T15:54:38.064Z" }, + { url = "https://files.pythonhosted.org/packages/29/55/1de1d812ba1481fa4b37fb03b4eec0fcb71b6a0d44c04ea3482eb017600f/redis-7.1.1-py3-none-any.whl", hash = "sha256:f77817f16071c2950492c67d40b771fa493eb3fccc630a424a10976dbb794b7a", size = 356057, upload-time = "2026-02-09T18:39:38.602Z" }, ] [[package]] From 5f25718f972ea8fe43d7ab2ccf2b9b2de449833a Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Mon, 9 Feb 2026 14:58:08 -0800 Subject: [PATCH 10/16] fix pipeline --- .../core/agent_framework/_workflows/_checkpoint_encoding.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py b/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py index 700d841374..abe3b8b2c4 100644 --- a/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py +++ b/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py @@ -3,7 +3,7 @@ from __future__ import annotations import base64 -import pickle # noqa: S403 +import pickle # nosec # noqa: S403 from typing import Any from agent_framework import get_logger @@ -146,7 +146,7 @@ def _pickle_to_base64(value: Any) -> str: def _base64_to_unpickle(encoded: str) -> Any: """Decode base64 string and unpickle.""" pickled = base64.b64decode(encoded.encode("ascii")) - return pickle.loads(pickled) # noqa: S301 + return pickle.loads(pickled) # nosec # noqa: S301 def _type_to_key(t: type) -> str: From 8c8a966d9de7475c5308461fa8b251c214799cae Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Mon, 9 Feb 2026 16:46:26 -0800 Subject: [PATCH 11/16] Copilot comments --- python/packages/core/agent_framework/_workflows/_workflow.py | 2 +- python/packages/core/tests/workflow/test_agent_executor.py | 5 ++++- .../agent_framework_orchestrations/_orchestration_state.py | 2 ++ python/packages/orchestrations/tests/test_concurrent.py | 5 ++++- 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/python/packages/core/agent_framework/_workflows/_workflow.py b/python/packages/core/agent_framework/_workflows/_workflow.py index 72c1945a9c..5a9b3858df 100644 --- a/python/packages/core/agent_framework/_workflows/_workflow.py +++ b/python/packages/core/agent_framework/_workflows/_workflow.py @@ -189,7 +189,7 @@ def __init__( start_executor: The starting executor for the workflow. runner_context: The RunnerContext instance to be used during workflow execution. max_iterations: The maximum number of iterations the workflow will run for convergence. - name: Optional human-readable name for the workflow. 
This can be used to identify the workflow in + name: A human-readable name for the workflow. This can be used to identify the workflow in checkpoints, and telemetry. If the workflow is built using WorkflowBuilder, this will be the name of the builder. This name should be unique across different workflow definitions for better observability and management. diff --git a/python/packages/core/tests/workflow/test_agent_executor.py b/python/packages/core/tests/workflow/test_agent_executor.py index e46d1864c6..34c29e92b7 100644 --- a/python/packages/core/tests/workflow/test_agent_executor.py +++ b/python/packages/core/tests/workflow/test_agent_executor.py @@ -85,7 +85,10 @@ async def test_agent_executor_checkpoint_stores_and_restores_state() -> None: # Verify checkpoint was created checkpoints = await storage.list_checkpoints(wf.name) - assert len(checkpoints) > 0 + assert len(checkpoints) >= 2, ( + "Expected at least 2 checkpoints. The first one is after the start executor, " + "and the second one is after the agent execution." 
+ ) # Get the second checkpoint which should contain the state after processing # the first message by the start executor in the sequential workflow diff --git a/python/packages/orchestrations/agent_framework_orchestrations/_orchestration_state.py b/python/packages/orchestrations/agent_framework_orchestrations/_orchestration_state.py index 3f3c3c5f9b..31846f9a82 100644 --- a/python/packages/orchestrations/agent_framework_orchestrations/_orchestration_state.py +++ b/python/packages/orchestrations/agent_framework_orchestrations/_orchestration_state.py @@ -62,6 +62,7 @@ def to_dict(self) -> dict[str, Any]: result: dict[str, Any] = { "conversation": self.conversation, "round_index": self.round_index, + "orchestrator_name": self.orchestrator_name, "metadata": dict(self.metadata), } if self.task is not None: @@ -86,6 +87,7 @@ def from_dict(cls, data: dict[str, Any]) -> OrchestrationState: return cls( conversation=data.get("conversation", []), round_index=data.get("round_index", 0), + orchestrator_name=data.get("orchestrator_name", ""), metadata=dict(data.get("metadata", {})), task=task, ) diff --git a/python/packages/orchestrations/tests/test_concurrent.py b/python/packages/orchestrations/tests/test_concurrent.py index 00e540afc4..3a35f56238 100644 --- a/python/packages/orchestrations/tests/test_concurrent.py +++ b/python/packages/orchestrations/tests/test_concurrent.py @@ -396,7 +396,10 @@ async def test_concurrent_checkpoint_runtime_only() -> None: assert baseline_output is not None checkpoints = await storage.list_checkpoints(wf.name) - assert checkpoints + assert len(checkpoints) >= 2, ( + "Expected at least 2 checkpoints. The first one is after the start executor, " + "and the second one is after the first round of agent executions." 
+ ) checkpoints.sort(key=lambda cp: cp.timestamp) resume_checkpoint = checkpoints[1] From 585fcf3a370f828f14ced54726ca0e69f7dcb8e2 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Tue, 10 Feb 2026 22:35:42 -0800 Subject: [PATCH 12/16] Fix tests --- python/packages/core/agent_framework/_workflows/_workflow.py | 5 +++-- .../core/tests/workflow/test_checkpoint_validation.py | 4 ++-- python/packages/core/tests/workflow/test_runner.py | 4 ++-- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/python/packages/core/agent_framework/_workflows/_workflow.py b/python/packages/core/agent_framework/_workflows/_workflow.py index f648898361..cd7dbb4a68 100644 --- a/python/packages/core/agent_framework/_workflows/_workflow.py +++ b/python/packages/core/agent_framework/_workflows/_workflow.py @@ -210,7 +210,8 @@ def __init__( self.id = str(uuid.uuid4()) # Capture a canonical fingerprint of the workflow graph so checkpoints can assert they are resumed with # an equivalent topology. - self.graph_signature_hash = self._hash_graph_signature(self._compute_graph_signature()) + self.graph_signature = self._compute_graph_signature() + self.graph_signature_hash = self._hash_graph_signature(self.graph_signature) # Output events (WorkflowEvent with type='output') from these executors are treated as workflow outputs. # If None or empty, all executor outputs are considered workflow outputs. 
@@ -764,7 +765,7 @@ def _compute_graph_signature(self) -> dict[str, Any]: if isinstance(executor, WorkflowExecutor): executor_sig = { "type": executor_sig, - "sub_workflow": executor.workflow._graph_signature, + "sub_workflow": executor.workflow.graph_signature, } executors_signature[executor_id] = executor_sig diff --git a/python/packages/core/tests/workflow/test_checkpoint_validation.py b/python/packages/core/tests/workflow/test_checkpoint_validation.py index 1a4783c623..2bea42beb4 100644 --- a/python/packages/core/tests/workflow/test_checkpoint_validation.py +++ b/python/packages/core/tests/workflow/test_checkpoint_validation.py @@ -126,7 +126,7 @@ async def test_resume_succeeds_when_sub_workflow_matches() -> None: _ = [event async for event in workflow.run("hello", stream=True)] - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(workflow.name) assert checkpoints, "expected at least one checkpoint to be created" target_checkpoint = checkpoints[-1] @@ -150,7 +150,7 @@ async def test_resume_fails_when_sub_workflow_changes() -> None: _ = [event async for event in workflow.run("hello", stream=True)] - checkpoints = await storage.list_checkpoints() + checkpoints = await storage.list_checkpoints(workflow.name) assert checkpoints, "expected at least one checkpoint to be created" target_checkpoint = checkpoints[-1] diff --git a/python/packages/core/tests/workflow/test_runner.py b/python/packages/core/tests/workflow/test_runner.py index 9950817821..039c61b07d 100644 --- a/python/packages/core/tests/workflow/test_runner.py +++ b/python/packages/core/tests/workflow/test_runner.py @@ -374,7 +374,7 @@ async def apply_checkpoint(self, checkpoint: WorkflowCheckpoint) -> None: # Restore messages from checkpoint for source_id, messages in checkpoint.messages.items(): for msg_data in messages: - await self.send_message(Message(data=msg_data, source_id=source_id)) + await self.send_message(WorkflowMessage(data=msg_data, 
source_id=source_id)) class FailingCheckpointContext(InProcRunnerContext): @@ -708,7 +708,7 @@ async def test_runner_checkpoint_with_resumed_flag(): runner._mark_resumed(5) # Add a message to trigger the checkpoint creation path - await ctx.send_message(Message(data=MockMessage(data=8), source_id="START")) + await ctx.send_message(WorkflowMessage(data=MockMessage(data=8), source_id="START")) await executor_a.execute( MockMessage(data=8), From b7a9ecb06188036e4c5650e79b8f0254da5b68a0 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Tue, 10 Feb 2026 22:45:04 -0800 Subject: [PATCH 13/16] Fix more tests --- .../core/tests/workflow/test_checkpoint.py | 50 +++++++++---------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/python/packages/core/tests/workflow/test_checkpoint.py b/python/packages/core/tests/workflow/test_checkpoint.py index 342cf9f022..8a0220619e 100644 --- a/python/packages/core/tests/workflow/test_checkpoint.py +++ b/python/packages/core/tests/workflow/test_checkpoint.py @@ -15,7 +15,7 @@ WorkflowCheckpointException, WorkflowEvent, ) -from agent_framework._workflows._runner_context import Message +from agent_framework._workflows._runner_context import WorkflowMessage # Module-level dataclasses for pickle serialization in roundtrip tests @@ -508,17 +508,17 @@ async def test_memory_checkpoint_storage_roundtrip_messages_with_complex_data(): """Test that messages dict with Message objects roundtrips correctly.""" storage = InMemoryCheckpointStorage() - msg1 = Message( + msg1 = WorkflowMessage( data={"text": "hello", "timestamp": datetime(2025, 1, 1, tzinfo=timezone.utc)}, source_id="source", target_id="target", ) - msg2 = Message( + msg2 = WorkflowMessage( data=(1, 2, 3), source_id="s2", target_id=None, ) - msg3 = Message( + msg3 = WorkflowMessage( data="simple string", source_id="s3", target_id="t3", @@ -545,9 +545,9 @@ async def test_memory_checkpoint_storage_roundtrip_messages_with_complex_data(): loaded_msg3 = loaded.messages["executor2"][0] 
# Verify Message type is preserved - assert isinstance(loaded_msg1, Message) - assert isinstance(loaded_msg2, Message) - assert isinstance(loaded_msg3, Message) + assert isinstance(loaded_msg1, WorkflowMessage) + assert isinstance(loaded_msg2, WorkflowMessage) + assert isinstance(loaded_msg3, WorkflowMessage) # Verify Message fields assert loaded_msg1.data["text"] == "hello" @@ -630,9 +630,9 @@ async def test_memory_checkpoint_storage_roundtrip_full_checkpoint(): """Test complete WorkflowCheckpoint roundtrip with all fields populated using proper types.""" storage = InMemoryCheckpointStorage() - # Create proper Message objects - msg1 = Message(data="msg1", source_id="s", target_id="t") - msg2 = Message(data=datetime(2025, 1, 1, tzinfo=timezone.utc), source_id="a", target_id="b") + # Create proper WorkflowMessage objects + msg1 = WorkflowMessage(data="msg1", source_id="s", target_id="t") + msg2 = WorkflowMessage(data=datetime(2025, 1, 1, tzinfo=timezone.utc), source_id="a", target_id="b") # Create proper WorkflowEvent for pending request pending_event = WorkflowEvent.request_info( @@ -690,8 +690,8 @@ async def test_memory_checkpoint_storage_roundtrip_full_checkpoint(): # Verify messages are proper Message objects loaded_msg1 = loaded.messages["exec1"][0] loaded_msg2 = loaded.messages["exec2"][0] - assert isinstance(loaded_msg1, Message) - assert isinstance(loaded_msg2, Message) + assert isinstance(loaded_msg1, WorkflowMessage) + assert isinstance(loaded_msg2, WorkflowMessage) assert loaded_msg1.data == "msg1" assert loaded_msg1.source_id == "s" assert loaded_msg2.data == datetime(2025, 1, 1, tzinfo=timezone.utc) @@ -1177,17 +1177,17 @@ async def test_file_checkpoint_storage_roundtrip_messages_with_complex_data(): with tempfile.TemporaryDirectory() as temp_dir: storage = FileCheckpointStorage(temp_dir) - msg1 = Message( + msg1 = WorkflowMessage( data={"text": "hello", "timestamp": datetime(2025, 1, 1, tzinfo=timezone.utc)}, source_id="source", target_id="target", ) - 
msg2 = Message( + msg2 = WorkflowMessage( data=(1, 2, 3), source_id="s2", target_id=None, ) - msg3 = Message( + msg3 = WorkflowMessage( data="simple string", source_id="s3", target_id="t3", @@ -1213,12 +1213,12 @@ async def test_file_checkpoint_storage_roundtrip_messages_with_complex_data(): loaded_msg2 = loaded.messages["executor1"][1] loaded_msg3 = loaded.messages["executor2"][0] - # Verify Message type is preserved - assert isinstance(loaded_msg1, Message) - assert isinstance(loaded_msg2, Message) - assert isinstance(loaded_msg3, Message) + # Verify WorkflowMessage type is preserved + assert isinstance(loaded_msg1, WorkflowMessage) + assert isinstance(loaded_msg2, WorkflowMessage) + assert isinstance(loaded_msg3, WorkflowMessage) - # Verify Message fields + # Verify WorkflowMessage fields assert loaded_msg1.data["text"] == "hello" assert loaded_msg1.data["timestamp"] == datetime(2025, 1, 1, tzinfo=timezone.utc) assert loaded_msg1.source_id == "source" @@ -1301,9 +1301,9 @@ async def test_file_checkpoint_storage_roundtrip_full_checkpoint(): with tempfile.TemporaryDirectory() as temp_dir: storage = FileCheckpointStorage(temp_dir) - # Create proper Message objects - msg1 = Message(data="msg1", source_id="s", target_id="t") - msg2 = Message(data=datetime(2025, 1, 1, tzinfo=timezone.utc), source_id="a", target_id="b") + # Create proper WorkflowMessage objects + msg1 = WorkflowMessage(data="msg1", source_id="s", target_id="t") + msg2 = WorkflowMessage(data=datetime(2025, 1, 1, tzinfo=timezone.utc), source_id="a", target_id="b") # Create proper WorkflowEvent for pending request pending_event = WorkflowEvent.request_info( @@ -1361,8 +1361,8 @@ async def test_file_checkpoint_storage_roundtrip_full_checkpoint(): # Verify messages are proper Message objects loaded_msg1 = loaded.messages["exec1"][0] loaded_msg2 = loaded.messages["exec2"][0] - assert isinstance(loaded_msg1, Message) - assert isinstance(loaded_msg2, Message) + assert isinstance(loaded_msg1, WorkflowMessage) + 
assert isinstance(loaded_msg2, WorkflowMessage) assert loaded_msg1.data == "msg1" assert loaded_msg1.source_id == "s" assert loaded_msg2.data == datetime(2025, 1, 1, tzinfo=timezone.utc) From 67750646ad8346bdd4ba323bcce1c1baeec3a068 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Tue, 10 Feb 2026 23:12:28 -0800 Subject: [PATCH 14/16] Address comments part 1 --- .../_workflows/_agent_executor.py | 18 +----- .../agent_framework/_workflows/_checkpoint.py | 59 +++++++++++++++---- .../_workflows/_checkpoint_encoding.py | 26 ++++++-- 3 files changed, 72 insertions(+), 31 deletions(-) diff --git a/python/packages/core/agent_framework/_workflows/_agent_executor.py b/python/packages/core/agent_framework/_workflows/_agent_executor.py index 7838e86d50..8290391fb9 100644 --- a/python/packages/core/agent_framework/_workflows/_agent_executor.py +++ b/python/packages/core/agent_framework/_workflows/_agent_executor.py @@ -245,24 +245,10 @@ async def on_checkpoint_restore(self, state: dict[str, Any]) -> None: state: Checkpoint data dict """ cache_payload = state.get("cache") - if cache_payload: - try: - self._cache = cache_payload - except Exception as exc: - logger.warning("Failed to restore cache: %s", exc) - self._cache = [] - else: - self._cache = [] + self._cache = cache_payload or [] full_conversation_payload = state.get("full_conversation") - if full_conversation_payload: - try: - self._full_conversation = full_conversation_payload - except Exception as exc: - logger.warning("Failed to restore full conversation: %s", exc) - self._full_conversation = [] - else: - self._full_conversation = [] + self._full_conversation = full_conversation_payload or [] thread_payload = state.get("agent_thread") if thread_payload: diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint.py b/python/packages/core/agent_framework/_workflows/_checkpoint.py index 326083eca5..1eebbc3797 100644 --- a/python/packages/core/agent_framework/_workflows/_checkpoint.py +++ 
b/python/packages/core/agent_framework/_workflows/_checkpoint.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +import copy import json import logging import os @@ -98,7 +99,21 @@ def to_dict(self) -> dict[str, Any]: @classmethod def from_dict(cls, data: Mapping[str, Any]) -> WorkflowCheckpoint: - return cls(**data) + """Create a WorkflowCheckpoint from a dictionary. + + Args: + data: Dictionary containing checkpoint fields. + + Returns: + A new WorkflowCheckpoint instance. + + Raises: + WorkflowCheckpointException: If required fields are missing. + """ + try: + return cls(**data) + except Exception as ex: + raise WorkflowCheckpointException(f"Failed to create WorkflowCheckpoint from dict: {ex}") from ex class CheckpointStorage(Protocol): @@ -183,7 +198,7 @@ def __init__(self) -> None: async def save(self, checkpoint: WorkflowCheckpoint) -> CheckpointID: """Save a checkpoint and return its ID.""" - self._checkpoints[checkpoint.checkpoint_id] = checkpoint + self._checkpoints[checkpoint.checkpoint_id] = copy.deepcopy(checkpoint) logger.debug(f"Saved checkpoint {checkpoint.checkpoint_id} to memory") return checkpoint.checkpoint_id @@ -212,7 +227,7 @@ async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: checkpoints = [cp for cp in self._checkpoints.values() if cp.workflow_name == workflow_name] if not checkpoints: return None - latest_checkpoint = max(checkpoints, key=lambda cp: cp.timestamp) + latest_checkpoint = max(checkpoints, key=lambda cp: datetime.fromisoformat(cp.timestamp)) logger.debug(f"Latest checkpoint for workflow {workflow_name} is {latest_checkpoint.checkpoint_id}") return latest_checkpoint @@ -239,6 +254,26 @@ def __init__(self, storage_path: str | Path): self.storage_path.mkdir(parents=True, exist_ok=True) logger.info(f"Initialized file checkpoint storage at {self.storage_path}") + def _validate_file_path(self, checkpoint_id: CheckpointID) -> Path: + """Validate that a checkpoint ID resolves to a path within 
the storage directory. + + This can prevent someone from crafting a checkpoint ID that points to an arbitrary + file on the filesystem. + + Args: + checkpoint_id: The checkpoint ID to validate. + + Returns: + The validated file path. + + Raises: + WorkflowCheckpointException: If the checkpoint ID would resolve outside the storage directory. + """ + file_path = (self.storage_path / f"{checkpoint_id}.json").resolve() + if not file_path.is_relative_to(self.storage_path.resolve()): + raise WorkflowCheckpointException(f"Invalid checkpoint ID: {checkpoint_id}") + return file_path + async def save(self, checkpoint: WorkflowCheckpoint) -> CheckpointID: """Save a checkpoint and return its ID. @@ -250,7 +285,7 @@ async def save(self, checkpoint: WorkflowCheckpoint) -> CheckpointID: """ from ._checkpoint_encoding import encode_checkpoint_value - file_path = self.storage_path / f"{checkpoint.checkpoint_id}.json" + file_path = self._validate_file_path(checkpoint.checkpoint_id) checkpoint_dict = checkpoint.to_dict() encoded_checkpoint = encode_checkpoint_value(checkpoint_dict) @@ -275,9 +310,10 @@ async def load(self, checkpoint_id: CheckpointID) -> WorkflowCheckpoint: The WorkflowCheckpoint object corresponding to the given ID. Raises: - WorkflowCheckpointException: If no checkpoint with the given ID exists. + WorkflowCheckpointException: If no checkpoint with the given ID exists, + or if checkpoint decoding fails. 
""" - file_path = self.storage_path / f"{checkpoint_id}.json" + file_path = self._validate_file_path(checkpoint_id) if not file_path.exists(): raise WorkflowCheckpointException(f"No checkpoint found with ID {checkpoint_id}") @@ -288,9 +324,12 @@ def _read() -> dict[str, Any]: encoded_checkpoint = await asyncio.to_thread(_read) - from ._checkpoint_encoding import decode_checkpoint_value + from ._checkpoint_encoding import CheckpointDecodingError, decode_checkpoint_value - decoded_checkpoint_dict = decode_checkpoint_value(encoded_checkpoint) + try: + decoded_checkpoint_dict = decode_checkpoint_value(encoded_checkpoint) + except CheckpointDecodingError as exc: + raise WorkflowCheckpointException(f"Failed to decode checkpoint {checkpoint_id}: {exc}") from exc checkpoint = WorkflowCheckpoint.from_dict(decoded_checkpoint_dict) logger.info(f"Loaded checkpoint {checkpoint_id} from {file_path}") return checkpoint @@ -332,7 +371,7 @@ async def delete(self, checkpoint_id: CheckpointID) -> bool: Returns: True if the checkpoint was successfully deleted, False if no checkpoint with the given ID exists. 
""" - file_path = self.storage_path / f"{checkpoint_id}.json" + file_path = self._validate_file_path(checkpoint_id) def _delete() -> bool: if file_path.exists(): @@ -355,7 +394,7 @@ async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: checkpoints = await self.list_checkpoints(workflow_name) if not checkpoints: return None - latest_checkpoint = max(checkpoints, key=lambda cp: cp.timestamp) + latest_checkpoint = max(checkpoints, key=lambda cp: datetime.fromisoformat(cp.timestamp)) logger.debug(f"Latest checkpoint for workflow {workflow_name} is {latest_checkpoint.checkpoint_id}") return latest_checkpoint diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py b/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py index abe3b8b2c4..b595fb37bf 100644 --- a/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py +++ b/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py @@ -58,7 +58,9 @@ def decode_checkpoint_value(value: Any) -> Any: Pickled values (identified by _PICKLE_MARKER) are decoded and unpickled. WARNING: Only call this with trusted data. Pickle can execute - arbitrary code during deserialization. + arbitrary code during deserialization. The post-unpickle type verification + detects accidental corruption or type mismatches, but cannot prevent + arbitrary code execution from malicious pickle payloads. Args: value: A JSON-deserialized value from checkpoint storage. @@ -68,7 +70,8 @@ def decode_checkpoint_value(value: Any) -> Any: Raises: CheckpointDecodingError: If the unpickled object's type doesn't match - the recorded type, indicating corruption or tampering. + the recorded type, indicating corruption, or if the base64/pickle + data is malformed. """ return _decode(value) @@ -121,6 +124,11 @@ def _decode(value: Any) -> Any: def _verify_type(obj: Any, expected_type_key: str) -> None: """Verify that an unpickled object matches its recorded type. 
+ This is a post-deserialization integrity check that detects accidental + corruption or type mismatches. It does not prevent arbitrary code execution + from malicious pickle payloads, since ``pickle.loads()`` has already + executed by the time this function is called. + Args: obj: The unpickled object. expected_type_key: The recorded type key (module:qualname format). @@ -144,9 +152,17 @@ def _pickle_to_base64(value: Any) -> str: def _base64_to_unpickle(encoded: str) -> Any: - """Decode base64 string and unpickle.""" - pickled = base64.b64decode(encoded.encode("ascii")) - return pickle.loads(pickled) # nosec # noqa: S301 + """Decode base64 string and unpickle. + + Raises: + CheckpointDecodingError: If the base64 data is corrupted or the pickle + format is incompatible. + """ + try: + pickled = base64.b64decode(encoded.encode("ascii")) + return pickle.loads(pickled) # nosec # noqa: S301 + except Exception as exc: + raise CheckpointDecodingError(f"Failed to decode pickled checkpoint data: {exc}") from exc def _type_to_key(t: type) -> str: From 1d8ce63349b3bad5d8f59132b1972e02e69bfa3a Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Tue, 10 Feb 2026 23:20:37 -0800 Subject: [PATCH 15/16] Address comments part 2 --- .../_workflows/_checkpoint_encoding.py | 6 +-- .../core/tests/workflow/test_checkpoint.py | 41 ++++++++--------- .../tests/workflow/test_checkpoint_decode.py | 45 +++++++++++++++++++ .../tests/workflow/test_checkpoint_encode.py | 14 +++--- 4 files changed, 75 insertions(+), 31 deletions(-) diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py b/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py index b595fb37bf..524f291c5e 100644 --- a/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py +++ b/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py @@ -86,11 +86,11 @@ def _encode(value: Any) -> Any: if isinstance(value, dict): return {str(k): _encode(v) for k, v in 
value.items()} # type: ignore - # Recursively encode list/tuple/set items - if isinstance(value, (list, tuple, set)): + # Recursively encode list items (lists are JSON-native collections) + if isinstance(value, list): return [_encode(item) for item in value] # type: ignore - # Everything else: pickle and base64 encode + # Everything else (tuples, sets, dataclasses, custom objects, etc.): pickle and base64 encode return { _PICKLE_MARKER: _pickle_to_base64(value), _TYPE_MARKER: _type_to_key(type(value)), # type: ignore diff --git a/python/packages/core/tests/workflow/test_checkpoint.py b/python/packages/core/tests/workflow/test_checkpoint.py index 8a0220619e..a72f9abec6 100644 --- a/python/packages/core/tests/workflow/test_checkpoint.py +++ b/python/packages/core/tests/workflow/test_checkpoint.py @@ -1074,9 +1074,8 @@ async def test_file_checkpoint_storage_roundtrip_dataclass(): async def test_file_checkpoint_storage_roundtrip_tuple_and_set(): """Test tuple/frozenset encoding behavior. - Note: Tuples containing only JSON-native types become lists in JSON encoding. - Frozensets get pickled since they're not JSON-serializable collections. - For type-preserving tuple storage, wrap them in a dataclass or other non-JSON-native type. + Tuples, sets, and frozensets are pickled to preserve their type through + the encode/decode roundtrip. 
""" with tempfile.TemporaryDirectory() as temp_dir: storage = FileCheckpointStorage(temp_dir) @@ -1097,16 +1096,17 @@ async def test_file_checkpoint_storage_roundtrip_tuple_and_set(): await storage.save(checkpoint) loaded = await storage.load(checkpoint.checkpoint_id) - # Tuples containing JSON-native values become lists (JSON doesn't have tuple type) - assert loaded.state["my_tuple"] == [1, "two", 3.0, None] - assert isinstance(loaded.state["my_tuple"], list) + # Tuples preserve their type through roundtrip + assert loaded.state["my_tuple"] == original_tuple + assert isinstance(loaded.state["my_tuple"], tuple) # Frozensets are pickled and preserve their type assert loaded.state["my_frozenset"] == original_frozenset assert isinstance(loaded.state["my_frozenset"], frozenset) - # Nested tuples also become lists - assert loaded.state["nested_tuple"]["inner"] == [10, 20, 30] + # Nested tuples also preserve their type + assert loaded.state["nested_tuple"]["inner"] == (10, 20, 30) + assert isinstance(loaded.state["nested_tuple"]["inner"], tuple) async def test_file_checkpoint_storage_roundtrip_complex_nested_structures(): @@ -1154,22 +1154,22 @@ async def test_file_checkpoint_storage_roundtrip_complex_nested_structures(): assert loaded.state["level1"]["level2"]["level3"]["deep_string"] == "hello" assert loaded.state["level1"]["level2"]["level3"]["deep_int"] == 123 assert loaded.state["level1"]["level2"]["level3"]["deep_datetime"] == datetime(2025, 1, 1, tzinfo=timezone.utc) - # Tuples containing JSON-native values become lists - assert loaded.state["level1"]["level2"]["level3"]["deep_tuple"] == [1, 2, 3] + # Tuples preserve their type through roundtrip + assert loaded.state["level1"]["level2"]["level3"]["deep_tuple"] == (1, 2, 3) # Verify list of dicts assert loaded.state["level1"]["list_of_dicts"][0]["a"] == 1 assert loaded.state["level1"]["list_of_dicts"][0]["b"] == datetime(2025, 2, 1, tzinfo=timezone.utc) - # Tuples containing JSON-native values become lists - assert 
loaded.state["level1"]["list_of_dicts"][1]["d"] == [4, 5, 6] + # Tuples preserve their type through roundtrip + assert loaded.state["level1"]["list_of_dicts"][1]["d"] == (4, 5, 6) # Verify mixed list with correct types assert loaded.state["mixed_list"][0] == "string" assert loaded.state["mixed_list"][1] == 42 assert loaded.state["mixed_list"][5] == datetime(2025, 3, 1, tzinfo=timezone.utc) - # Tuples containing JSON-native values become lists - assert loaded.state["mixed_list"][6] == [7, 8, 9] - assert isinstance(loaded.state["mixed_list"][6], list) + # Tuples preserve their type through roundtrip + assert loaded.state["mixed_list"][6] == (7, 8, 9) + assert isinstance(loaded.state["mixed_list"][6], tuple) async def test_file_checkpoint_storage_roundtrip_messages_with_complex_data(): @@ -1408,10 +1408,7 @@ async def test_file_checkpoint_storage_roundtrip_bytes(): async def test_file_checkpoint_storage_roundtrip_empty_collections(): - """Test that empty collections roundtrip correctly. - - Note: Empty tuples become empty lists (JSON doesn't have tuple type). 
- """ + """Test that empty collections roundtrip correctly.""" with tempfile.TemporaryDirectory() as temp_dir: storage = FileCheckpointStorage(temp_dir) @@ -1433,9 +1430,9 @@ async def test_file_checkpoint_storage_roundtrip_empty_collections(): assert loaded.state["empty_dict"] == {} assert loaded.state["empty_list"] == [] - # Empty tuples become empty lists (JSON doesn't have tuple type) - assert loaded.state["empty_tuple"] == [] - assert isinstance(loaded.state["empty_tuple"], list) + # Empty tuples preserve their type through roundtrip + assert loaded.state["empty_tuple"] == () + assert isinstance(loaded.state["empty_tuple"], tuple) assert loaded.state["nested_empty"]["inner_dict"] == {} assert loaded.messages == {} assert loaded.pending_request_info_events == {} diff --git a/python/packages/core/tests/workflow/test_checkpoint_decode.py b/python/packages/core/tests/workflow/test_checkpoint_decode.py index 99daa8cedb..5ef9bc480f 100644 --- a/python/packages/core/tests/workflow/test_checkpoint_decode.py +++ b/python/packages/core/tests/workflow/test_checkpoint_decode.py @@ -204,3 +204,48 @@ def test_roundtrip_regular_class() -> None: assert isinstance(decoded, NotADataclass) assert decoded.value == "test_value" + + +def test_roundtrip_tuple() -> None: + """Test that tuples preserve their type through encode/decode roundtrip.""" + original = (1, "two", 3.0) + + encoded = encode_checkpoint_value(original) + decoded = decode_checkpoint_value(encoded) + + assert isinstance(decoded, tuple) + assert decoded == original + + +def test_roundtrip_set() -> None: + """Test that sets preserve their type through encode/decode roundtrip.""" + original = {1, 2, 3} + + encoded = encode_checkpoint_value(original) + decoded = decode_checkpoint_value(encoded) + + assert isinstance(decoded, set) + assert decoded == original + + +def test_roundtrip_nested_tuple_in_dict() -> None: + """Test that tuples nested inside dicts preserve their type.""" + original = {"items": (1, 2, 3), "name": 
"test"} + + encoded = encode_checkpoint_value(original) + decoded = decode_checkpoint_value(encoded) + + assert isinstance(decoded["items"], tuple) + assert decoded["items"] == (1, 2, 3) + assert decoded["name"] == "test" + + +def test_roundtrip_set_in_list() -> None: + """Test that sets nested inside lists preserve their type.""" + original = [{"tags": {1, 2, 3}}] + + encoded = encode_checkpoint_value(original) + decoded = decode_checkpoint_value(encoded) + + assert isinstance(decoded[0]["tags"], set) + assert decoded[0]["tags"] == {1, 2, 3} diff --git a/python/packages/core/tests/workflow/test_checkpoint_encode.py b/python/packages/core/tests/workflow/test_checkpoint_encode.py index 7c90cc6436..68ec1ac4e3 100644 --- a/python/packages/core/tests/workflow/test_checkpoint_encode.py +++ b/python/packages/core/tests/workflow/test_checkpoint_encode.py @@ -120,19 +120,21 @@ def test_encode_simple_list() -> None: def test_encode_tuple() -> None: - """Test encoding a tuple (converted to list).""" + """Test encoding a tuple (pickled to preserve type).""" data = (1, 2, 3) result = encode_checkpoint_value(data) - assert isinstance(result, list) - assert result == [1, 2, 3] + assert isinstance(result, dict) + assert _PICKLE_MARKER in result + assert _TYPE_MARKER in result def test_encode_set() -> None: - """Test encoding a set (converted to list).""" + """Test encoding a set (pickled to preserve type).""" data = {1, 2, 3} result = encode_checkpoint_value(data) - assert isinstance(result, list) - assert sorted(result) == [1, 2, 3] + assert isinstance(result, dict) + assert _PICKLE_MARKER in result + assert _TYPE_MARKER in result def test_encode_nested_dict() -> None: From fbed10f93ad904e115ee74957500500bac56726f Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Wed, 11 Feb 2026 10:12:04 -0800 Subject: [PATCH 16/16] Comments --- .../agent_framework/_workflows/_checkpoint.py | 20 +++++----- .../tests/workflow/test_agent_executor.py | 2 +- .../core/tests/workflow/test_checkpoint.py | 
40 +++++++++---------- .../workflow/test_checkpoint_validation.py | 8 ++-- .../test_request_info_event_rehydrate.py | 6 +-- .../core/tests/workflow/test_sub_workflow.py | 2 +- .../core/tests/workflow/test_workflow.py | 6 +-- .../tests/workflow/test_workflow_agent.py | 2 +- .../devui/agent_framework_devui/_executor.py | 2 +- .../orchestrations/tests/test_concurrent.py | 8 ++-- .../orchestrations/tests/test_group_chat.py | 6 +-- .../orchestrations/tests/test_magentic.py | 12 +++--- .../orchestrations/tests/test_sequential.py | 8 ++-- ...ff_with_tool_approval_checkpoint_resume.py | 14 ++----- .../orchestrations/magentic_checkpoint.py | 21 +++------- .../checkpoint_with_human_in_the_loop.py | 5 ++- .../checkpoint/checkpoint_with_resume.py | 5 +-- .../checkpoint/sub_workflow_checkpoint.py | 6 +-- .../workflow_as_agent_checkpoint.py | 6 +-- 19 files changed, 81 insertions(+), 98 deletions(-) diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint.py b/python/packages/core/agent_framework/_workflows/_checkpoint.py index 1eebbc3797..4d3f87b89e 100644 --- a/python/packages/core/agent_framework/_workflows/_checkpoint.py +++ b/python/packages/core/agent_framework/_workflows/_checkpoint.py @@ -144,7 +144,7 @@ async def load(self, checkpoint_id: CheckpointID) -> WorkflowCheckpoint: """ ... - async def list_checkpoints(self, workflow_name: str) -> list[WorkflowCheckpoint]: + async def list_checkpoints(self, *, workflow_name: str) -> list[WorkflowCheckpoint]: """List checkpoint objects for a given workflow name. Args: @@ -166,7 +166,7 @@ async def delete(self, checkpoint_id: CheckpointID) -> bool: """ ... - async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: + async def get_latest(self, *, workflow_name: str) -> WorkflowCheckpoint | None: """Get the latest checkpoint for a given workflow name. Args: @@ -177,7 +177,7 @@ async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: """ ... 
- async def list_checkpoint_ids(self, workflow_name: str) -> list[CheckpointID]: + async def list_checkpoint_ids(self, *, workflow_name: str) -> list[CheckpointID]: """List checkpoint IDs for a given workflow name. Args: @@ -210,7 +210,7 @@ async def load(self, checkpoint_id: CheckpointID) -> WorkflowCheckpoint: return checkpoint raise WorkflowCheckpointException(f"No checkpoint found with ID {checkpoint_id}") - async def list_checkpoints(self, workflow_name: str) -> list[WorkflowCheckpoint]: + async def list_checkpoints(self, *, workflow_name: str) -> list[WorkflowCheckpoint]: """List checkpoint objects for a given workflow name.""" return [cp for cp in self._checkpoints.values() if cp.workflow_name == workflow_name] @@ -222,7 +222,7 @@ async def delete(self, checkpoint_id: CheckpointID) -> bool: return True return False - async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: + async def get_latest(self, *, workflow_name: str) -> WorkflowCheckpoint | None: """Get the latest checkpoint for a given workflow name.""" checkpoints = [cp for cp in self._checkpoints.values() if cp.workflow_name == workflow_name] if not checkpoints: @@ -231,7 +231,7 @@ async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: logger.debug(f"Latest checkpoint for workflow {workflow_name} is {latest_checkpoint.checkpoint_id}") return latest_checkpoint - async def list_checkpoint_ids(self, workflow_name: str) -> list[CheckpointID]: + async def list_checkpoint_ids(self, *, workflow_name: str) -> list[CheckpointID]: """List checkpoint IDs. 
If workflow_id is provided, filter by that workflow.""" return [cp.checkpoint_id for cp in self._checkpoints.values() if cp.workflow_name == workflow_name] @@ -334,7 +334,7 @@ def _read() -> dict[str, Any]: logger.info(f"Loaded checkpoint {checkpoint_id} from {file_path}") return checkpoint - async def list_checkpoints(self, workflow_name: str) -> list[WorkflowCheckpoint]: + async def list_checkpoints(self, *, workflow_name: str) -> list[WorkflowCheckpoint]: """List checkpoint objects for a given workflow name. Args: @@ -382,7 +382,7 @@ def _delete() -> bool: return await asyncio.to_thread(_delete) - async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: + async def get_latest(self, *, workflow_name: str) -> WorkflowCheckpoint | None: """Get the latest checkpoint for a given workflow name. Args: @@ -391,14 +391,14 @@ async def get_latest(self, workflow_name: str) -> WorkflowCheckpoint | None: Returns: The latest WorkflowCheckpoint object for the specified workflow name, or None if no checkpoints exist. """ - checkpoints = await self.list_checkpoints(workflow_name) + checkpoints = await self.list_checkpoints(workflow_name=workflow_name) if not checkpoints: return None latest_checkpoint = max(checkpoints, key=lambda cp: datetime.fromisoformat(cp.timestamp)) logger.debug(f"Latest checkpoint for workflow {workflow_name} is {latest_checkpoint.checkpoint_id}") return latest_checkpoint - async def list_checkpoint_ids(self, workflow_name: str) -> list[CheckpointID]: + async def list_checkpoint_ids(self, *, workflow_name: str) -> list[CheckpointID]: """List checkpoint IDs for a given workflow name. 
Args: diff --git a/python/packages/core/tests/workflow/test_agent_executor.py b/python/packages/core/tests/workflow/test_agent_executor.py index 45e7b32429..b4f431fd84 100644 --- a/python/packages/core/tests/workflow/test_agent_executor.py +++ b/python/packages/core/tests/workflow/test_agent_executor.py @@ -84,7 +84,7 @@ async def test_agent_executor_checkpoint_stores_and_restores_state() -> None: assert initial_agent.call_count == 1 # Verify checkpoint was created - checkpoints = await storage.list_checkpoints(wf.name) + checkpoints = await storage.list_checkpoints(workflow_name=wf.name) assert len(checkpoints) >= 2, ( "Expected at least 2 checkpoints. The first one is after the start executor, " "and the second one is after the agent execution." diff --git a/python/packages/core/tests/workflow/test_checkpoint.py b/python/packages/core/tests/workflow/test_checkpoint.py index a72f9abec6..b05d625502 100644 --- a/python/packages/core/tests/workflow/test_checkpoint.py +++ b/python/packages/core/tests/workflow/test_checkpoint.py @@ -199,33 +199,33 @@ async def test_memory_checkpoint_storage_list(): await storage.save(checkpoint3) # Test list_ids for workflow-1 - workflow1_checkpoint_ids = await storage.list_checkpoint_ids("workflow-1") + workflow1_checkpoint_ids = await storage.list_checkpoint_ids(workflow_name="workflow-1") assert len(workflow1_checkpoint_ids) == 2 assert checkpoint1.checkpoint_id in workflow1_checkpoint_ids assert checkpoint2.checkpoint_id in workflow1_checkpoint_ids # Test list for workflow-1 (returns objects) - workflow1_checkpoints = await storage.list_checkpoints("workflow-1") + workflow1_checkpoints = await storage.list_checkpoints(workflow_name="workflow-1") assert len(workflow1_checkpoints) == 2 assert all(isinstance(cp, WorkflowCheckpoint) for cp in workflow1_checkpoints) assert {cp.checkpoint_id for cp in workflow1_checkpoints} == {checkpoint1.checkpoint_id, checkpoint2.checkpoint_id} # Test list_ids for workflow-2 - workflow2_checkpoint_ids 
= await storage.list_checkpoint_ids("workflow-2") + workflow2_checkpoint_ids = await storage.list_checkpoint_ids(workflow_name="workflow-2") assert len(workflow2_checkpoint_ids) == 1 assert checkpoint3.checkpoint_id in workflow2_checkpoint_ids # Test list for workflow-2 (returns objects) - workflow2_checkpoints = await storage.list_checkpoints("workflow-2") + workflow2_checkpoints = await storage.list_checkpoints(workflow_name="workflow-2") assert len(workflow2_checkpoints) == 1 assert workflow2_checkpoints[0].checkpoint_id == checkpoint3.checkpoint_id # Test list_ids for non-existent workflow - empty_checkpoint_ids = await storage.list_checkpoint_ids("nonexistent-workflow") + empty_checkpoint_ids = await storage.list_checkpoint_ids(workflow_name="nonexistent-workflow") assert len(empty_checkpoint_ids) == 0 # Test list for non-existent workflow - empty_checkpoints = await storage.list_checkpoints("nonexistent-workflow") + empty_checkpoints = await storage.list_checkpoints(workflow_name="nonexistent-workflow") assert len(empty_checkpoints) == 0 @@ -267,17 +267,17 @@ async def test_memory_checkpoint_storage_get_latest(): await storage.save(checkpoint3) # Test get_latest for workflow-1 - latest = await storage.get_latest("workflow-1") + latest = await storage.get_latest(workflow_name="workflow-1") assert latest is not None assert latest.checkpoint_id == checkpoint2.checkpoint_id # Test get_latest for workflow-2 - latest2 = await storage.get_latest("workflow-2") + latest2 = await storage.get_latest(workflow_name="workflow-2") assert latest2 is not None assert latest2.checkpoint_id == checkpoint3.checkpoint_id # Test get_latest for non-existent workflow - latest_none = await storage.get_latest("nonexistent-workflow") + latest_none = await storage.get_latest(workflow_name="nonexistent-workflow") assert latest_none is None @@ -320,7 +320,7 @@ async def finish(self, message: str, ctx: WorkflowContext[Never, str]) -> None: _ = [event async for event in workflow.run("hello", 
stream=True)] # Get all checkpoints sorted by timestamp - checkpoints = sorted(await storage.list_checkpoints(workflow.name), key=lambda c: c.timestamp) + checkpoints = sorted(await storage.list_checkpoints(workflow_name=workflow.name), key=lambda c: c.timestamp) # Should have multiple checkpoints (one initial + one per superstep) assert len(checkpoints) >= 2, f"Expected at least 2 checkpoints, got {len(checkpoints)}" @@ -822,25 +822,25 @@ async def test_file_checkpoint_storage_list(): await storage.save(checkpoint3) # Test list_ids for workflow-1 - workflow1_checkpoint_ids = await storage.list_checkpoint_ids("workflow-1") + workflow1_checkpoint_ids = await storage.list_checkpoint_ids(workflow_name="workflow-1") assert len(workflow1_checkpoint_ids) == 2 assert checkpoint1.checkpoint_id in workflow1_checkpoint_ids assert checkpoint2.checkpoint_id in workflow1_checkpoint_ids # Test list for workflow-1 (returns objects) - workflow1_checkpoints = await storage.list_checkpoints("workflow-1") + workflow1_checkpoints = await storage.list_checkpoints(workflow_name="workflow-1") assert len(workflow1_checkpoints) == 2 assert all(isinstance(cp, WorkflowCheckpoint) for cp in workflow1_checkpoints) checkpoint_ids = {cp.checkpoint_id for cp in workflow1_checkpoints} assert checkpoint_ids == {checkpoint1.checkpoint_id, checkpoint2.checkpoint_id} # Test list_ids for workflow-2 - workflow2_checkpoint_ids = await storage.list_checkpoint_ids("workflow-2") + workflow2_checkpoint_ids = await storage.list_checkpoint_ids(workflow_name="workflow-2") assert len(workflow2_checkpoint_ids) == 1 assert checkpoint3.checkpoint_id in workflow2_checkpoint_ids # Test list for workflow-2 (returns objects) - workflow2_checkpoints = await storage.list_checkpoints("workflow-2") + workflow2_checkpoints = await storage.list_checkpoints(workflow_name="workflow-2") assert len(workflow2_checkpoints) == 1 assert workflow2_checkpoints[0].checkpoint_id == checkpoint3.checkpoint_id @@ -892,7 +892,7 @@ async def 
test_file_checkpoint_storage_corrupted_file(): f.write("{ invalid json }") # list should handle the corrupted file gracefully - checkpoints = await storage.list_checkpoints("any-workflow") + checkpoints = await storage.list_checkpoints(workflow_name="any-workflow") assert checkpoints == [] @@ -947,17 +947,17 @@ async def test_file_checkpoint_storage_get_latest(): await storage.save(checkpoint3) # Test get_latest for workflow-1 - latest = await storage.get_latest("workflow-1") + latest = await storage.get_latest(workflow_name="workflow-1") assert latest is not None assert latest.checkpoint_id == checkpoint2.checkpoint_id # Test get_latest for workflow-2 - latest2 = await storage.get_latest("workflow-2") + latest2 = await storage.get_latest(workflow_name="workflow-2") assert latest2 is not None assert latest2.checkpoint_id == checkpoint3.checkpoint_id # Test get_latest for non-existent workflow - latest_none = await storage.get_latest("nonexistent-workflow") + latest_none = await storage.get_latest(workflow_name="nonexistent-workflow") assert latest_none is None @@ -975,7 +975,7 @@ async def test_file_checkpoint_storage_list_ids_corrupted_file(): f.write("{ invalid json }") # list_ids should handle the corrupted file gracefully - checkpoint_ids = await storage.list_checkpoint_ids("test-workflow") + checkpoint_ids = await storage.list_checkpoint_ids(workflow_name="test-workflow") assert len(checkpoint_ids) == 1 assert checkpoint.checkpoint_id in checkpoint_ids @@ -985,7 +985,7 @@ async def test_file_checkpoint_storage_list_ids_empty(): storage = FileCheckpointStorage(temp_dir) # Test list_ids on empty storage - checkpoint_ids = await storage.list_checkpoint_ids("any-workflow") + checkpoint_ids = await storage.list_checkpoint_ids(workflow_name="any-workflow") assert checkpoint_ids == [] diff --git a/python/packages/core/tests/workflow/test_checkpoint_validation.py b/python/packages/core/tests/workflow/test_checkpoint_validation.py index 2bea42beb4..a9c748a324 100644 
--- a/python/packages/core/tests/workflow/test_checkpoint_validation.py +++ b/python/packages/core/tests/workflow/test_checkpoint_validation.py @@ -44,7 +44,7 @@ async def test_resume_fails_when_graph_mismatch() -> None: # Run once to create checkpoints _ = [event async for event in workflow.run("hello", stream=True)] # noqa: F841 - checkpoints = await storage.list_checkpoints(workflow.name) + checkpoints = await storage.list_checkpoints(workflow_name=workflow.name) assert checkpoints, "expected at least one checkpoint to be created" target_checkpoint = checkpoints[-1] @@ -67,7 +67,7 @@ async def test_resume_succeeds_when_graph_matches() -> None: workflow = build_workflow(storage, finish_id="finish") _ = [event async for event in workflow.run("hello", stream=True)] # noqa: F841 - checkpoints = sorted(await storage.list_checkpoints(workflow.name), key=lambda c: c.timestamp) + checkpoints = sorted(await storage.list_checkpoints(workflow_name=workflow.name), key=lambda c: c.timestamp) target_checkpoint = checkpoints[0] resumed_workflow = build_workflow(storage, finish_id="finish") @@ -126,7 +126,7 @@ async def test_resume_succeeds_when_sub_workflow_matches() -> None: _ = [event async for event in workflow.run("hello", stream=True)] - checkpoints = await storage.list_checkpoints(workflow.name) + checkpoints = await storage.list_checkpoints(workflow_name=workflow.name) assert checkpoints, "expected at least one checkpoint to be created" target_checkpoint = checkpoints[-1] @@ -150,7 +150,7 @@ async def test_resume_fails_when_sub_workflow_changes() -> None: _ = [event async for event in workflow.run("hello", stream=True)] - checkpoints = await storage.list_checkpoints(workflow.name) + checkpoints = await storage.list_checkpoints(workflow_name=workflow.name) assert checkpoints, "expected at least one checkpoint to be created" target_checkpoint = checkpoints[-1] diff --git a/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py 
b/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py index 0bd71e2d5d..dbb01d6e66 100644 --- a/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py +++ b/python/packages/core/tests/workflow/test_request_info_event_rehydrate.py @@ -149,7 +149,7 @@ async def test_checkpoint_with_pending_request_info_events(): assert request_info_event.source_executor_id == "approval_executor" # Step 2: List checkpoints to find the one with our pending request - checkpoints = await storage.list_checkpoints(workflow.name) + checkpoints = await storage.list_checkpoints(workflow_name=workflow.name) assert len(checkpoints) > 0, "No checkpoints were created during workflow execution" # Find the checkpoint with our pending request @@ -241,7 +241,7 @@ async def test_checkpoint_restore_with_responses_does_not_reemit_handled_request request_id = request_info_event.request_id # Step 2: Find the checkpoint with the pending request - checkpoints = await storage.list_checkpoints(workflow.name) + checkpoints = await storage.list_checkpoints(workflow_name=workflow.name) checkpoint_with_request = None for checkpoint in checkpoints: if request_id in checkpoint.pending_request_info_events: @@ -309,7 +309,7 @@ async def test_checkpoint_restore_with_partial_responses_reemits_unhandled_reque assert calc_event is not None # Step 2: Find the checkpoint with pending requests - checkpoints = await storage.list_checkpoints(workflow.name) + checkpoints = await storage.list_checkpoints(workflow_name=workflow.name) checkpoint_with_requests = None for checkpoint in checkpoints: has_approval = approval_event.request_id in checkpoint.pending_request_info_events diff --git a/python/packages/core/tests/workflow/test_sub_workflow.py b/python/packages/core/tests/workflow/test_sub_workflow.py index 6bf51b13a9..666e82f4d7 100644 --- a/python/packages/core/tests/workflow/test_sub_workflow.py +++ b/python/packages/core/tests/workflow/test_sub_workflow.py @@ -595,7 +595,7 @@ async 
def test_sub_workflow_checkpoint_restore_no_duplicate_requests() -> None: assert first_request_id is not None # Get checkpoint - checkpoints = await storage.list_checkpoints(workflow1.name) + checkpoints = await storage.list_checkpoints(workflow_name=workflow1.name) checkpoint_id = max(checkpoints, key=lambda cp: cp.iteration_count).checkpoint_id # Step 2: Resume workflow from checkpoint diff --git a/python/packages/core/tests/workflow/test_workflow.py b/python/packages/core/tests/workflow/test_workflow.py index 4b3f85eef4..744ad827ea 100644 --- a/python/packages/core/tests/workflow/test_workflow.py +++ b/python/packages/core/tests/workflow/test_workflow.py @@ -544,7 +544,7 @@ async def test_workflow_checkpoint_runtime_only_configuration( assert result.get_final_state() == WorkflowRunState.IDLE # Verify checkpoints were created - checkpoints = await storage.list_checkpoints(workflow.name) + checkpoints = await storage.list_checkpoints(workflow_name=workflow.name) assert len(checkpoints) > 0 # Find a superstep checkpoint to resume from @@ -594,8 +594,8 @@ async def test_workflow_checkpoint_runtime_overrides_buildtime( assert result is not None # Verify checkpoints were created in runtime storage, not build-time storage - buildtime_checkpoints = await buildtime_storage.list_checkpoints(workflow.name) - runtime_checkpoints = await runtime_storage.list_checkpoints(workflow.name) + buildtime_checkpoints = await buildtime_storage.list_checkpoints(workflow_name=workflow.name) + runtime_checkpoints = await runtime_storage.list_checkpoints(workflow_name=workflow.name) assert len(runtime_checkpoints) > 0, "Runtime storage should have checkpoints" assert len(buildtime_checkpoints) == 0, "Build-time storage should have no checkpoints when overridden" diff --git a/python/packages/core/tests/workflow/test_workflow_agent.py b/python/packages/core/tests/workflow/test_workflow_agent.py index b32ab0d61c..2a1532502b 100644 --- 
a/python/packages/core/tests/workflow/test_workflow_agent.py +++ b/python/packages/core/tests/workflow/test_workflow_agent.py @@ -607,7 +607,7 @@ async def test_checkpoint_storage_passed_to_workflow(self) -> None: # Drain workflow events to get checkpoint # The workflow should have created checkpoints - checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) + checkpoints = await checkpoint_storage.list_checkpoints(workflow_name=workflow.name) assert len(checkpoints) > 0, "Checkpoints should have been created when checkpoint_storage is provided" async def test_agent_executor_output_response_false_filters_streaming_events(self): diff --git a/python/packages/devui/agent_framework_devui/_executor.py b/python/packages/devui/agent_framework_devui/_executor.py index d110563d5d..92e6301b66 100644 --- a/python/packages/devui/agent_framework_devui/_executor.py +++ b/python/packages/devui/agent_framework_devui/_executor.py @@ -430,7 +430,7 @@ async def _execute_workflow( elif hil_responses: # Only auto-resume from latest checkpoint when we have HIL responses # Regular "Run" clicks should start fresh, not resume from checkpoints - checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) + checkpoints = await checkpoint_storage.list_checkpoints(workflow_name=workflow.name) if checkpoints: latest = max(checkpoints, key=lambda cp: cp.timestamp) checkpoint_id = latest.checkpoint_id diff --git a/python/packages/orchestrations/tests/test_concurrent.py b/python/packages/orchestrations/tests/test_concurrent.py index 1c6c17b128..8712aae3fd 100644 --- a/python/packages/orchestrations/tests/test_concurrent.py +++ b/python/packages/orchestrations/tests/test_concurrent.py @@ -224,7 +224,7 @@ async def test_concurrent_checkpoint_resume_round_trip() -> None: assert baseline_output is not None - checkpoints = await storage.list_checkpoints(wf.name) + checkpoints = await storage.list_checkpoints(workflow_name=wf.name) assert checkpoints checkpoints.sort(key=lambda 
cp: cp.timestamp) resume_checkpoint = checkpoints[1] @@ -267,7 +267,7 @@ async def test_concurrent_checkpoint_runtime_only() -> None: assert baseline_output is not None - checkpoints = await storage.list_checkpoints(wf.name) + checkpoints = await storage.list_checkpoints(workflow_name=wf.name) assert len(checkpoints) >= 2, ( "Expected at least 2 checkpoints. The first one is after the start executor, " "and the second one is after the first round of agent executions." @@ -316,8 +316,8 @@ async def test_concurrent_checkpoint_runtime_overrides_buildtime() -> None: assert baseline_output is not None - buildtime_checkpoints = await buildtime_storage.list_checkpoints(wf.name) - runtime_checkpoints = await runtime_storage.list_checkpoints(wf.name) + buildtime_checkpoints = await buildtime_storage.list_checkpoints(workflow_name=wf.name) + runtime_checkpoints = await runtime_storage.list_checkpoints(workflow_name=wf.name) assert len(runtime_checkpoints) > 0, "Runtime storage should have checkpoints" assert len(buildtime_checkpoints) == 0, "Build-time storage should have no checkpoints when overridden" diff --git a/python/packages/orchestrations/tests/test_group_chat.py b/python/packages/orchestrations/tests/test_group_chat.py index ff17525b9c..6544b681a0 100644 --- a/python/packages/orchestrations/tests/test_group_chat.py +++ b/python/packages/orchestrations/tests/test_group_chat.py @@ -620,7 +620,7 @@ async def test_group_chat_checkpoint_runtime_only() -> None: assert baseline_output is not None - checkpoints = await storage.list_checkpoints(wf.name) + checkpoints = await storage.list_checkpoints(workflow_name=wf.name) assert len(checkpoints) > 0, "Runtime-only checkpointing should have created checkpoints" @@ -656,8 +656,8 @@ async def test_group_chat_checkpoint_runtime_overrides_buildtime() -> None: assert baseline_output is not None - buildtime_checkpoints = await buildtime_storage.list_checkpoints(wf.name) - runtime_checkpoints = await 
runtime_storage.list_checkpoints(wf.name) + buildtime_checkpoints = await buildtime_storage.list_checkpoints(workflow_name=wf.name) + runtime_checkpoints = await runtime_storage.list_checkpoints(workflow_name=wf.name) assert len(runtime_checkpoints) > 0, "Runtime storage should have checkpoints" assert len(buildtime_checkpoints) == 0, "Build-time storage should have no checkpoints when overridden" diff --git a/python/packages/orchestrations/tests/test_magentic.py b/python/packages/orchestrations/tests/test_magentic.py index f3e0fa2fa3..17b6957205 100644 --- a/python/packages/orchestrations/tests/test_magentic.py +++ b/python/packages/orchestrations/tests/test_magentic.py @@ -362,7 +362,7 @@ async def test_magentic_checkpoint_resume_round_trip(): assert req_event is not None assert isinstance(req_event.data, MagenticPlanReviewRequest) - checkpoints = await storage.list_checkpoints(wf.name) + checkpoints = await storage.list_checkpoints(workflow_name=wf.name) assert checkpoints checkpoints.sort(key=lambda cp: cp.timestamp) resume_checkpoint = checkpoints[-1] @@ -607,7 +607,7 @@ async def _collect_checkpoints( storage: InMemoryCheckpointStorage, workflow_name: str, ) -> list[WorkflowCheckpoint]: - checkpoints = await storage.list_checkpoints(workflow_name) + checkpoints = await storage.list_checkpoints(workflow_name=workflow_name) assert checkpoints checkpoints.sort(key=lambda cp: cp.timestamp) return checkpoints @@ -774,7 +774,7 @@ async def test_magentic_checkpoint_runtime_only() -> None: assert baseline_output is not None - checkpoints = await storage.list_checkpoints(wf.name) + checkpoints = await storage.list_checkpoints(workflow_name=wf.name) assert len(checkpoints) > 0, "Runtime-only checkpointing should have created checkpoints" @@ -808,8 +808,8 @@ async def test_magentic_checkpoint_runtime_overrides_buildtime() -> None: assert baseline_output is not None - buildtime_checkpoints = await buildtime_storage.list_checkpoints(wf.name) - runtime_checkpoints = await 
runtime_storage.list_checkpoints(wf.name) + buildtime_checkpoints = await buildtime_storage.list_checkpoints(workflow_name=wf.name) + runtime_checkpoints = await runtime_storage.list_checkpoints(workflow_name=wf.name) assert len(runtime_checkpoints) > 0, "Runtime storage should have checkpoints" assert len(buildtime_checkpoints) == 0, "Build-time storage should have no checkpoints when overridden" @@ -858,7 +858,7 @@ async def test_magentic_checkpoint_restore_no_duplicate_history(): break # Get checkpoint - checkpoints = await storage.list_checkpoints(wf.name) + checkpoints = await storage.list_checkpoints(workflow_name=wf.name) assert len(checkpoints) > 0, "Should have created checkpoints" latest_checkpoint = checkpoints[-1] diff --git a/python/packages/orchestrations/tests/test_sequential.py b/python/packages/orchestrations/tests/test_sequential.py index 370e015c44..04a4ae4141 100644 --- a/python/packages/orchestrations/tests/test_sequential.py +++ b/python/packages/orchestrations/tests/test_sequential.py @@ -146,7 +146,7 @@ async def test_sequential_checkpoint_resume_round_trip() -> None: assert baseline_output is not None - checkpoints = await storage.list_checkpoints(wf.name) + checkpoints = await storage.list_checkpoints(workflow_name=wf.name) assert checkpoints checkpoints.sort(key=lambda cp: cp.timestamp) resume_checkpoint = checkpoints[0] @@ -185,7 +185,7 @@ async def test_sequential_checkpoint_runtime_only() -> None: assert baseline_output is not None - checkpoints = await storage.list_checkpoints(wf.name) + checkpoints = await storage.list_checkpoints(workflow_name=wf.name) assert checkpoints checkpoints.sort(key=lambda cp: cp.timestamp) resume_checkpoint = checkpoints[0] @@ -232,8 +232,8 @@ async def test_sequential_checkpoint_runtime_overrides_buildtime() -> None: assert baseline_output is not None - buildtime_checkpoints = await buildtime_storage.list_checkpoints(wf.name) - runtime_checkpoints = await runtime_storage.list_checkpoints(wf.name) + 
buildtime_checkpoints = await buildtime_storage.list_checkpoints(workflow_name=wf.name) + runtime_checkpoints = await runtime_storage.list_checkpoints(workflow_name=wf.name) assert len(runtime_checkpoints) > 0, "Runtime storage should have checkpoints" assert len(buildtime_checkpoints) == 0, "Build-time storage should have no checkpoints when overridden" diff --git a/python/samples/getting_started/orchestrations/handoff_with_tool_approval_checkpoint_resume.py b/python/samples/getting_started/orchestrations/handoff_with_tool_approval_checkpoint_resume.py index 2827dc314a..ce377b654d 100644 --- a/python/samples/getting_started/orchestrations/handoff_with_tool_approval_checkpoint_resume.py +++ b/python/samples/getting_started/orchestrations/handoff_with_tool_approval_checkpoint_resume.py @@ -139,15 +139,6 @@ def print_function_approval_request(request: Content, request_id: str) -> None: print(f"{'=' * 60}\n") -async def get_latest_checkpoint_id(storage: FileCheckpointStorage, workflow_name: str) -> str: - """Helper to get the latest checkpoint ID for a workflow.""" - checkpoints = await storage.list_checkpoints(workflow_name) - if not checkpoints: - raise RuntimeError("No checkpoints found.") - checkpoints.sort(key=lambda cp: cp.timestamp, reverse=True) - return checkpoints[0].checkpoint_id - - async def main() -> None: """ Demonstrate the checkpoint-based pause/resume pattern for handoff workflows. 
@@ -222,7 +213,10 @@ async def main() -> None: # This sample only expects HandoffAgentUserRequest and function approval requests raise ValueError(f"Unsupported request type: {type(request_event.data)}") - checkpoint_id = await get_latest_checkpoint_id(storage, workflow.name) + checkpoint = await storage.get_latest(workflow_name=workflow.name) + if not checkpoint: + raise RuntimeError("No checkpoints found.") + checkpoint_id = checkpoint.checkpoint_id results = await workflow.run(responses=responses, checkpoint_id=checkpoint_id) request_events = results.get_request_info_events() diff --git a/python/samples/getting_started/orchestrations/magentic_checkpoint.py b/python/samples/getting_started/orchestrations/magentic_checkpoint.py index f04a0f7655..adce878f0d 100644 --- a/python/samples/getting_started/orchestrations/magentic_checkpoint.py +++ b/python/samples/getting_started/orchestrations/magentic_checkpoint.py @@ -2,6 +2,7 @@ import asyncio import json +from datetime import datetime from pathlib import Path from typing import cast @@ -115,15 +116,11 @@ async def main() -> None: print("No plan review request emitted; nothing to resume.") return - checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) - if not checkpoints: + resume_checkpoint = await checkpoint_storage.get_latest(workflow_name=workflow.name) + if not resume_checkpoint: print("No checkpoints persisted.") return - resume_checkpoint = max( - checkpoints, - key=lambda cp: (cp.iteration_count, cp.timestamp), - ) print(f"Using checkpoint {resume_checkpoint.checkpoint_id} at iteration {resume_checkpoint.iteration_count}") # Show that the checkpoint JSON indeed contains the pending plan-review request record. 
@@ -180,7 +177,7 @@ async def main() -> None: def _pending_message_count(cp: WorkflowCheckpoint) -> int: return sum(len(msg_list) for msg_list in cp.messages.values() if isinstance(msg_list, list)) - all_checkpoints = await checkpoint_storage.list_checkpoints(resume_checkpoint.workflow_name) + all_checkpoints = await checkpoint_storage.list_checkpoints(workflow_name=resume_checkpoint.workflow_name) later_checkpoints_with_messages = [ cp for cp in all_checkpoints @@ -188,10 +185,7 @@ def _pending_message_count(cp: WorkflowCheckpoint) -> int: ] if later_checkpoints_with_messages: - post_plan_checkpoint = max( - later_checkpoints_with_messages, - key=lambda cp: (cp.iteration_count, cp.timestamp), - ) + post_plan_checkpoint = max(later_checkpoints_with_messages, key=lambda cp: datetime.fromisoformat(cp.timestamp)) else: later_checkpoints = [cp for cp in all_checkpoints if cp.iteration_count > resume_checkpoint.iteration_count] @@ -199,10 +193,7 @@ def _pending_message_count(cp: WorkflowCheckpoint) -> int: print("\nNo additional checkpoints recorded beyond plan approval; sample complete.") return - post_plan_checkpoint = max( - later_checkpoints, - key=lambda cp: (cp.iteration_count, cp.timestamp), - ) + post_plan_checkpoint = max(later_checkpoints, key=lambda cp: datetime.fromisoformat(cp.timestamp)) print("\n=== Stage 3: resume from post-plan checkpoint ===") pending_messages = _pending_message_count(post_plan_checkpoint) print( diff --git a/python/samples/getting_started/workflows/checkpoint/checkpoint_with_human_in_the_loop.py b/python/samples/getting_started/workflows/checkpoint/checkpoint_with_human_in_the_loop.py index 85cee7a974..12cb08a8be 100644 --- a/python/samples/getting_started/workflows/checkpoint/checkpoint_with_human_in_the_loop.py +++ b/python/samples/getting_started/workflows/checkpoint/checkpoint_with_human_in_the_loop.py @@ -3,6 +3,7 @@ import asyncio import sys from dataclasses import dataclass +from datetime import datetime from pathlib import 
Path from typing import Any @@ -282,12 +283,12 @@ async def main() -> None: result = await run_interactive_session(workflow, initial_message=brief) print(f"Workflow completed with: {result}") - checkpoints = await storage.list_checkpoints(workflow.name) + checkpoints = await storage.list_checkpoints(workflow_name=workflow.name) if not checkpoints: print("No checkpoints recorded.") return - sorted_cps = sorted(checkpoints, key=lambda c: c.timestamp) + sorted_cps = sorted(checkpoints, key=lambda cp: datetime.fromisoformat(cp.timestamp)) print("\nAvailable checkpoints:") for idx, cp in enumerate(sorted_cps): print(f" [{idx}] id={cp.checkpoint_id} iter={cp.iteration_count}") diff --git a/python/samples/getting_started/workflows/checkpoint/checkpoint_with_resume.py b/python/samples/getting_started/workflows/checkpoint/checkpoint_with_resume.py index 288f67ce31..572dd4f0ee 100644 --- a/python/samples/getting_started/workflows/checkpoint/checkpoint_with_resume.py +++ b/python/samples/getting_started/workflows/checkpoint/checkpoint_with_resume.py @@ -140,10 +140,9 @@ async def main(): break # Find the latest checkpoint to resume from - all_checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) - if not all_checkpoints: + latest_checkpoint = await checkpoint_storage.get_latest(workflow_name=workflow.name) + if not latest_checkpoint: raise RuntimeError("No checkpoints available to resume from.") - latest_checkpoint = all_checkpoints[-1] print( f"Checkpoint {latest_checkpoint.checkpoint_id}: " f"(iter={latest_checkpoint.iteration_count}, messages={latest_checkpoint.messages})" diff --git a/python/samples/getting_started/workflows/checkpoint/sub_workflow_checkpoint.py b/python/samples/getting_started/workflows/checkpoint/sub_workflow_checkpoint.py index 98cfcaaaaf..833bd7c920 100644 --- a/python/samples/getting_started/workflows/checkpoint/sub_workflow_checkpoint.py +++ b/python/samples/getting_started/workflows/checkpoint/sub_workflow_checkpoint.py @@ -345,14 
+345,12 @@ async def main() -> None: if request_id is None: raise RuntimeError("Sub-workflow completed without requesting review.") - checkpoints = await storage.list_checkpoints(workflow.name) - if not checkpoints: + resume_checkpoint = await storage.get_latest(workflow_name=workflow.name) + if not resume_checkpoint: raise RuntimeError("No checkpoints found.") # Print the checkpoint to show pending requests # We didn't handle the request above so the request is still pending the last checkpoint - checkpoints.sort(key=lambda cp: cp.timestamp) - resume_checkpoint = checkpoints[-1] print(f"Using checkpoint {resume_checkpoint.checkpoint_id} at iteration {resume_checkpoint.iteration_count}") checkpoint_path = storage.storage_path / f"{resume_checkpoint.checkpoint_id}.json" diff --git a/python/samples/getting_started/workflows/checkpoint/workflow_as_agent_checkpoint.py b/python/samples/getting_started/workflows/checkpoint/workflow_as_agent_checkpoint.py index cc2dc8100e..552ced2892 100644 --- a/python/samples/getting_started/workflows/checkpoint/workflow_as_agent_checkpoint.py +++ b/python/samples/getting_started/workflows/checkpoint/workflow_as_agent_checkpoint.py @@ -69,7 +69,7 @@ async def basic_checkpointing() -> None: print(f"[{speaker}]: {msg.text}") # Show checkpoints that were created - checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) + checkpoints = await checkpoint_storage.list_checkpoints(workflow_name=workflow.name) print(f"\nCheckpoints created: {len(checkpoints)}") for i, cp in enumerate(checkpoints[:5], 1): print(f" {i}. 
{cp.checkpoint_id}") @@ -110,7 +110,7 @@ async def checkpointing_with_thread() -> None: print(f"[assistant]: {response2.messages[0].text}") # Show accumulated state - checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) + checkpoints = await checkpoint_storage.list_checkpoints(workflow_name=workflow.name) print(f"\nTotal checkpoints across both turns: {len(checkpoints)}") if thread.message_store: @@ -147,7 +147,7 @@ async def streaming_with_checkpoints() -> None: print() # Newline after streaming - checkpoints = await checkpoint_storage.list_checkpoints(workflow.name) + checkpoints = await checkpoint_storage.list_checkpoints(workflow_name=workflow.name) print(f"\nCheckpoints created during stream: {len(checkpoints)}")