From 3ec14284ac824b42c966123d021169acef13fd2f Mon Sep 17 00:00:00 2001
From: Matt Campbell
Date: Tue, 20 Jan 2026 12:41:19 -0500
Subject: [PATCH] fix(openai): normalize Pydantic objects before JSON
 serialization in truncate_messages_by_size

The OpenAI SDK v1+ returns Pydantic model instances (e.g.,
ResponseFunctionToolCall), which are not directly JSON serializable. When
these objects are passed to truncate_messages_by_size(), the json.dumps()
call fails with a TypeError.

The _normalize_data() helper already exists and properly handles Pydantic
objects by calling .model_dump(), but it wasn't being used in
truncate_messages_by_size(). This fix adds normalization at the start of
truncate_messages_by_size() to ensure all Pydantic objects are converted
to JSON-compatible dicts before serialization.

Fixes #5350
---
 sentry_sdk/ai/utils.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/sentry_sdk/ai/utils.py b/sentry_sdk/ai/utils.py
index a4ebe96d99..b4fa0e286a 100644
--- a/sentry_sdk/ai/utils.py
+++ b/sentry_sdk/ai/utils.py
@@ -660,6 +660,10 @@ def truncate_messages_by_size(
     In the single message case, the serialized message size may exceed
     `max_bytes`, because truncation is based only on character count in
     that case.
     """
+    # Normalize messages to ensure JSON serialization works
+    # (handles Pydantic objects from OpenAI SDK v1+)
+    messages = _normalize_data(messages, unpack=False)
+
     serialized_json = json.dumps(messages, separators=(",", ":"))
     current_size = len(serialized_json.encode("utf-8"))
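
For reviewers, a minimal standalone sketch (not part of the patch) of the
failure mode and the normalization the fix relies on. The ToolCall model
below is a hypothetical stand-in for SDK objects such as
ResponseFunctionToolCall, and the inline model_dump() call approximates
what the commit message says _normalize_data() does internally:

    import json

    from pydantic import BaseModel


    class ToolCall(BaseModel):
        # Hypothetical stand-in for an OpenAI SDK v1+ response object
        # (e.g., ResponseFunctionToolCall), which is a Pydantic model.
        id: str
        name: str


    messages = [
        {"role": "assistant", "tool_calls": [ToolCall(id="c1", name="f")]}
    ]

    try:
        # What truncate_messages_by_size() did before the patch:
        # json.dumps() cannot serialize Pydantic model instances.
        json.dumps(messages, separators=(",", ":"))
    except TypeError as exc:
        # -> Object of type ToolCall is not JSON serializable
        print(f"Before the fix: {exc}")

    # Pydantic v2's model_dump() converts the model to a plain dict,
    # approximating the conversion _normalize_data() applies before
    # serialization.
    normalized = [
        {**m, "tool_calls": [tc.model_dump() for tc in m["tool_calls"]]}
        for m in messages
    ]
    print(json.dumps(normalized, separators=(",", ":")))  # now succeeds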