feat(openai-agents): Set system instruction attribute on gen_ai.chat spans (#5370)
base: webb/openai-responses-system-instruction
New file `sentry_sdk/ai/_openai_completions_api.py` (module path taken from the import added in the integration diff below), +48 lines:

```python
from collections.abc import Iterable

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from sentry_sdk._types import TextPart

    from openai.types.chat import (
        ChatCompletionMessageParam,
        ChatCompletionSystemMessageParam,
    )


def _is_system_instruction(message: "ChatCompletionMessageParam") -> bool:
    return isinstance(message, dict) and message.get("role") == "system"


def _get_system_instructions(
    messages: "Iterable[ChatCompletionMessageParam]",
) -> "list[ChatCompletionMessageParam]":
    if not isinstance(messages, Iterable):
        return []

    return [message for message in messages if _is_system_instruction(message)]


def _transform_system_instructions(
    system_instructions: "list[ChatCompletionSystemMessageParam]",
) -> "list[TextPart]":
    instruction_text_parts: "list[TextPart]" = []

    for instruction in system_instructions:
        if not isinstance(instruction, dict):
            continue

        content = instruction.get("content")

        if isinstance(content, str):
            instruction_text_parts.append({"type": "text", "content": content})
        elif isinstance(content, list):
            for part in content:
                if isinstance(part, dict) and part.get("type") == "text":
                    text = part.get("text", "")
                    if text:
                        instruction_text_parts.append({"type": "text", "content": text})

    return instruction_text_parts
```
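For illustration, here is how these helpers would compose on a hand-written Chat Completions-style message list (the messages below are made up; only the two helper functions come from the new file):

```python
# Hypothetical Chat Completions messages; not part of the PR.
messages = [
    {"role": "system", "content": "You are a terse assistant."},
    {
        "role": "system",
        "content": [{"type": "text", "text": "Answer in French."}],
    },
    {"role": "user", "content": "Bonjour!"},
]

system_messages = _get_system_instructions(messages)
# Keeps only the two dicts whose "role" is "system".

text_parts = _transform_system_instructions(system_messages)
# [
#     {"type": "text", "content": "You are a terse assistant."},
#     {"type": "text", "content": "Answer in French."},
# ]
```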
New file `sentry_sdk/ai/_openai_responses_api.py` (module path taken from the import added in the integration diff below), +22 lines:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Union

    from openai.types.responses import ResponseInputParam, ResponseInputItemParam


def _is_system_instruction(message: "ResponseInputItemParam") -> bool:
    if not isinstance(message, dict) or not message.get("role") == "system":
        return False

    return "type" not in message or message["type"] == "message"


def _get_system_instructions(
    messages: "Union[str, ResponseInputParam]",
) -> "list[ResponseInputItemParam]":
    if not isinstance(messages, list):
        return []

    return [message for message in messages if _is_system_instruction(message)]
```
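A similar sketch for the Responses API variant (the input items are made up); note the `isinstance(messages, list)` guard, which means a plain string `input` never contributes system instructions:

```python
# Hypothetical Responses API input items; not part of the PR.
input_items = [
    {"role": "system", "content": "Only cite primary sources."},
    {"role": "system", "type": "message", "content": "Be brief."},
    {"type": "function_call", "name": "lookup", "arguments": "{}"},
    {"role": "user", "content": "What changed in v2?"},
]

_get_system_instructions(input_items)
# Returns the two system items; the function_call and user items are skipped.

_get_system_instructions("a plain string prompt")
# Returns [] because the input is not a list.
```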
Changes to the OpenAI Agents integration module (the modified file's path is not shown in this view):

```diff
@@ -11,14 +11,20 @@
 from sentry_sdk.scope import should_send_default_pii
 from sentry_sdk.tracing_utils import set_span_errored
 from sentry_sdk.utils import event_from_exception, safe_serialize
+from sentry_sdk.ai._openai_completions_api import _transform_system_instructions
+from sentry_sdk.ai._openai_responses_api import (
+    _is_system_instruction,
+    _get_system_instructions,
+)

 from typing import TYPE_CHECKING

 if TYPE_CHECKING:
     from typing import Any
-    from agents import Usage
+    from agents import Usage, TResponseInputItem

     from sentry_sdk.tracing import Span
+    from sentry_sdk._types import TextPart

 try:
     import agents
@@ -115,19 +121,39 @@ def _set_input_data(
         return
     request_messages = []

-    system_instructions = get_response_kwargs.get("system_instructions")
-    if system_instructions:
-        request_messages.append(
+    messages: "str | list[TResponseInputItem]" = get_response_kwargs.get("input", [])
+
+    instructions_text_parts: "list[TextPart]" = []
+    explicit_instructions = get_response_kwargs.get("system_instructions")
+    if explicit_instructions is not None:
+        instructions_text_parts.append(
             {
-                "role": GEN_AI_ALLOWED_MESSAGE_ROLES.SYSTEM,
-                "content": [{"type": "text", "text": system_instructions}],
+                "type": "text",
+                "content": explicit_instructions,
             }
         )

-    for message in get_response_kwargs.get("input", []):
+    system_instructions = _get_system_instructions(messages)
+
+    # Deliberate use of function accepting completions API type because
+    # of shared structure FOR THIS PURPOSE ONLY.
+    instructions_text_parts += _transform_system_instructions(system_instructions)
+
+    if len(instructions_text_parts) > 0:
+        set_data_normalized(
+            span,
+            SPANDATA.GEN_AI_SYSTEM_INSTRUCTIONS,
+            instructions_text_parts,
+            unpack=False,
+        )
+
+    non_system_messages = [
+        message for message in messages if not _is_system_instruction(message)
+    ]
```
Review comment on the `non_system_messages` lines above: Missing string input handling causes AttributeError crash (Medium Severity). Additional locations: 1.

Reply from the PR author (contributor): The code throws an exception when passed a string before, so I consider this out of scope.
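To make the exchange concrete, here is a sketch of the string-input path under discussion (the kwargs are invented; the behavior is inferred from the code in this diff):

```python
# Hypothetical kwargs: the Responses API also accepts a plain string "input".
get_response_kwargs = {"input": "Summarize the incident report."}

messages = get_response_kwargs.get("input", [])

# The new helper bails out safely on strings:
_get_system_instructions(messages)  # -> []

# But iterating the string yields single characters, none of which are dicts,
# so the pre-existing per-message loop would still fail once it calls
# message.get("type") on a str, as it did before this change.
non_system_messages = [m for m in messages if not _is_system_instruction(m)]
# -> ['S', 'u', 'm', 'm', ...]
```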
The diff continues:

```diff
+
+    for message in non_system_messages:
         if "role" in message:
-            normalized_role = normalize_message_role(message.get("role"))
-            content = message.get("content")
+            normalized_role = normalize_message_role(message.get("role"))  # type: ignore
+            content = message.get("content")  # type: ignore
             request_messages.append(
                 {
                     "role": normalized_role,
@@ -139,14 +165,14 @@ def _set_input_data(
                 }
             )
         else:
-            if message.get("type") == "function_call":
+            if message.get("type") == "function_call":  # type: ignore
                 request_messages.append(
                     {
                         "role": GEN_AI_ALLOWED_MESSAGE_ROLES.ASSISTANT,
                         "content": [message],
                     }
                 )
-            elif message.get("type") == "function_call_output":
+            elif message.get("type") == "function_call_output":  # type: ignore
                 request_messages.append(
                     {
                         "role": GEN_AI_ALLOWED_MESSAGE_ROLES.TOOL,
```
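Taken together, a rough sketch of what the change is meant to produce on the span (the attribute values are illustrative; the exact shape after `set_data_normalized` is not shown in this diff):

```python
# Hypothetical kwargs seen by _set_input_data for one agent call.
get_response_kwargs = {
    "system_instructions": "You are a support bot.",
    "input": [
        {"role": "system", "content": "Never reveal internal IDs."},
        {"role": "user", "content": "Where is my order?"},
    ],
}

# With this change, SPANDATA.GEN_AI_SYSTEM_INSTRUCTIONS would be set to roughly:
#   [
#       {"type": "text", "content": "You are a support bot."},
#       {"type": "text", "content": "Never reveal internal IDs."},
#   ]
# while the system message is filtered out of the request-messages list, so only
# the user message is recorded there.
```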