-
Notifications
You must be signed in to change notification settings - Fork 873
Implement OpenAI Responses API instrumentation and examples #4166
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
f3fbd78
2e568bb
0f670d9
8987864
02a80cd
88c7908
65ee9c0
e4378e8
9bbdd62
73df7d6
3c0c4bb
c15627f
5e26cdd
3f69b56
9eeac24
e2604b6
8f4679e
11d1b3c
bc1a604
67b49ed
0d77913
a8c5f43
4962a9f
f2f0b48
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||
|---|---|---|---|---|
| @@ -0,0 +1,122 @@ | ||||
| # Copyright The OpenTelemetry Authors | ||||
| # | ||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | ||||
| # you may not use this file except in compliance with the License. | ||||
| # You may obtain a copy of the License at | ||||
| # | ||||
| # http://www.apache.org/licenses/LICENSE-2.0 | ||||
| # | ||||
| # Unless required by applicable law or agreed to in writing, software | ||||
| # distributed under the License is distributed on an "AS IS" BASIS, | ||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
| # See the License for the specific language governing permissions and | ||||
| # limitations under the License. | ||||
|
|
||||
| from typing import TYPE_CHECKING | ||||
|
|
||||
| from opentelemetry.semconv._incubating.attributes import ( | ||||
| gen_ai_attributes as GenAIAttributes, | ||||
| ) | ||||
| from opentelemetry.util.genai.types import Error, LLMInvocation | ||||
|
|
||||
| from .response_extractors import ( | ||||
| GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS, | ||||
| GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS, | ||||
| OPENAI, | ||||
| _extract_input_messages, | ||||
| _extract_output_type, | ||||
| _extract_system_instruction, | ||||
| _set_invocation_response_attributes, | ||||
| ) | ||||
| from .response_wrappers import ResponseStreamWrapper | ||||
| from .utils import get_llm_request_attributes, is_streaming | ||||
|
|
||||
| if TYPE_CHECKING: | ||||
| from opentelemetry.util.genai.handler import TelemetryHandler | ||||
|
|
||||
# Public API of this module: the patch factory plus response-attribute
# helpers re-exported for use by sibling instrumentation modules.
__all__ = [
    "responses_create",
    "_set_invocation_response_attributes",
    "GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS",
    "GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS",
]
|
|
||||
| # --------------------------------------------------------------------------- | ||||
| # Patch functions | ||||
| # --------------------------------------------------------------------------- | ||||
|
|
||||
|
|
||||
def responses_create(
    handler: "TelemetryHandler",
    capture_content: bool,
):
    """Build a wrapper that traces calls to ``Responses.create``.

    Args:
        handler: Telemetry handler managing the LLM invocation lifecycle
            (``start_llm`` / ``stop_llm`` / ``fail_llm``).
        capture_content: When ``True``, prompt and system-instruction
            content is recorded on the invocation; otherwise content is
            omitted and only request/usage attributes are captured.

    Returns:
        A ``traced_method(wrapped, instance, args, kwargs)`` callable
        suitable for wrapt-style patching of the upstream method.
    """
    # Upstream method being wrapped:
    # https://github.com/openai/openai-python/blob/dc68b90655912886bd7a6c7787f96005452ebfc9/src/openai/resources/responses/responses.py#L828

    def traced_method(wrapped, instance, args, kwargs):
        # NOTE: a guard `if Error is None or LLMInvocation is None` was
        # removed as dead code — both names are imported unconditionally at
        # module import time, so an import failure raises there and the
        # names can never be None here.
        operation_name = GenAIAttributes.GenAiOperationNameValues.CHAT.value
        span_attributes = get_llm_request_attributes(
            kwargs,
            instance,
            operation_name,
        )
        output_type = _extract_output_type(kwargs)
        if output_type:
            span_attributes[GenAIAttributes.GEN_AI_OUTPUT_TYPE] = output_type
        # Fall back to "unknown" so LLMInvocation always receives a model
        # string. TODO(review): consider omitting the attribute entirely
        # when the request does not specify a model.
        request_model = str(
            span_attributes.get(GenAIAttributes.GEN_AI_REQUEST_MODEL)
            or "unknown"
        )
        streaming = is_streaming(kwargs)

        invocation = handler.start_llm(
            LLMInvocation(
                request_model=request_model,
                operation_name=operation_name,
                provider=OPENAI,
                # Message/system content is only attached when the caller
                # opted in via capture_content.
                input_messages=_extract_input_messages(kwargs)
                if capture_content
                else [],
                system_instruction=_extract_system_instruction(kwargs)
                if capture_content
                else [],
                attributes=span_attributes.copy(),
                metric_attributes={
                    GenAIAttributes.GEN_AI_OPERATION_NAME: operation_name
                },
            )
        )

        try:
            result = wrapped(*args, **kwargs)
            # Raw-response wrappers expose `.parse()`; unwrap so the
            # extractors operate on the typed Response object.
            if hasattr(result, "parse"):
                parsed_result = result.parse()
            else:
                parsed_result = result

            if streaming:
                # Streaming: the wrapper is responsible for finishing the
                # invocation once the stream is consumed or closed.
                return ResponseStreamWrapper(
                    parsed_result,
                    handler,
                    invocation,
                    capture_content,
                )

            _set_invocation_response_attributes(
                invocation, parsed_result, capture_content
            )
            handler.stop_llm(invocation)
            # Return the original (possibly raw) result so callers see
            # exactly what the unwrapped method would have returned.
            return result

        except Exception as error:
            # Record the failure on the invocation, then re-raise so the
            # caller's own error handling is unaffected.
            handler.fail_llm(
                invocation, Error(message=str(error), type=type(error))
            )
            raise

    return traced_method
Uh oh!
There was an error while loading. Please reload this page.