From dd166a8fea49118f14c579086718f443a4b980ad Mon Sep 17 00:00:00 2001
From: Anton Pirker
Date: Wed, 13 Aug 2025 14:21:06 +0200
Subject: [PATCH 1/2] updated tool span

---
 sentry_sdk/integrations/langchain.py | 43 ++++++++++++++++++++----------
 1 file changed, 28 insertions(+), 15 deletions(-)

diff --git a/sentry_sdk/integrations/langchain.py b/sentry_sdk/integrations/langchain.py
index 0b377169d0..36e07dbe95 100644
--- a/sentry_sdk/integrations/langchain.py
+++ b/sentry_sdk/integrations/langchain.py
@@ -257,6 +257,7 @@ def on_llm_start(
     ):
         # type: (SentryLangchainCallback, Dict[str, Any], List[str], UUID, Optional[List[str]], Optional[UUID], Optional[Dict[str, Any]], Any) -> Any
         """Run when LLM starts running."""
+        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -286,6 +287,7 @@ def on_llm_start(
     def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Dict[str, Any], List[List[BaseMessage]], UUID, Any) -> Any
         """Run when Chat Model starts running."""
+        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -327,6 +329,7 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
     def on_chat_model_end(self, response, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, LLMResult, UUID, Any) -> Any
         """Run when Chat Model ends running."""
+        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -395,6 +398,7 @@ def on_llm_new_token(self, token, *, run_id, **kwargs):
     def on_llm_end(self, response, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, LLMResult, UUID, Any) -> Any
         """Run when LLM ends running."""
+        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -450,23 +454,27 @@ def on_llm_end(self, response, *, run_id, **kwargs):
     def on_llm_error(self, error, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
         """Run when LLM errors."""
+        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             self._handle_error(run_id, error)
 
     def on_chat_model_error(self, error, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
         """Run when Chat Model errors."""
+        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             self._handle_error(run_id, error)
 
     def on_chain_start(self, serialized, inputs, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Dict[str, Any], Dict[str, Any], UUID, Any) -> Any
         """Run when chain starts running."""
+        # import ipdb; ipdb.set_trace()
         pass
 
     def on_chain_end(self, outputs, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Dict[str, Any], UUID, Any) -> Any
         """Run when chain ends running."""
+        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id or run_id not in self.span_map:
                 return
@@ -479,10 +487,12 @@ def on_chain_end(self, outputs, *, run_id, **kwargs):
     def on_chain_error(self, error, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
         """Run when chain errors."""
+        # import ipdb; ipdb.set_trace()
         self._handle_error(run_id, error)
 
     def on_agent_action(self, action, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, AgentAction, UUID, Any) -> Any
+        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -502,6 +512,7 @@ def on_agent_action(self, action, *, run_id, **kwargs):
 
     def on_agent_finish(self, finish, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, AgentFinish, UUID, Any) -> Any
+        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -523,28 +534,31 @@ def on_tool_start(self, serialized, input_str, *, run_id, **kwargs):
         with capture_internal_exceptions():
             if not run_id:
                 return
+
+            tool_name = serialized.get("name") or kwargs.get("name")
+
             watched_span = self._create_span(
                 run_id,
                 kwargs.get("parent_run_id"),
                 op=OP.GEN_AI_EXECUTE_TOOL,
-                name=serialized.get("name") or kwargs.get("name") or "AI tool usage",
+                name=f"execute_tool {tool_name}",
                 origin=LangchainIntegration.origin,
             )
-            watched_span.span.set_data(
-                SPANDATA.GEN_AI_TOOL_NAME, serialized.get("name")
-            )
+            span = watched_span.span
+
+            span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "execute_tool")
+            span.set_data(SPANDATA.GEN_AI_TOOL_NAME, tool_name)
+
+            tool_description = serialized.get("description")
+            if tool_description is not None:
+                span.set_data(SPANDATA.GEN_AI_TOOL_DESCRIPTION, tool_description)
+
             if should_send_default_pii() and self.include_prompts:
                 set_data_normalized(
-                    watched_span.span,
-                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                    span,
+                    SPANDATA.GEN_AI_TOOL_INPUT,
                     kwargs.get("inputs", [input_str]),
                 )
-                if kwargs.get("metadata"):
-                    set_data_normalized(
-                        watched_span.span,
-                        SPANDATA.GEN_AI_REQUEST_METADATA,
-                        kwargs.get("metadata"),
-                    )
 
     def on_tool_end(self, output, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, str, UUID, Any) -> Any
@@ -557,14 +571,13 @@ def on_tool_end(self, output, *, run_id, **kwargs):
             if not span_data:
                 return
             if should_send_default_pii() and self.include_prompts:
-                set_data_normalized(
-                    span_data.span, SPANDATA.GEN_AI_RESPONSE_TEXT, output
-                )
+                set_data_normalized(span_data.span, SPANDATA.GEN_AI_TOOL_OUTPUT, output)
             self._exit_span(span_data, run_id)
 
     def on_tool_error(self, error, *args, run_id, **kwargs):
         # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
         """Run when tool errors."""
+        # import ipdb; ipdb.set_trace()
         # TODO(shellmayr): how to correctly set the status when the toolfails
         if run_id and run_id in self.span_map:
             span_data = self.span_map[run_id]

From adaabbf7e9b310486d44ab0a445164ae66ed4988 Mon Sep 17 00:00:00 2001
From: Anton Pirker
Date: Wed, 13 Aug 2025 14:23:11 +0200
Subject: [PATCH 2/2] .
---
 sentry_sdk/integrations/langchain.py | 15 ++-------------
 1 file changed, 2 insertions(+), 13 deletions(-)

diff --git a/sentry_sdk/integrations/langchain.py b/sentry_sdk/integrations/langchain.py
index 36e07dbe95..b26d752bf3 100644
--- a/sentry_sdk/integrations/langchain.py
+++ b/sentry_sdk/integrations/langchain.py
@@ -257,7 +257,6 @@ def on_llm_start(
     ):
         # type: (SentryLangchainCallback, Dict[str, Any], List[str], UUID, Optional[List[str]], Optional[UUID], Optional[Dict[str, Any]], Any) -> Any
         """Run when LLM starts running."""
-        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -287,7 +286,6 @@ def on_llm_start(
     def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Dict[str, Any], List[List[BaseMessage]], UUID, Any) -> Any
         """Run when Chat Model starts running."""
-        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -329,7 +327,6 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
     def on_chat_model_end(self, response, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, LLMResult, UUID, Any) -> Any
         """Run when Chat Model ends running."""
-        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -398,7 +395,6 @@ def on_llm_new_token(self, token, *, run_id, **kwargs):
     def on_llm_end(self, response, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, LLMResult, UUID, Any) -> Any
         """Run when LLM ends running."""
-        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -454,27 +450,23 @@ def on_llm_end(self, response, *, run_id, **kwargs):
     def on_llm_error(self, error, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
         """Run when LLM errors."""
-        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             self._handle_error(run_id, error)
 
     def on_chat_model_error(self, error, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
         """Run when Chat Model errors."""
-        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             self._handle_error(run_id, error)
 
     def on_chain_start(self, serialized, inputs, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Dict[str, Any], Dict[str, Any], UUID, Any) -> Any
         """Run when chain starts running."""
-        # import ipdb; ipdb.set_trace()
         pass
 
     def on_chain_end(self, outputs, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Dict[str, Any], UUID, Any) -> Any
         """Run when chain ends running."""
-        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id or run_id not in self.span_map:
                 return
@@ -487,12 +479,10 @@ def on_chain_end(self, outputs, *, run_id, **kwargs):
     def on_chain_error(self, error, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
         """Run when chain errors."""
-        # import ipdb; ipdb.set_trace()
         self._handle_error(run_id, error)
 
     def on_agent_action(self, action, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, AgentAction, UUID, Any) -> Any
-        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -512,7 +502,6 @@ def on_agent_action(self, action, *, run_id, **kwargs):
 
     def on_agent_finish(self, finish, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, AgentFinish, UUID, Any) -> Any
-        # import ipdb; ipdb.set_trace()
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -577,12 +566,12 @@ def on_tool_end(self, output, *, run_id, **kwargs):
     def on_tool_error(self, error, *args, run_id, **kwargs):
         # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
         """Run when tool errors."""
-        # import ipdb; ipdb.set_trace()
-        # TODO(shellmayr): how to correctly set the status when the toolfails
+        # TODO(shellmayr): how to correctly set the status when the tool fails?
         if run_id and run_id in self.span_map:
             span_data = self.span_map[run_id]
             if span_data:
                 span_data.span.set_status("unknown")
+
         self._handle_error(run_id, error)
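
Reviewer note (not part of the patches above): the sketch below is one way to exercise the reworked on_tool_start / on_tool_end callbacks locally and check the new execute_tool span attributes. It is a minimal sketch, assuming langchain is installed, that SentryLangchainCallback can still be constructed with (max_span_map_size, include_prompts), and that driving the handler directly inside a transaction is acceptable for a quick check; the DSN and the "calculator" tool are placeholders, not part of the patch.

import uuid

import sentry_sdk
from sentry_sdk.integrations.langchain import (
    LangchainIntegration,
    SentryLangchainCallback,
)

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    send_default_pii=True,  # needed for tool input/output to be attached
    integrations=[LangchainIntegration(include_prompts=True)],
)

# Assumed constructor arguments; adjust if the handler signature differs.
handler = SentryLangchainCallback(max_span_map_size=100, include_prompts=True)

run_id = uuid.uuid4()
with sentry_sdk.start_transaction(op="demo", name="tool-span-demo"):
    # Simulates LangChain starting and finishing a tool named "calculator".
    handler.on_tool_start(
        {"name": "calculator", "description": "Evaluates arithmetic expressions."},
        "21 * 2",
        run_id=run_id,
    )
    handler.on_tool_end("42", run_id=run_id)

# Expected outcome under these assumptions: a child span with op
# "gen_ai.execute_tool" named "execute_tool calculator", carrying the tool
# name, operation name, tool description, and (with PII enabled) the tool
# input and output data set by the patched callbacks.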