Skip to content

Commit 8d64be5

Browse files
committed
Fix langchain merge
* DON'T USE a context manager, since we're doing manual span management
1 parent ba20a39 commit 8d64be5

File tree

1 file changed

+71
-53
lines changed

1 file changed

+71
-53
lines changed

sentry_sdk/integrations/langchain.py

Lines changed: 71 additions & 53 deletions
Original file line number · Diff line number · Diff line change
@@ -65,7 +65,9 @@ class LangchainIntegration(Integration):
6565
# The most number of spans (e.g., LLM calls) that can be processed at the same time.
6666
max_spans = 1024
6767

68-
def __init__(self, include_prompts: bool = True, max_spans: int = 1024) -> None:
68+
def __init__(
69+
self: LangchainIntegration, include_prompts: bool = True, max_spans: int = 1024
70+
) -> None:
6971
self.include_prompts = include_prompts
7072
self.max_spans = max_spans
7173

@@ -79,7 +81,7 @@ def setup_once() -> None:
7981

8082

8183
class WatchedSpan:
82-
span: Optional[Span] = None
84+
span: Span
8385
children: List[WatchedSpan] = []
8486
is_pipeline: bool = False
8587

@@ -96,6 +98,7 @@ def __init__(self, max_span_map_size: int, include_prompts: bool) -> None:
9698
self.include_prompts = include_prompts
9799

98100
def gc_span_map(self) -> None:
101+
99102
while len(self.span_map) > self.max_span_map_size:
100103
run_id, watched_span = self.span_map.popitem(last=False)
101104
self._exit_span(watched_span, run_id)
@@ -105,13 +108,12 @@ def _handle_error(self, run_id: UUID, error: Any) -> None:
105108
if not run_id or run_id not in self.span_map:
106109
return
107110

111+
sentry_sdk.capture_exception(error)
112+
108113
span_data = self.span_map[run_id]
109114
span = span_data.span
110-
span.set_status("unknown")
111-
112-
sentry_sdk.capture_exception(error, span.scope)
113-
114-
span.__exit__(None, None, None)
115+
span.set_status(SPANSTATUS.INTERNAL_ERROR)
116+
span.finish()
115117
del self.span_map[run_id]
116118

117119
def _normalize_langchain_message(self, message: BaseMessage) -> Any:
@@ -120,23 +122,28 @@ def _normalize_langchain_message(self, message: BaseMessage) -> Any:
120122
return parsed
121123

122124
def _create_span(
123-
self, run_id: UUID, parent_id: Optional[UUID], **kwargs: Any
125+
self: SentryLangchainCallback,
126+
run_id: UUID,
127+
parent_id: Optional[Any],
128+
**kwargs: Any,
124129
) -> WatchedSpan:
125-
watched_span: Optional[WatchedSpan] = None
126-
if parent_id:
127-
parent_span: Optional[WatchedSpan] = self.span_map.get(parent_id)
128-
if parent_span:
129-
watched_span = WatchedSpan(parent_span.span.start_child(**kwargs))
130-
parent_span.children.append(watched_span)
131-
132-
if watched_span is None:
133-
watched_span = WatchedSpan(sentry_sdk.start_span(**kwargs))
130+
parent_watched_span = self.span_map.get(parent_id) if parent_id else None
131+
sentry_span = sentry_sdk.start_span(
132+
parent_span=parent_watched_span.span if parent_watched_span else None,
133+
only_as_child_span=True,
134+
**kwargs,
135+
)
136+
watched_span = WatchedSpan(sentry_span)
137+
if parent_watched_span:
138+
parent_watched_span.children.append(watched_span)
134139

135-
watched_span.span.__enter__()
140+
self.span_map[run_id] = watched_span
136141
self.gc_span_map()
137142
return watched_span
138143

139-
def _exit_span(self, span_data: WatchedSpan, run_id: UUID) -> None:
144+
def _exit_span(
145+
self: SentryLangchainCallback, span_data: WatchedSpan, run_id: UUID
146+
) -> None:
140147
if span_data.is_pipeline:
141148
set_ai_pipeline_name(None)
142149

@@ -180,16 +187,16 @@ def on_llm_start(
180187
span = watched_span.span
181188

182189
if model:
183-
span.set_attribute(
190+
span.set_data(
184191
SPANDATA.GEN_AI_REQUEST_MODEL,
185192
model,
186193
)
187194

188195
ai_type = all_params.get("_type", "")
189196
if "anthropic" in ai_type:
190-
span.set_attribute(SPANDATA.GEN_AI_SYSTEM, "anthropic")
197+
span.set_data(SPANDATA.GEN_AI_SYSTEM, "anthropic")
191198
elif "openai" in ai_type:
192-
span.set_attribute(SPANDATA.GEN_AI_SYSTEM, "openai")
199+
span.set_data(SPANDATA.GEN_AI_SYSTEM, "openai")
193200

194201
for key, attribute in DATA_FIELDS.items():
195202
if key in all_params and all_params[key] is not None:
@@ -230,15 +237,15 @@ def on_chat_model_start(
230237
)
231238
span = watched_span.span
232239

233-
span.set_attribute(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
240+
span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
234241
if model:
235-
span.set_attribute(SPANDATA.GEN_AI_REQUEST_MODEL, model)
242+
span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model)
236243

237244
ai_type = all_params.get("_type", "")
238245
if "anthropic" in ai_type:
239-
span.set_attribute(SPANDATA.GEN_AI_SYSTEM, "anthropic")
246+
span.set_data(SPANDATA.GEN_AI_SYSTEM, "anthropic")
240247
elif "openai" in ai_type:
241-
span.set_attribute(SPANDATA.GEN_AI_SYSTEM, "openai")
248+
span.set_data(SPANDATA.GEN_AI_SYSTEM, "openai")
242249

243250
for key, attribute in DATA_FIELDS.items():
244251
if key in all_params and all_params[key] is not None:
@@ -255,7 +262,11 @@ def on_chat_model_start(
255262
)
256263

257264
def on_chat_model_end(
258-
self, response: LLMResult, *, run_id: UUID, **kwargs: Any
265+
self: SentryLangchainCallback,
266+
response: LLMResult,
267+
*,
268+
run_id: UUID,
269+
**kwargs: Any,
259270
) -> Any:
260271
"""Run when Chat Model ends running."""
261272
with capture_internal_exceptions():
@@ -299,16 +310,14 @@ def on_llm_end(
299310
try:
300311
response_model = generation.generation_info.get("model_name")
301312
if response_model is not None:
302-
span.set_attribute(
303-
SPANDATA.GEN_AI_RESPONSE_MODEL, response_model
304-
)
313+
span.set_data(SPANDATA.GEN_AI_RESPONSE_MODEL, response_model)
305314
except AttributeError:
306315
pass
307316

308317
try:
309318
finish_reason = generation.generation_info.get("finish_reason")
310319
if finish_reason is not None:
311-
span.set_attribute(
320+
span.set_data(
312321
SPANDATA.GEN_AI_RESPONSE_FINISH_REASONS, finish_reason
313322
)
314323
except AttributeError:
@@ -347,13 +356,21 @@ def on_llm_error(
347356
self._handle_error(run_id, error)
348357

349358
def on_chat_model_error(
350-
self, error: Union[Exception, KeyboardInterrupt], *, run_id: UUID, **kwargs: Any
359+
self: SentryLangchainCallback,
360+
error: Union[Exception, KeyboardInterrupt],
361+
*,
362+
run_id: UUID,
363+
**kwargs: Any,
351364
) -> Any:
352365
"""Run when Chat Model errors."""
353366
self._handle_error(run_id, error)
354367

355368
def on_agent_finish(
356-
self, finish: AgentFinish, *, run_id: UUID, **kwargs: Any
369+
self: SentryLangchainCallback,
370+
finish: AgentFinish,
371+
*,
372+
run_id: UUID,
373+
**kwargs: Any,
357374
) -> Any:
358375
with capture_internal_exceptions():
359376
if not run_id or run_id not in self.span_map:
@@ -395,12 +412,12 @@ def on_tool_start(
395412
)
396413
span = watched_span.span
397414

398-
span.set_attribute(SPANDATA.GEN_AI_OPERATION_NAME, "execute_tool")
399-
span.set_attribute(SPANDATA.GEN_AI_TOOL_NAME, tool_name)
415+
span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "execute_tool")
416+
span.set_data(SPANDATA.GEN_AI_TOOL_NAME, tool_name)
400417

401418
tool_description = serialized.get("description")
402419
if tool_description is not None:
403-
span.set_attribute(SPANDATA.GEN_AI_TOOL_DESCRIPTION, tool_description)
420+
span.set_data(SPANDATA.GEN_AI_TOOL_DESCRIPTION, tool_description)
404421

405422
if should_send_default_pii() and self.include_prompts:
406423
set_data_normalized(
@@ -519,13 +536,13 @@ def _record_token_usage(span: Span, response: Any) -> None:
519536
)
520537

521538
if input_tokens is not None:
522-
span.set_attribute(SPANDATA.GEN_AI_USAGE_INPUT_TOKENS, input_tokens)
539+
span.set_data(SPANDATA.GEN_AI_USAGE_INPUT_TOKENS, input_tokens)
523540

524541
if output_tokens is not None:
525-
span.set_attribute(SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS, output_tokens)
542+
span.set_data(SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS, output_tokens)
526543

527544
if total_tokens is not None:
528-
span.set_attribute(SPANDATA.GEN_AI_USAGE_TOTAL_TOKENS, total_tokens)
545+
span.set_data(SPANDATA.GEN_AI_USAGE_TOTAL_TOKENS, total_tokens)
529546

530547

531548
def _get_request_data(
@@ -642,7 +659,7 @@ def new_configure(
642659
def _wrap_agent_executor_invoke(f: Callable[..., Any]) -> Callable[..., Any]:
643660

644661
@wraps(f)
645-
def new_invoke(self, *args: Any, **kwargs: Any) -> Any:
662+
def new_invoke(self: Any, *args: Any, **kwargs: Any) -> Any:
646663
integration = sentry_sdk.get_client().get_integration(LangchainIntegration)
647664
if integration is None:
648665
return f(self, *args, **kwargs)
@@ -655,10 +672,10 @@ def new_invoke(self, *args: Any, **kwargs: Any) -> Any:
655672
origin=LangchainIntegration.origin,
656673
) as span:
657674
if agent_name:
658-
span.set_attribute(SPANDATA.GEN_AI_AGENT_NAME, agent_name)
675+
span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent_name)
659676

660-
span.set_attribute(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
661-
span.set_attribute(SPANDATA.GEN_AI_RESPONSE_STREAMING, False)
677+
span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
678+
span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, False)
662679

663680
if tools:
664681
set_data_normalized(
@@ -688,7 +705,7 @@ def new_invoke(self, *args: Any, **kwargs: Any) -> Any:
688705
and should_send_default_pii()
689706
and integration.include_prompts
690707
):
691-
span.set_attribute(SPANDATA.GEN_AI_RESPONSE_TEXT, output)
708+
span.set_data(SPANDATA.GEN_AI_RESPONSE_TEXT, output)
692709

693710
return result
694711

@@ -698,7 +715,7 @@ def new_invoke(self, *args: Any, **kwargs: Any) -> Any:
698715
def _wrap_agent_executor_stream(f: Callable[..., Any]) -> Callable[..., Any]:
699716

700717
@wraps(f)
701-
def new_stream(self, *args: Any, **kwargs: Any) -> Any:
718+
def new_stream(self: Any, *args: Any, **kwargs: Any) -> Any:
702719
integration = sentry_sdk.get_client().get_integration(LangchainIntegration)
703720
if integration is None:
704721
return f(self, *args, **kwargs)
@@ -710,13 +727,12 @@ def new_stream(self, *args: Any, **kwargs: Any) -> Any:
710727
name=f"invoke_agent {agent_name}".strip(),
711728
origin=LangchainIntegration.origin,
712729
)
713-
span.__enter__()
714730

715731
if agent_name:
716-
span.set_attribute(SPANDATA.GEN_AI_AGENT_NAME, agent_name)
732+
span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent_name)
717733

718-
span.set_attribute(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
719-
span.set_attribute(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)
734+
span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
735+
span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)
720736

721737
if tools:
722738
set_data_normalized(
@@ -756,9 +772,10 @@ def new_iterator() -> Iterator[Any]:
756772
and should_send_default_pii()
757773
and integration.include_prompts
758774
):
759-
span.set_attribute(SPANDATA.GEN_AI_RESPONSE_TEXT, output)
775+
span.set_data(SPANDATA.GEN_AI_RESPONSE_TEXT, output)
760776

761-
span.__exit__(None, None, None)
777+
span.set_status(SPANSTATUS.OK)
778+
span.finish()
762779

763780
async def new_iterator_async() -> AsyncIterator[Any]:
764781
async for event in old_iterator:
@@ -774,9 +791,10 @@ async def new_iterator_async() -> AsyncIterator[Any]:
774791
and should_send_default_pii()
775792
and integration.include_prompts
776793
):
777-
span.set_attribute(SPANDATA.GEN_AI_RESPONSE_TEXT, output)
794+
span.set_data(SPANDATA.GEN_AI_RESPONSE_TEXT, output)
778795

779-
span.__exit__(None, None, None)
796+
span.set_status(SPANSTATUS.OK)
797+
span.finish()
780798

781799
if str(type(result)) == "<class 'async_generator'>":
782800
result = new_iterator_async()

0 commit comments

Comments (0)