|
27 | 27 | Callbacks, |
28 | 28 | ) |
29 | 29 | from langchain_core.agents import AgentAction, AgentFinish |
| 30 | + from langchain.agents import AgentExecutor |
| 31 | + |
30 | 32 | except ImportError: |
31 | 33 | raise DidNotEnable("langchain not installed") |
32 | 34 |
|
@@ -64,6 +66,10 @@ def setup_once(): |
64 | 66 | # type: () -> None |
65 | 67 | manager._configure = _wrap_configure(manager._configure) |
66 | 68 |
|
| 69 | + if AgentExecutor is not None: |
| 70 | + AgentExecutor.invoke = _wrap_agent_executor_invoke(AgentExecutor.invoke) |
| 71 | + AgentExecutor.stream = _wrap_agent_executor_stream(AgentExecutor.stream) |
| 72 | + |
67 | 73 |
|
68 | 74 | class WatchedSpan: |
69 | 75 | span = None # type: Span |
@@ -257,7 +263,7 @@ def on_llm_start( |
257 | 263 |
|
258 | 264 | watched_span = self._create_span( |
259 | 265 | run_id=run_id, |
260 | | - parent_id=kwargs.get("parent_run_id"), |
| 266 | + parent_id=parent_run_id, |
261 | 267 | op=OP.GEN_AI_PIPELINE, |
262 | 268 | name=kwargs.get("name") or "Langchain LLM call", |
263 | 269 | origin=LangchainIntegration.origin, |
@@ -474,22 +480,7 @@ def on_chain_error(self, error, *, run_id, **kwargs): |
474 | 480 |
|
    def on_agent_action(self, action, *, run_id, **kwargs):
        # type: (SentryLangchainCallback, AgentAction, UUID, Any) -> Any
        # Intentionally a no-op: this callback no longer creates a span per
        # agent action.
        # NOTE(review): presumably the agent run is now traced as a whole by
        # the AgentExecutor.invoke/stream wrappers (_wrap_agent_executor_*),
        # so per-action GEN_AI_INVOKE_AGENT spans would be redundant — confirm.
        pass
493 | 484 |
|
494 | 485 | def on_agent_finish(self, finish, *, run_id, **kwargs): |
495 | 486 | # type: (SentryLangchainCallback, AgentFinish, UUID, Any) -> Any |
@@ -648,3 +639,63 @@ def new_configure( |
648 | 639 | ) |
649 | 640 |
|
650 | 641 | return new_configure |
| 642 | + |
| 643 | + |
def _wrap_agent_executor_invoke(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap ``AgentExecutor.invoke`` so a whole agent run is one traced unit.

    Opens a ``GEN_AI_INVOKE_AGENT`` span around the original call; spans
    created by the Langchain callbacks during the run nest under it. When the
    integration is not enabled, the original method is called untouched.
    """

    @wraps(f)
    def new_invoke(self, *args, **kwargs):
        # type: (Any, Any, Any) -> Any
        integration = sentry_sdk.get_client().get_integration(LangchainIntegration)
        if integration is None:
            # Integration disabled — pass straight through.
            return f(self, *args, **kwargs)

        with sentry_sdk.start_span(
            op=OP.GEN_AI_INVOKE_AGENT,
            name="AgentExecutor.invoke",
            origin=LangchainIntegration.origin,
        ) as span:
            span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")

            # Best-effort model attribution from the wrapped agent's LLM;
            # missing attributes simply leave the data point unset.
            llm = getattr(getattr(self, "agent", None), "llm", None)
            if llm is not None:
                model = getattr(llm, "model_name", None) or getattr(llm, "model", None)
                if model:
                    span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model)

            return f(self, *args, **kwargs)

    return new_invoke
| 672 | + |
| 673 | + |
def _wrap_agent_executor_stream(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap ``AgentExecutor.stream`` so the span covers the whole stream.

    ``stream`` returns a lazy iterator, so the span cannot be managed with a
    ``with`` block around the call: the context manager would exit as soon as
    ``f`` returns the iterator, closing the span before any streaming work
    happens. Instead the span is entered manually and closed when the
    returned iterator is exhausted (or if the initial call raises).
    """

    @wraps(f)
    def new_stream(self, *args, **kwargs):
        # type: (Any, Any, Any) -> Any
        integration = sentry_sdk.get_client().get_integration(LangchainIntegration)
        if integration is None:
            # Integration disabled — pass straight through.
            return f(self, *args, **kwargs)

        # Parent span for all callback-generated spans; kept open for the
        # lifetime of the returned iterator, not just the call to f().
        span = sentry_sdk.start_span(
            op=OP.GEN_AI_INVOKE_AGENT,
            name="AgentExecutor.stream",
            origin=LangchainIntegration.origin,
        )
        span.__enter__()
        span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")

        # Best-effort model attribution from the wrapped agent's LLM.
        llm = getattr(getattr(self, "agent", None), "llm", None)
        if llm is not None:
            model = getattr(llm, "model_name", None) or getattr(llm, "model", None)
            if model:
                span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model)

        try:
            result = f(self, *args, **kwargs)
        except Exception:
            # The stream never started — close the span before propagating.
            span.__exit__(None, None, None)
            raise

        def _stream_with_span(iterable):
            # type: (Any) -> Any
            # Yield every streamed item, closing the span once the underlying
            # iterator finishes — normally, by error, or by early close.
            try:
                for item in iterable:
                    yield item
            finally:
                span.__exit__(None, None, None)

        return _stream_with_span(result)

    return new_stream
0 commit comments