@@ -78,6 +78,9 @@ def setup_once():
             AgentExecutor.invoke = _wrap_agent_executor_invoke(AgentExecutor.invoke)
             AgentExecutor.stream = _wrap_agent_executor_stream(AgentExecutor.stream)
 
+            AgentExecutor.ainvoke = _wrap_agent_executor_ainvoke(AgentExecutor.ainvoke)
+            AgentExecutor.astream = _wrap_agent_executor_astream(AgentExecutor.astream)
+
 
 class WatchedSpan:
     span = None  # type: Span
@@ -768,3 +771,138 @@ async def new_iterator_async():
         return result
 
     return new_stream
+
+
+def _wrap_agent_executor_ainvoke(f):
+    # type: (Callable[..., Any]) -> Callable[..., Any]
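+    # Async counterpart of _wrap_agent_executor_invoke: wraps AgentExecutor.ainvoke
+    # in a gen_ai.invoke_agent span and records the agent name, available tools,
+    # and (when PII is allowed) the prompt and final output.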
+
+    @wraps(f)
+    async def new_ainvoke(self, *args, **kwargs):
+        # type: (Any, Any, Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(LangchainIntegration)
+        if integration is None:
+            return await f(self, *args, **kwargs)
+
+        agent_name, tools = _get_request_data(self, args, kwargs)
+
+        with sentry_sdk.start_span(
+            op=OP.GEN_AI_INVOKE_AGENT,
+            name=f"invoke_agent {agent_name}" if agent_name else "invoke_agent",
+            origin=LangchainIntegration.origin,
+        ) as span:
+            if agent_name:
+                span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent_name)
+
+            span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
+            span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, False)
+
+            if tools:
+                set_data_normalized(
+                    span, SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS, tools, unpack=False
+                )
+
+            # Run the agent
+            result = await f(self, *args, **kwargs)
+
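+            # Only attach prompt and output data when both send_default_pii and
+            # include_prompts are enabled.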
+            input = result.get("input")
+            if (
+                input is not None
+                and should_send_default_pii()
+                and integration.include_prompts
+            ):
+                set_data_normalized(
+                    span,
+                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                    [
+                        input,
+                    ],
+                )
+
+            output = result.get("output")
+            if (
+                output is not None
+                and should_send_default_pii()
+                and integration.include_prompts
+            ):
+                span.set_data(SPANDATA.GEN_AI_RESPONSE_TEXT, output)
+
+            return result
+
+    new_ainvoke.__wrapped__ = True
+    return new_ainvoke
+
+
+def _wrap_agent_executor_astream(f):
+    # type: (Callable[..., Any]) -> Callable[..., Any]
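+    # Async counterpart of _wrap_agent_executor_stream: wraps AgentExecutor.astream,
+    # opens a gen_ai.invoke_agent span up front, and closes it once the returned
+    # async iterator has been fully consumed.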
+
+    @wraps(f)
+    def new_astream(self, *args, **kwargs):
+        # type: (Any, Any, Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(LangchainIntegration)
+        if integration is None:
+            return f(self, *args, **kwargs)
+
+        agent_name, tools = _get_request_data(self, args, kwargs)
+
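+        # The span is entered manually here and exited in the wrapping async
+        # iterator below, so it stays open for the lifetime of the stream.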
+        span = sentry_sdk.start_span(
+            op=OP.GEN_AI_INVOKE_AGENT,
+            name=f"invoke_agent {agent_name}".strip(),
+            origin=LangchainIntegration.origin,
+        )
+        span.__enter__()
+
+        if agent_name:
+            span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent_name)
+
+        span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
+        span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)
+
+        if tools:
+            set_data_normalized(
+                span, SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS, tools, unpack=False
+            )
+
+        input = args[0].get("input") if len(args) >= 1 else None
+        if (
+            input is not None
+            and should_send_default_pii()
+            and integration.include_prompts
+        ):
+            set_data_normalized(
+                span,
+                SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                [
+                    input,
+                ],
+            )
+
+        # Run the agent - this returns an async iterator
+        result = f(self, *args, **kwargs)
+
+        old_iterator = result
+
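+        # Re-yield every event from the underlying iterator; once it is exhausted
+        # (or fails), record the last event's output if allowed and close the span.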
+        async def new_iterator_async():
+            # type: () -> AsyncIterator[Any]
+            event = None
+            try:
+                async for event in old_iterator:
+                    yield event
+            finally:
+                try:
+                    output = event.get("output") if event else None
+                except Exception:
+                    output = None
+
+                if (
+                    output is not None
+                    and should_send_default_pii()
+                    and integration.include_prompts
+                ):
+                    span.set_data(SPANDATA.GEN_AI_RESPONSE_TEXT, output)
+
+                span.__exit__(None, None, None)
+
+        return new_iterator_async()
+
+    new_astream.__wrapped__ = True
+    return new_astream