 
 
 # Contextvar to track agent names in a stack for re-entrant agent support
-_agent_stack = contextvars.ContextVar("langchain_agent_stack", default=None)
+_agent_stack = contextvars.ContextVar("langchain_agent_stack", default=None)  # type: contextvars.ContextVar[Optional[List[Optional[str]]]]
 
 
 def _push_agent(agent_name):
@@ -84,6 +84,9 @@ def _push_agent(agent_name):
     stack = _agent_stack.get()
     if stack is None:
         stack = []
+    else:
+        # Copy the list to maintain contextvar isolation across async contexts
+        stack = stack.copy()
     stack.append(agent_name)
     _agent_stack.set(stack)
 
@@ -92,7 +95,9 @@ def _pop_agent():
     # type: () -> Optional[str]
     """Pop an agent name from the stack and return it."""
     stack = _agent_stack.get()
-    if stack and len(stack) > 0:
+    if stack:
+        # Copy the list to maintain contextvar isolation across async contexts
+        stack = stack.copy()
         agent_name = stack.pop()
         _agent_stack.set(stack)
         return agent_name
@@ -103,7 +108,7 @@ def _get_current_agent():
     # type: () -> Optional[str]
     """Get the current agent name (top of stack) without removing it."""
     stack = _agent_stack.get()
-    if stack and len(stack) > 0:
+    if stack:
         return stack[-1]
     return None
 
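
The copy-before-mutate pattern added above is what keeps the stack isolated between asyncio tasks: a `ContextVar` holding a list is shared by reference, so in-place `append`/`pop` would leak across tasks. Below is a rough standalone sketch of the behavior the change relies on; the `push`/`child` names are illustrative, not part of the integration.

```python
import asyncio
import contextvars
from typing import List, Optional

# Illustrative stand-in for the integration's _agent_stack
_stack = contextvars.ContextVar("stack", default=None)  # type: contextvars.ContextVar[Optional[List[str]]]


def push(name):
    # type: (str) -> None
    current = _stack.get()
    # Copy instead of mutating in place so tasks that inherited the old
    # list never observe this task's append.
    new_stack = [] if current is None else current.copy()
    new_stack.append(name)
    _stack.set(new_stack)


async def child(expected):
    # type: (List[str]) -> None
    push("child-agent")
    assert _stack.get() == expected + ["child-agent"]


async def main():
    # type: () -> None
    push("parent-agent")
    # Each task created by gather() runs in a copy of the parent's context;
    # because push() copies the list, the children's pushes stay isolated.
    await asyncio.gather(child(["parent-agent"]), child(["parent-agent"]))
    assert _stack.get() == ["parent-agent"]


asyncio.run(main())
```
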
@@ -807,47 +812,49 @@ def new_invoke(self, *args, **kwargs):
             origin=LangchainIntegration.origin,
         ) as span:
             _push_agent(agent_name)
-            if agent_name:
-                span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent_name)
-
-            span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
-            span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, False)
-
-            _set_tools_on_span(span, tools)
-
-            # Run the agent
-            result = f(self, *args, **kwargs)
-
-            input = result.get("input")
-            if (
-                input is not None
-                and should_send_default_pii()
-                and integration.include_prompts
-            ):
-                normalized_messages = normalize_message_roles([input])
-                scope = sentry_sdk.get_current_scope()
-                messages_data = truncate_and_annotate_messages(
-                    normalized_messages, span, scope
-                )
-                if messages_data is not None:
-                    set_data_normalized(
-                        span,
-                        SPANDATA.GEN_AI_REQUEST_MESSAGES,
-                        messages_data,
-                        unpack=False,
+            try:
+                if agent_name:
+                    span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent_name)
+
+                span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
+                span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, False)
+
+                _set_tools_on_span(span, tools)
+
+                # Run the agent
+                result = f(self, *args, **kwargs)
+
+                input = result.get("input")
+                if (
+                    input is not None
+                    and should_send_default_pii()
+                    and integration.include_prompts
+                ):
+                    normalized_messages = normalize_message_roles([input])
+                    scope = sentry_sdk.get_current_scope()
+                    messages_data = truncate_and_annotate_messages(
+                        normalized_messages, span, scope
                     )
+                    if messages_data is not None:
+                        set_data_normalized(
+                            span,
+                            SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                            messages_data,
+                            unpack=False,
+                        )
 
-            output = result.get("output")
-            if (
-                output is not None
-                and should_send_default_pii()
-                and integration.include_prompts
-            ):
-                set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, output)
-
-            _pop_agent()
+                output = result.get("output")
+                if (
+                    output is not None
+                    and should_send_default_pii()
+                    and integration.include_prompts
+                ):
+                    set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, output)
 
-            return result
+                return result
+            finally:
+                # Ensure agent is popped even if an exception occurs
+                _pop_agent()
 
     return new_invoke
 
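
A minimal sketch of why the pop moves into a `finally`: nested (re-entrant) agent invocations should unwind the stack even when an inner call raises, otherwise later spans would be attributed to the wrong agent. The `invoke` helper below is illustrative, not the integration's API.

```python
import contextvars
from typing import Callable, List, Optional

_agents = contextvars.ContextVar("agents", default=None)  # type: contextvars.ContextVar[Optional[List[str]]]


def _push(name):
    # type: (str) -> None
    stack = _agents.get()
    stack = [] if stack is None else stack.copy()
    stack.append(name)
    _agents.set(stack)


def _pop():
    # type: () -> Optional[str]
    stack = _agents.get()
    if stack:
        stack = stack.copy()
        name = stack.pop()
        _agents.set(stack)
        return name
    return None


def invoke(name, body):
    # type: (str, Callable[[], object]) -> object
    _push(name)
    try:
        return body()
    finally:
        # Without the finally, a raising body would leave `name` on the
        # stack for the rest of the request.
        _pop()


def inner():
    # type: () -> object
    raise RuntimeError("inner agent failed")


try:
    invoke("outer", lambda: invoke("inner", inner))
except RuntimeError:
    pass

assert _agents.get() == []  # stack fully unwound despite the exception
```
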
@@ -908,45 +915,47 @@ def new_stream(self, *args, **kwargs):
 
         def new_iterator():
             # type: () -> Iterator[Any]
-            for event in old_iterator:
-                yield event
-
             try:
-                output = event.get("output")
-            except Exception:
-                output = None
-
-            if (
-                output is not None
-                and should_send_default_pii()
-                and integration.include_prompts
-            ):
-                set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, output)
-
-            _pop_agent()
+                for event in old_iterator:
+                    yield event
 
-            span.__exit__(None, None, None)
+                try:
+                    output = event.get("output")
+                except Exception:
+                    output = None
+
+                if (
+                    output is not None
+                    and should_send_default_pii()
+                    and integration.include_prompts
+                ):
+                    set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, output)
+            finally:
+                # Ensure cleanup happens even if iterator is abandoned or fails
+                _pop_agent()
+                span.__exit__(None, None, None)
 
         async def new_iterator_async():
             # type: () -> AsyncIterator[Any]
-            async for event in old_iterator:
-                yield event
-
             try:
-                output = event.get("output")
-            except Exception:
-                output = None
-
-            if (
-                output is not None
-                and should_send_default_pii()
-                and integration.include_prompts
-            ):
-                set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, output)
+                async for event in old_iterator:
+                    yield event
 
-            _pop_agent()
-
-            span.__exit__(None, None, None)
+                try:
+                    output = event.get("output")
+                except Exception:
+                    output = None
+
+                if (
+                    output is not None
+                    and should_send_default_pii()
+                    and integration.include_prompts
+                ):
+                    set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, output)
+            finally:
+                # Ensure cleanup happens even if iterator is abandoned or fails
+                _pop_agent()
+                span.__exit__(None, None, None)
 
         if str(type(result)) == "<class 'async_generator'>":
             result = new_iterator_async()
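
The `try`/`finally` wrapped around both iterators leans on generator semantics: if the consumer stops early, Python raises `GeneratorExit` inside the generator when it is closed or garbage-collected, and the `finally` still runs, so `_pop_agent()` and `span.__exit__()` are not skipped. A standalone illustration, not the integration's code:

```python
from typing import Iterator, List


def wrapped(events):
    # type: (List[int]) -> Iterator[int]
    try:
        for event in events:
            yield event
    finally:
        # Runs on normal exhaustion, on an exception raised into the
        # generator, and on GeneratorExit when the iterator is closed
        # or abandoned before being fully consumed.
        print("cleanup ran")


it = wrapped([1, 2, 3])
next(it)    # consume only the first event
it.close()  # prints "cleanup ran" even though iteration never finished
```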