@@ -10,7 +10,7 @@
 from sentry_sdk.scope import should_send_default_pii
 from sentry_sdk.tracing import Span
 from sentry_sdk.tracing_utils import _get_value
-from sentry_sdk.utils import logger, capture_internal_exceptions
+from sentry_sdk.utils import logger, capture_internal_exceptions, safe_serialize

 from typing import TYPE_CHECKING

@@ -51,7 +51,6 @@
     "presence_penalty": SPANDATA.GEN_AI_REQUEST_PRESENCE_PENALTY,
     "temperature": SPANDATA.GEN_AI_REQUEST_TEMPERATURE,
     "tool_calls": SPANDATA.GEN_AI_RESPONSE_TOOL_CALLS,
-    "tools": SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS,
     "top_k": SPANDATA.GEN_AI_REQUEST_TOP_K,
     "top_p": SPANDATA.GEN_AI_REQUEST_TOP_P,
 }
@@ -203,8 +202,18 @@ def on_llm_start(
                 if key in all_params and all_params[key] is not None:
                     set_data_normalized(span, attribute, all_params[key], unpack=False)

+            # Handle tools separately with simplified format
+            tools = all_params.get("tools")
+            if tools is not None:
+                simplified_tools = _simplify_langchain_tools(tools)
+                if simplified_tools:
+                    span.set_data(
+                        SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS,
+                        safe_serialize(simplified_tools),
+                    )
+
             if should_send_default_pii() and self.include_prompts:
-                set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MESSAGES, prompts)
+                span.set_data(SPANDATA.GEN_AI_REQUEST_MESSAGES, safe_serialize(prompts))

     def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Dict[str, Any], List[List[BaseMessage]], UUID, Any) -> Any
@@ -246,14 +255,27 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
                 if key in all_params and all_params[key] is not None:
                     set_data_normalized(span, attribute, all_params[key], unpack=False)

+            # Handle tools separately with simplified format
+            tools = all_params.get("tools")
+            if tools is not None:
+                simplified_tools = _simplify_langchain_tools(tools)
+                if simplified_tools:
+                    span.set_data(
+                        SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS,
+                        safe_serialize(simplified_tools),
+                    )
+
             if should_send_default_pii() and self.include_prompts:
-                set_data_normalized(
-                    span,
+                # Flatten the nested list structure to a single list of message dicts
+                normalized_messages = []
+                for list_ in messages:
+                    for message in list_:
+                        normalized_messages.append(
+                            self._normalize_langchain_message(message)
+                        )
+                span.set_data(
                     SPANDATA.GEN_AI_REQUEST_MESSAGES,
-                    [
-                        [self._normalize_langchain_message(x) for x in list_]
-                        for list_ in messages
-                    ],
+                    safe_serialize(normalized_messages),
                 )

     def on_chat_model_end(self, response, *, run_id, **kwargs):
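For context on the `on_chat_model_start` change above: the nested per-prompt message lists are now flattened into one list before being serialized onto the span. A minimal sketch of the equivalent transformation, with plain dicts standing in for normalized `BaseMessage` objects and `json.dumps` standing in for `safe_serialize`:

```python
import json

# Stand-ins for what self._normalize_langchain_message might return; the exact
# dict shape is assumed here, the point is only the flattening step.
messages = [
    [
        {"role": "system", "content": "You are helpful."},
        {"role": "user", "content": "Hi!"},
    ],
    [{"role": "user", "content": "Second prompt"}],
]

# Previously the span stored the nested list-of-lists; now it stores one flat
# list, serialized to a string (json.dumps stands in for safe_serialize here).
normalized_messages = [message for list_ in messages for message in list_]
print(json.dumps(normalized_messages))
```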
@@ -531,6 +553,79 @@ def _get_request_data(obj, args, kwargs):
     return (agent_name, tools)


+def _simplify_langchain_tools(tools):
+    # type: (Any) -> Optional[List[Any]]
+    """Parse and simplify tools into a cleaner format."""
+    if not tools:
+        return None
+
+    if not isinstance(tools, (list, tuple)):
+        return None
+
+    simplified_tools = []
+    for tool in tools:
+        try:
+            if isinstance(tool, dict):
+                # Handle OpenAI-style tool format
+                if "function" in tool and isinstance(tool["function"], dict):
+                    func = tool["function"]
+                    simplified_tool = {
+                        "name": func.get("name"),
+                        "description": func.get("description"),
+                    }
+                    if simplified_tool["name"]:  # Only add if name exists
+                        simplified_tools.append(simplified_tool)
+                # Handle direct tool dict format
+                elif "name" in tool:
+                    simplified_tool = {
+                        "name": tool.get("name"),
+                        "description": tool.get("description"),
+                    }
+                    simplified_tools.append(simplified_tool)
+                else:
+                    # Try to extract from any dict structure
+                    name = (
+                        tool.get("name")
+                        or tool.get("tool_name")
+                        or tool.get("function_name")
+                    )
+                    if name:
+                        simplified_tools.append(
+                            {
+                                "name": name,
+                                "description": tool.get("description")
+                                or tool.get("desc"),
+                            }
+                        )
+            elif hasattr(tool, "name"):
+                # Handle tool objects with a name attribute
+                simplified_tool = {
+                    "name": getattr(tool, "name", None),
+                    "description": getattr(tool, "description", None)
+                    or getattr(tool, "desc", None),
+                }
+                if simplified_tool["name"]:
+                    simplified_tools.append(simplified_tool)
+            elif hasattr(tool, "__name__"):
+                # Handle callable objects
+                simplified_tools.append(
+                    {
+                        "name": tool.__name__,
+                        "description": getattr(tool, "__doc__", None),
+                    }
+                )
+            else:
+                # Fallback - try to convert to string
+                tool_str = str(tool)
+                if tool_str and tool_str != "":
+                    simplified_tools.append({"name": tool_str, "description": None})
+        except Exception:
+            # Skip problematic tools rather than failing
+            continue
+
+    return simplified_tools if simplified_tools else None
+
+
 def _wrap_configure(f):
     # type: (Callable[..., Any]) -> Callable[..., Any]

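To make the new helper's intent concrete, here is a hedged sketch of inputs it may receive via `all_params["tools"]` and the reduced name/description pairs it should return; the tool names below are invented for illustration:

```python
def lookup_weather(city):
    """Look up the current weather for a city."""
    ...


tools = [
    # OpenAI-style function spec: handled by the "function" branch.
    {
        "type": "function",
        "function": {
            "name": "search_docs",
            "description": "Search the internal documentation.",
            "parameters": {
                "type": "object",
                "properties": {"query": {"type": "string"}},
            },
        },
    },
    # Plain callable: handled by the __name__/__doc__ branch.
    lookup_weather,
]

# Expected result of _simplify_langchain_tools(tools):
# [
#     {"name": "search_docs", "description": "Search the internal documentation."},
#     {"name": "lookup_weather", "description": "Look up the current weather for a city."},
# ]
```

Only this reduced list is passed through `safe_serialize` onto `SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS`; the full parameter schemas are dropped.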
@@ -639,9 +734,12 @@ def new_invoke(self, *args, **kwargs):
             span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, False)

             if tools:
-                set_data_normalized(
-                    span, SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS, tools, unpack=False
-                )
+                simplified_tools = _simplify_langchain_tools(tools)
+                if simplified_tools:
+                    span.set_data(
+                        SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS,
+                        safe_serialize(simplified_tools),
+                    )

             # Run the agent
             result = f(self, *args, **kwargs)
@@ -653,11 +751,7 @@ def new_invoke(self, *args, **kwargs):
                 and integration.include_prompts
             ):
                 set_data_normalized(
-                    span,
-                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
-                    [
-                        input,
-                    ],
+                    span, SPANDATA.GEN_AI_REQUEST_MESSAGES, safe_serialize([input])
                 )

             output = result.get("output")
@@ -699,23 +793,20 @@ def new_stream(self, *args, **kwargs):
             span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)

             if tools:
-                set_data_normalized(
-                    span, SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS, tools, unpack=False
-                )
+                simplified_tools = _simplify_langchain_tools(tools)
+                if simplified_tools:
+                    span.set_data(
+                        SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS,
+                        safe_serialize(simplified_tools),
+                    )

             input = args[0].get("input") if len(args) >= 1 else None
             if (
                 input is not None
                 and should_send_default_pii()
                 and integration.include_prompts
             ):
-                set_data_normalized(
-                    span,
-                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
-                    [
-                        input,
-                    ],
-                )
+                span.set_data(SPANDATA.GEN_AI_REQUEST_MESSAGES, safe_serialize([input]))

             # Run the agent
             result = f(self, *args, **kwargs)