@@ -66,6 +66,7 @@
 from opentelemetry.trace import SpanKind, Tracer, set_span_in_context
 from opentelemetry.trace.span import Span
 from opentelemetry.trace.status import Status, StatusCode
+from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
 
 
 def _extract_class_name_from_serialized(serialized: Optional[dict[str, Any]]) -> str:
@@ -240,7 +241,11 @@ def _create_span(
         # Set metadata as span attributes if available
         if metadata is not None:
             for key, value in sanitized_metadata.items():
-                _set_span_attribute(span, f"{SpanAttributes.TRACELOOP_ASSOCIATION_PROPERTIES}.{key}", value)
+                _set_span_attribute(
+                    span,
+                    f"{SpanAttributes.TRACELOOP_ASSOCIATION_PROPERTIES}.{key}",
+                    value,
+                )
 
         self.spans[run_id] = SpanHolder(
             span, token, None, [], workflow_name, entity_name, entity_path
@@ -300,7 +305,9 @@ def _create_llm_span(
             metadata=metadata,
         )
 
-        vendor = detect_vendor_from_class(_extract_class_name_from_serialized(serialized))
+        vendor = detect_vendor_from_class(
+            _extract_class_name_from_serialized(serialized)
+        )
 
         _set_span_attribute(span, SpanAttributes.LLM_SYSTEM, vendor)
         _set_span_attribute(span, SpanAttributes.LLM_REQUEST_TYPE, request_type.value)
@@ -425,7 +432,12 @@ def on_chat_model_start(
 
         name = self._get_name_from_callback(serialized, kwargs=kwargs)
         span = self._create_llm_span(
-            run_id, parent_run_id, name, LLMRequestTypeValues.CHAT, metadata=metadata, serialized=serialized
+            run_id,
+            parent_run_id,
+            name,
+            LLMRequestTypeValues.CHAT,
+            metadata=metadata,
+            serialized=serialized,
         )
         set_request_params(span, kwargs, self.spans[run_id])
         if should_emit_events():
@@ -451,7 +463,11 @@ def on_llm_start(
 
         name = self._get_name_from_callback(serialized, kwargs=kwargs)
         span = self._create_llm_span(
-            run_id, parent_run_id, name, LLMRequestTypeValues.COMPLETION, serialized=serialized
+            run_id,
+            parent_run_id,
+            name,
+            LLMRequestTypeValues.COMPLETION,
+            serialized=serialized,
         )
         set_request_params(span, kwargs, self.spans[run_id])
         if should_emit_events():
@@ -479,7 +495,9 @@ def on_llm_end(
                 "model_name"
             ) or response.llm_output.get("model_id")
             if model_name is not None:
-                _set_span_attribute(span, SpanAttributes.LLM_RESPONSE_MODEL, model_name or "unknown")
+                _set_span_attribute(
+                    span, SpanAttributes.LLM_RESPONSE_MODEL, model_name or "unknown"
+                )
 
             if self.spans[run_id].request_model is None:
                 _set_span_attribute(
@@ -539,7 +557,9 @@ def on_llm_end(
                     SpanAttributes.LLM_RESPONSE_MODEL: model_name or "unknown",
                 },
             )
-        set_chat_response_usage(span, response, self.token_histogram, token_usage is None, model_name)
+        set_chat_response_usage(
+            span, response, self.token_histogram, token_usage is None, model_name
+        )
         if should_emit_events():
             self._emit_llm_end_events(response)
         else:
@@ -705,6 +725,8 @@ def on_tool_error(
         **kwargs: Any,
     ) -> None:
         """Run when tool errors."""
+        span = self._get_span(run_id)
+        span.set_attribute(ERROR_TYPE, type(error).__name__)
         self._handle_error(error, run_id, parent_run_id, **kwargs)
 
     @dont_throw
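
Below is a minimal, self-contained sketch (not part of this commit) of what the new on_tool_error behavior records: the failing tool's span carries the semantic-convention ERROR_TYPE attribute (error.type) set to the exception class name. The span name, the ValueError, and the in-memory exporter setup are illustrative assumptions for demonstration; only the ERROR_TYPE import and the attribute value shape come from the diff above.

# Sketch only: mimics the new on_tool_error logic on a standalone span, then
# verifies the recorded attribute with OpenTelemetry's in-memory exporter.
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
from opentelemetry.trace.status import Status, StatusCode

exporter = InMemorySpanExporter()
provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(exporter))
tracer = provider.get_tracer("sketch")

with tracer.start_as_current_span("my_tool.tool") as span:  # hypothetical span name
    try:
        raise ValueError("tool blew up")  # stand-in for a failing LangChain tool
    except ValueError as error:
        # Same attribute the handler now sets before delegating to _handle_error.
        span.set_attribute(ERROR_TYPE, type(error).__name__)
        span.set_status(Status(StatusCode.ERROR, str(error)))

finished = exporter.get_finished_spans()[0]
assert finished.attributes[ERROR_TYPE] == "ValueError"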