@@ -418,11 +418,6 @@ def on_chat_model_end(self, response, *, run_id, **kwargs):
418 418
419 419         self._exit_span(span_data, run_id)
420 420
421     -     def on_llm_new_token(self, token, *, run_id, **kwargs):
422     -         # type: (SentryLangchainCallback, str, UUID, Any) -> Any
423     -         """Run on new LLM token. Only available when streaming is enabled."""
424     -         pass
425     -
426 421     def on_llm_end(self, response, *, run_id, **kwargs):
427 422         # type: (SentryLangchainCallback, LLMResult, UUID, Any) -> Any
428 423         """Run when LLM ends running."""
@@ -520,26 +515,6 @@ def on_chat_model_error(self, error, *, run_id, **kwargs):
520 515         """Run when Chat Model errors."""
521 516         self._handle_error(run_id, error)
522 517
523     -     def on_chain_start(self, serialized, inputs, *, run_id, **kwargs):
524     -         # type: (SentryLangchainCallback, Dict[str, Any], Dict[str, Any], UUID, Any) -> Any
525     -         """Run when chain starts running."""
526     -         pass
527     -
528     -     def on_chain_end(self, outputs, *, run_id, **kwargs):
529     -         # type: (SentryLangchainCallback, Dict[str, Any], UUID, Any) -> Any
530     -         """Run when chain ends running."""
531     -         with capture_internal_exceptions():
532     -             if not run_id or run_id not in self.span_map:
533     -                 return
534     -
535     -             span_data = self.span_map[run_id]
536     -             self._exit_span(span_data, run_id)
537     -
538     -     def on_chain_error(self, error, *, run_id, **kwargs):
539     -         # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
540     -         """Run when chain errors."""
541     -         self._handle_error(run_id, error)
542     -
543 518     def on_agent_action(self, action, *, run_id, **kwargs):
544 519         # type: (SentryLangchainCallback, AgentAction, UUID, Any) -> Any
545 520         with capture_internal_exceptions():
@@ -764,7 +739,11 @@ def new_invoke(self, *args, **kwargs):
764 739             result = f(self, *args, **kwargs)
765 740
766 741             input = result.get("input")
767     -             if input is not None and should_send_default_pii() and self.include_prompts:
    742 +             if (
    743 +                 input is not None
    744 +                 and should_send_default_pii()
    745 +                 and integration.include_prompts
    746 +             ):
768 747                 set_data_normalized(
769 748                     span,
770 749                     SPANDATA.GEN_AI_REQUEST_MESSAGES,
@@ -777,7 +756,7 @@ def new_invoke(self, *args, **kwargs):
777 756             if (
778 757                 output is not None
779 758                 and should_send_default_pii()
780     -                 and self.include_prompts
    759 +                 and integration.include_prompts
781 760             ):
782 761                 span.set_data(SPANDATA.GEN_AI_RESPONSE_TEXT, output)
783 762
0 commit comments