@@ -351,6 +351,7 @@ def on_success(self, span: Span, result: Dict[str, Any]):
351351 # Make sure to close the stream
352352 result ["body" ].close ()
353353
354+ # pylint: disable=no-self-use
354355 def _handle_amazon_titan_response (self , span : Span , response_body : Dict [str , Any ]):
355356 if "inputTextTokenCount" in response_body :
356357 span .set_attribute (GEN_AI_USAGE_INPUT_TOKENS , response_body ["inputTextTokenCount" ])
@@ -361,6 +362,7 @@ def _handle_amazon_titan_response(self, span: Span, response_body: Dict[str, Any
361362 if "completionReason" in result :
362363 span .set_attribute (GEN_AI_RESPONSE_FINISH_REASONS , [result ["completionReason" ]])
363364
365+ # pylint: disable=no-self-use
364366 def _handle_anthropic_claude_response (self , span : Span , response_body : Dict [str , Any ]):
365367 if "usage" in response_body :
366368 usage = response_body ["usage" ]
@@ -371,13 +373,15 @@ def _handle_anthropic_claude_response(self, span: Span, response_body: Dict[str,
371373 if "stop_reason" in response_body :
372374 span .set_attribute (GEN_AI_RESPONSE_FINISH_REASONS , [response_body ["stop_reason" ]])
373375
# pylint: disable=no-self-use
def _handle_cohere_command_response(self, span: Span, response_body: Dict[str, Any]):
    """Populate GenAI span attributes from a Cohere Command response body."""
    if "text" in response_body:
        # The response carries no token counts; estimate output tokens as
        # roughly one token per six characters of generated text.
        approx_output_tokens = math.ceil(len(response_body["text"]) / 6)
        span.set_attribute(GEN_AI_USAGE_OUTPUT_TOKENS, approx_output_tokens)
    if "finish_reason" in response_body:
        finish_reason = response_body["finish_reason"]
        span.set_attribute(GEN_AI_RESPONSE_FINISH_REASONS, [finish_reason])
380383
384+ # pylint: disable=no-self-use
381385 def _handle_ai21_jamba_response (self , span : Span , response_body : Dict [str , Any ]):
382386 if "usage" in response_body :
383387 usage = response_body ["usage" ]
@@ -390,6 +394,7 @@ def _handle_ai21_jamba_response(self, span: Span, response_body: Dict[str, Any])
390394 if "finish_reason" in choices :
391395 span .set_attribute (GEN_AI_RESPONSE_FINISH_REASONS , [choices ["finish_reason" ]])
392396
397+ # pylint: disable=no-self-use
393398 def _handle_meta_llama_response (self , span : Span , response_body : Dict [str , Any ]):
394399 if "prompt_token_count" in response_body :
395400 span .set_attribute (GEN_AI_USAGE_INPUT_TOKENS , response_body ["prompt_token_count" ])
@@ -398,6 +403,7 @@ def _handle_meta_llama_response(self, span: Span, response_body: Dict[str, Any])
398403 if "stop_reason" in response_body :
399404 span .set_attribute (GEN_AI_RESPONSE_FINISH_REASONS , [response_body ["stop_reason" ]])
400405
406+ # pylint: disable=no-self-use
401407 def _handle_mistral_mistral_response (self , span : Span , response_body : Dict [str , Any ]):
402408 if "outputs" in response_body :
403409 outputs = response_body ["outputs" ][0 ]
0 commit comments