
Commit f6bf83c

Jeel Mehta committed: Pylint no-self-use disabled

1 parent 2f8cecc, commit f6bf83c

File tree

1 file changed: +6 additions, -0 deletions

aws-opentelemetry-distro/src/amazon/opentelemetry/distro/patches/_bedrock_patches.py

Lines changed: 6 additions & 0 deletions
@@ -351,6 +351,7 @@ def on_success(self, span: Span, result: Dict[str, Any]):
         # Make sure to close the stream
         result["body"].close()
 
+    # pylint: disable=no-self-use
     def _handle_amazon_titan_response(self, span: Span, response_body: Dict[str, Any]):
         if "inputTextTokenCount" in response_body:
             span.set_attribute(GEN_AI_USAGE_INPUT_TOKENS, response_body["inputTextTokenCount"])
@@ -361,6 +362,7 @@ def _handle_amazon_titan_response(self, span: Span, response_body: Dict[str, Any
         if "completionReason" in result:
             span.set_attribute(GEN_AI_RESPONSE_FINISH_REASONS, [result["completionReason"]])
 
+    # pylint: disable=no-self-use
     def _handle_anthropic_claude_response(self, span: Span, response_body: Dict[str, Any]):
         if "usage" in response_body:
             usage = response_body["usage"]
@@ -371,13 +373,15 @@ def _handle_anthropic_claude_response(self, span: Span, response_body: Dict[str,
         if "stop_reason" in response_body:
             span.set_attribute(GEN_AI_RESPONSE_FINISH_REASONS, [response_body["stop_reason"]])
 
+    # pylint: disable=no-self-use
     def _handle_cohere_command_response(self, span: Span, response_body: Dict[str, Any]):
         # Output tokens: Approximate from the response text
         if "text" in response_body:
             span.set_attribute(GEN_AI_USAGE_OUTPUT_TOKENS, math.ceil(len(response_body["text"]) / 6))
         if "finish_reason" in response_body:
             span.set_attribute(GEN_AI_RESPONSE_FINISH_REASONS, [response_body["finish_reason"]])
 
+    # pylint: disable=no-self-use
     def _handle_ai21_jamba_response(self, span: Span, response_body: Dict[str, Any]):
         if "usage" in response_body:
             usage = response_body["usage"]
@@ -390,6 +394,7 @@ def _handle_ai21_jamba_response(self, span: Span, response_body: Dict[str, Any])
         if "finish_reason" in choices:
             span.set_attribute(GEN_AI_RESPONSE_FINISH_REASONS, [choices["finish_reason"]])
 
+    # pylint: disable=no-self-use
     def _handle_meta_llama_response(self, span: Span, response_body: Dict[str, Any]):
         if "prompt_token_count" in response_body:
             span.set_attribute(GEN_AI_USAGE_INPUT_TOKENS, response_body["prompt_token_count"])
@@ -398,6 +403,7 @@ def _handle_meta_llama_response(self, span: Span, response_body: Dict[str, Any])
         if "stop_reason" in response_body:
             span.set_attribute(GEN_AI_RESPONSE_FINISH_REASONS, [response_body["stop_reason"]])
 
+    # pylint: disable=no-self-use
     def _handle_mistral_mistral_response(self, span: Span, response_body: Dict[str, Any]):
         if "outputs" in response_body:
            outputs = response_body["outputs"][0]
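
For context: the added # pylint: disable=no-self-use comments suppress Pylint's no-self-use check, which flags instance methods that never reference self and suggests converting them to plain functions or static methods. (In newer Pylint releases this check lives in the optional pylint.extensions.no_self_use plugin.) Below is a minimal, hypothetical sketch of the pattern; the class and method names are invented for illustration and are not taken from _bedrock_patches.py.

import math


class ResponseHandler:
    """Hypothetical stand-in for a response handler with self-free helpers."""

    # pylint: disable=no-self-use
    def _handle_text_response(self, response_body: dict) -> int:
        # This method never reads `self`, which is what normally triggers
        # Pylint's no-self-use suggestion to make it a function or staticmethod.
        # The disable comment above keeps the method signature unchanged while
        # silencing that one check.
        text = response_body.get("text", "")
        # Rough token estimate: one token per ~6 characters.
        return math.ceil(len(text) / 6)


if __name__ == "__main__":
    print(ResponseHandler()._handle_text_response({"text": "hello world"}))  # -> 2

Placing the comment on its own line immediately above each def, as this commit does for every handler, leaves the method signatures untouched while disabling only the no-self-use message.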

0 commit comments
