Skip to content

Commit 6cc2b23

Browse files
fix: add usage_metadata None checks to prevent NoneType errors (#429)
Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Co-authored-by: [email protected] <[email protected]>
1 parent 318c215 commit 6cc2b23

File tree

4 files changed

+6
-6
lines changed

4 files changed

+6
-6
lines changed

src/langtrace_python_sdk/instrumentation/gemini/patch.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -137,7 +137,7 @@ def set_response_attributes(
137137
if hasattr(result, "text"):
138138
set_event_completion(span, [{"role": "assistant", "content": result.text}])
139139

140-
if hasattr(result, "usage_metadata"):
140+
if hasattr(result, "usage_metadata") and result.usage_metadata is not None:
141141
usage = result.usage_metadata
142142
input_tokens = usage.prompt_token_count
143143
output_tokens = usage.candidates_token_count
@@ -152,7 +152,7 @@ def build_streaming_response(span, response):
152152
item_to_yield = item
153153
complete_response += str(item.text)
154154
yield item_to_yield
155-
if hasattr(item, "usage_metadata"):
155+
if hasattr(item, "usage_metadata") and item.usage_metadata is not None:
156156
usage = item.usage_metadata
157157
input_tokens = usage.prompt_token_count
158158
output_tokens = usage.candidates_token_count
@@ -171,7 +171,7 @@ async def abuild_streaming_response(span, response):
171171
item_to_yield = item
172172
complete_response += str(item.text)
173173
yield item_to_yield
174-
if hasattr(item, "usage_metadata"):
174+
if hasattr(item, "usage_metadata") and item.usage_metadata is not None:
175175
usage = item.usage_metadata
176176
input_tokens = usage.prompt_token_count
177177
output_tokens = usage.candidates_token_count

src/langtrace_python_sdk/instrumentation/langchain_core/patch.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ def traced_method(wrapped, instance, args, kwargs):
9898
result = wrapped(*args, **kwargs)
9999
if trace_output:
100100
span.set_attribute("langchain.outputs", to_json_string(result))
101-
if hasattr(result, "usage_metadata"):
101+
if hasattr(result, "usage_metadata") and result.usage_metadata is not None:
102102
span.set_attribute(
103103
SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
104104
result.usage_metadata["input_tokens"],

src/langtrace_python_sdk/instrumentation/vertexai/patch.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ def set_response_attributes(span: Span, result):
7777
if hasattr(result, "text"):
7878
set_event_completion(span, [{"role": "assistant", "content": result.text}])
7979

80-
if hasattr(result, "usage_metadata"):
80+
if hasattr(result, "usage_metadata") and result.usage_metadata is not None:
8181
usage = result.usage_metadata
8282
input_tokens = usage.prompt_token_count
8383
output_tokens = usage.candidates_token_count

src/langtrace_python_sdk/utils/llm.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -421,7 +421,7 @@ def set_usage_attributes(self, chunk):
421421
self.completion_tokens = chunk.usage.completion_tokens
422422

423423
# VertexAI
424-
if hasattr(chunk, "usage_metadata"):
424+
if hasattr(chunk, "usage_metadata") and chunk.usage_metadata is not None:
425425
self.completion_tokens = chunk.usage_metadata.candidates_token_count
426426
self.prompt_tokens = chunk.usage_metadata.prompt_token_count
427427

0 commit comments

Comments (0)