Commit 426a0fd

remove logs

1 parent 52a1237 commit 426a0fd

File tree

1 file changed: 1 addition, 15 deletions

sentry_sdk/integrations/langchain.py

Lines changed: 1 addition & 15 deletions
@@ -298,8 +298,6 @@ def on_chat_model_end(self, response, *, run_id, **kwargs):
         if not run_id:
             return
 
-        # Extract token usage following LangChain's callback pattern
-        # Reference: https://python.langchain.com/docs/how_to/llm_token_usage_tracking/
         token_usage = None
 
         if response.llm_output and "token_usage" in response.llm_output:
@@ -327,26 +325,14 @@ def on_chat_model_end(self, response, *, run_id, **kwargs):
             input_tokens, output_tokens, total_tokens = (
                 self._extract_token_usage(token_usage)
             )
-            # Log token usage for debugging (will be removed in production)
-            logger.debug(
-                "LangChain token usage found: input=%s, output=%s, total=%s",
-                input_tokens,
-                output_tokens,
-                total_tokens,
-            )
+
             record_token_usage(
                 span_data.span,
                 input_tokens=input_tokens,
                 output_tokens=output_tokens,
                 total_tokens=total_tokens,
            )
         else:
-            # Fallback to manual token counting when no usage info is available
-            logger.debug(
-                "No token usage from LangChain, using manual count: input=%s, output=%s",
-                span_data.num_prompt_tokens,
-                span_data.num_completion_tokens,
-            )
             record_token_usage(
                 span_data.span,
                 input_tokens=(