@@ -22,6 +22,7 @@ def __add__(self, y: "TokenUsage") -> "TokenUsage":
             return TokenUsage(
                 input_tokens=self.input_tokens + y.input_tokens,
                 output_tokens=self.output_tokens + y.output_tokens,
+                model=self.model,
             )
         else:
             raise ValueError("Cannot add TokenUsage objects with different models")
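For reference, here is a minimal self-contained sketch of the TokenUsage type this hunk modifies. The field and parameter names come from the diff; the dataclass base, the defaults, and the exact condition in `__add__` (the `if` line sits outside the hunk) are assumptions, not the repository's code.

```python
from dataclasses import dataclass


@dataclass
class TokenUsage:
    # Field names taken from the diff; the defaults are assumed.
    input_tokens: int = 0
    output_tokens: int = 0
    model: str = ""

    def __add__(self, y: "TokenUsage") -> "TokenUsage":
        # The hunk shows only the return and the else branch; a plain
        # equality check on `model` is assumed for the if condition.
        if self.model == y.model:
            return TokenUsage(
                input_tokens=self.input_tokens + y.input_tokens,
                output_tokens=self.output_tokens + y.output_tokens,
                model=self.model,
            )
        else:
            raise ValueError("Cannot add TokenUsage objects with different models")
```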
@@ -67,8 +68,11 @@ def get_token_usage_for_openai(
         return TokenUsage(input_tokens=0, output_tokens=0)
     output_tokens = get_from_dict(llm_output, "token_usage.completion_tokens", 0)
     input_tokens = get_from_dict(llm_output, "token_usage.prompt_tokens", 0)
+    model = get_from_dict(llm_output, "model_name", "")
 
-    return TokenUsage(input_tokens=input_tokens, output_tokens=output_tokens)
+    return TokenUsage(
+        input_tokens=input_tokens, output_tokens=output_tokens, model=model
+    )
 
 
 def get_token_usage_for_anthropic(
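The dotted keys passed to get_from_dict ("token_usage.completion_tokens", "model_name") suggest a nested-dictionary lookup that falls back to a default. A hypothetical stand-in with that behaviour, for readers without the repository at hand (this is not the project's actual helper):

```python
from typing import Any, Optional


def get_from_dict(data: Optional[dict], path: str, default: Any = None) -> Any:
    # Hypothetical helper: walk a dot-separated key path through nested
    # dicts, returning `default` as soon as any key is missing.
    current: Any = data or {}
    for key in path.split("."):
        if not isinstance(current, dict) or key not in current:
            return default
        current = current[key]
    return current


# e.g. get_from_dict({"token_usage": {"prompt_tokens": 12}},
#                    "token_usage.prompt_tokens", 0) -> 12
```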
@@ -92,10 +96,15 @@ def get_token_usage_for_anthropic(
                         "usage.output_tokens",
                         0,
                     ),
+                    model=get_from_dict(
+                        g.message.response_metadata, "model", ""
+                    ),
                 )
             )
-
-        return sum(token_usages, TokenUsage(input_tokens=0, output_tokens=0))
+        model = next((usage.model for usage in token_usages if usage.model), "")
+        return sum(
+            token_usages, TokenUsage(input_tokens=0, output_tokens=0, model=model)
+        )
     else:
         return TokenUsage(input_tokens=0, output_tokens=0)
 
@@ -120,10 +129,15 @@ def get_token_usage_for_bedrock(
                         "usage.completion_tokens",
                         0,
                     ),
+                    model=get_from_dict(
+                        g.message.response_metadata, "model_id", ""
+                    ),
                 )
             )
-
-        return sum(token_usages, TokenUsage(input_tokens=0, output_tokens=0))
+        model = next((usage.model for usage in token_usages if usage.model), "")
+        return sum(
+            token_usages, TokenUsage(input_tokens=0, output_tokens=0, model=model)
+        )
     return TokenUsage(input_tokens=0, output_tokens=0)
 
 
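Taken together, each provider helper now stamps its TokenUsage with the model name it saw, and summing a list of usages keeps that stamp on the total. A small illustration using the sketch above (the token counts and model string are made up):

```python
usages = [
    TokenUsage(input_tokens=120, output_tokens=30, model="claude-3-haiku"),
    TokenUsage(input_tokens=80, output_tokens=25, model="claude-3-haiku"),
]

# Mirror the pattern from the diff: seed sum() with a zero usage that
# carries the first non-empty model so __add__ does not raise.
model = next((usage.model for usage in usages if usage.model), "")
total = sum(usages, TokenUsage(input_tokens=0, output_tokens=0, model=model))

assert total.input_tokens == 200
assert total.output_tokens == 55
assert total.model == "claude-3-haiku"

# Adding a usage from a different model would raise ValueError in __add__.
```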