@@ -23,18 +23,28 @@ class UsageMetadataCallbackHandler(BaseCallbackHandler):
             from langchain.chat_models import init_chat_model
             from langchain_core.callbacks import UsageMetadataCallbackHandler

-            llm = init_chat_model(model="openai:gpt-4o-mini")
+            llm_1 = init_chat_model(model="openai:gpt-4o-mini")
+            llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-latest")

             callback = UsageMetadataCallbackHandler()
-            results = llm.batch(["Hello", "Goodbye"], config={"callbacks": [callback]})
-            print(callback.usage_metadata)
+            result_1 = llm_1.invoke("Hello", config={"callbacks": [callback]})
+            result_2 = llm_2.invoke("Hello", config={"callbacks": [callback]})
+            callback.usage_metadata

         .. code-block:: none

-            {'output_token_details': {'audio': 0, 'reasoning': 0}, 'input_tokens': 17, 'output_tokens': 31, 'total_tokens': 48, 'input_token_details': {'cache_read': 0, 'audio': 0}}
+            {'gpt-4o-mini-2024-07-18': {'input_tokens': 8,
+              'output_tokens': 10,
+              'total_tokens': 18,
+              'input_token_details': {'audio': 0, 'cache_read': 0},
+              'output_token_details': {'audio': 0, 'reasoning': 0}},
+             'claude-3-5-haiku-20241022': {'input_tokens': 8,
+              'output_tokens': 21,
+              'total_tokens': 29,
+              'input_token_details': {'cache_read': 0, 'cache_creation': 0}}}

     .. versionadded:: 0.3.49
-    """  # noqa: E501
+    """

     def __init__(self) -> None:
         super().__init__()
@@ -92,19 +102,28 @@ def get_usage_metadata_callback(
             from langchain.chat_models import init_chat_model
             from langchain_core.callbacks import get_usage_metadata_callback

-            llm = init_chat_model(model="openai:gpt-4o-mini")
+            llm_1 = init_chat_model(model="openai:gpt-4o-mini")
+            llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-latest")

             with get_usage_metadata_callback() as cb:
-                llm.invoke("Hello")
-                llm.invoke("Goodbye")
+                llm_1.invoke("Hello")
+                llm_2.invoke("Hello")
                 print(cb.usage_metadata)

         .. code-block:: none

-            {'output_token_details': {'audio': 0, 'reasoning': 0}, 'input_tokens': 17, 'output_tokens': 31, 'total_tokens': 48, 'input_token_details': {'cache_read': 0, 'audio': 0}}
+            {'gpt-4o-mini-2024-07-18': {'input_tokens': 8,
+              'output_tokens': 10,
+              'total_tokens': 18,
+              'input_token_details': {'audio': 0, 'cache_read': 0},
+              'output_token_details': {'audio': 0, 'reasoning': 0}},
+             'claude-3-5-haiku-20241022': {'input_tokens': 8,
+              'output_tokens': 21,
+              'total_tokens': 29,
+              'input_token_details': {'cache_read': 0, 'cache_creation': 0}}}

     .. versionadded:: 0.3.49
-    """  # noqa: E501
+    """
     from langchain_core.tracers.context import register_configure_hook

     usage_metadata_callback_var: ContextVar[Optional[UsageMetadataCallbackHandler]] = (
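With this change, the example output in both docstrings shows ``usage_metadata`` keyed by model name rather than a single flat dict, so callers that want an overall total now need to aggregate across the per-model entries. A minimal sketch of that aggregation, reusing the two-model setup from the docstring examples above (model choices and the ``grand_total`` name are illustrative, not part of the change):

.. code-block:: python

    from langchain.chat_models import init_chat_model
    from langchain_core.callbacks import UsageMetadataCallbackHandler

    # Same setup as the updated docstring example above.
    llm_1 = init_chat_model(model="openai:gpt-4o-mini")
    llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-latest")

    callback = UsageMetadataCallbackHandler()
    llm_1.invoke("Hello", config={"callbacks": [callback]})
    llm_2.invoke("Hello", config={"callbacks": [callback]})

    # callback.usage_metadata maps model name -> usage dict, so totals are
    # summed across entries instead of read from one flat dict.
    grand_total = sum(
        usage["total_tokens"] for usage in callback.usage_metadata.values()
    )
    print(grand_total)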