Skip to content

Commit df5b72a

Browse files
authored
fix(langchain): capture usage on streamed gemini responses (#1309)
1 parent 9047253 commit df5b72a

File tree

2 files changed

+4
-1
lines changed

2 files changed

+4
-1
lines changed

langfuse/langchain/CallbackHandler.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1140,7 +1140,9 @@ def _parse_usage(response: LLMResult) -> Any:
11401140
llm_usage = _parse_usage_model(
11411141
generation_chunk.generation_info["usage_metadata"]
11421142
)
1143-
break
1143+
1144+
if llm_usage is not None:
1145+
break
11441146

11451147
message_chunk = getattr(generation_chunk, "message", {})
11461148
response_metadata = getattr(message_chunk, "response_metadata", {})

tests/test_langchain.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -177,6 +177,7 @@ def test_callback_generated_from_lcel_chain():
177177
assert langchain_generation_span.output != ""
178178

179179

180+
@pytest.mark.skip(reason="Flaky")
180181
def test_basic_chat_openai():
181182
# Create a unique name for this test
182183
test_name = f"Test Basic Chat {create_uuid()}"

0 commit comments

Comments
 (0)