Skip to content

Commit 310e643

Browse files
baskaryan and eyurtsev
authored
release[anthropic]: 0.3.15 (#31479)
Co-authored-by: Eugene Yurtsev <[email protected]>
1 parent e70ec3b commit 310e643

File tree

4 files changed

+29
-10
lines changed

4 files changed

+29
-10
lines changed

libs/partners/anthropic/pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ dependencies = [
1212
"pydantic<3.0.0,>=2.7.4",
1313
]
1414
name = "langchain-anthropic"
15-
version = "0.3.14"
15+
version = "0.3.15"
1616
description = "An integration package connecting AnthropicMessages and LangChain"
1717
readme = "README.md"
1818

libs/partners/anthropic/tests/integration_tests/test_chat_models.py

Lines changed: 21 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
import json
44
import os
55
from base64 import b64encode
6-
from typing import Optional
6+
from typing import Optional, cast
77

88
import httpx
99
import pytest
@@ -42,7 +42,10 @@ def test_stream() -> None:
4242
chunks_with_model_name = 0
4343
for token in llm.stream("I'm Pickle Rick"):
4444
assert isinstance(token.content, str)
45-
full = token if full is None else full + token
45+
if full is None:
46+
full = cast(BaseMessageChunk, token)
47+
else:
48+
full = full + token
4649
assert isinstance(token, AIMessageChunk)
4750
if token.usage_metadata is not None:
4851
if token.usage_metadata.get("input_tokens"):
@@ -81,7 +84,10 @@ async def test_astream() -> None:
8184
chunks_with_output_token_counts = 0
8285
async for token in llm.astream("I'm Pickle Rick"):
8386
assert isinstance(token.content, str)
84-
full = token if full is None else full + token
87+
if full is None:
88+
full = cast(BaseMessageChunk, token)
89+
else:
90+
full = full + token
8591
assert isinstance(token, AIMessageChunk)
8692
if token.usage_metadata is not None:
8793
if token.usage_metadata.get("input_tokens"):
@@ -697,7 +703,10 @@ def test_citations() -> None:
697703
# Test streaming
698704
full: Optional[BaseMessageChunk] = None
699705
for chunk in llm.stream(messages):
700-
full = chunk if full is None else full + chunk
706+
if full is None:
707+
full = cast(BaseMessageChunk, chunk)
708+
else:
709+
full = full + chunk
701710
assert isinstance(full, AIMessageChunk)
702711
assert isinstance(full.content, list)
703712
assert any("citations" in block for block in full.content)
@@ -722,7 +731,10 @@ def test_thinking() -> None:
722731
# Test streaming
723732
full: Optional[BaseMessageChunk] = None
724733
for chunk in llm.stream("Hello"):
725-
full = chunk if full is None else full + chunk
734+
if full is None:
735+
full = cast(BaseMessageChunk, chunk)
736+
else:
737+
full = full + chunk
726738
assert isinstance(full, AIMessageChunk)
727739
assert isinstance(full.content, list)
728740
assert any("thinking" in block for block in full.content)
@@ -756,7 +768,10 @@ def test_redacted_thinking() -> None:
756768
# Test streaming
757769
full: Optional[BaseMessageChunk] = None
758770
for chunk in llm.stream(query):
759-
full = chunk if full is None else full + chunk
771+
if full is None:
772+
full = cast(BaseMessageChunk, chunk)
773+
else:
774+
full = full + chunk
760775
assert isinstance(full, AIMessageChunk)
761776
assert isinstance(full.content, list)
762777
stream_has_reasoning = False

libs/partners/anthropic/tests/integration_tests/test_standard.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
from typing import Literal, cast
55

66
from langchain_core.language_models import BaseChatModel
7-
from langchain_core.messages import AIMessage
7+
from langchain_core.messages import AIMessage, BaseMessageChunk
88
from langchain_tests.integration_tests import ChatModelIntegrationTests
99

1010
from langchain_anthropic import ChatAnthropic
@@ -146,7 +146,10 @@ def _invoke(llm: ChatAnthropic, input_: list, stream: bool) -> AIMessage:
146146
if stream:
147147
full = None
148148
for chunk in llm.stream(input_):
149-
full = full + chunk if full else chunk # type: ignore[operator]
149+
if full is None:
150+
full = cast(BaseMessageChunk, chunk)
151+
else:
152+
full = full + chunk
150153
return cast(AIMessage, full)
151154
else:
152155
return cast(AIMessage, llm.invoke(input_))

libs/partners/anthropic/uv.lock

Lines changed: 2 additions & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)