@@ -487,7 +487,8 @@ def _handle_anthropic_bad_request(e: anthropic.BadRequestError) -> None:
487
487
class ChatAnthropic(BaseChatModel):
488
488
"""Anthropic chat models.
489
489
490
- See https://docs.anthropic.com/en/docs/models-overview for a list of the latest models.
490
+ See `Anthropic's docs <https://docs.anthropic.com/en/docs/models-overview>`__ for a
491
+ list of the latest models.
491
492
492
493
Setup:
493
494
Install ``langchain-anthropic`` and set environment variable ``ANTHROPIC_API_KEY``.
@@ -499,9 +500,9 @@ class ChatAnthropic(BaseChatModel):
499
500
500
501
Key init args — completion params:
501
502
model: str
502
- Name of Anthropic model to use. E.g. "claude-3-sonnet-20240229".
503
+ Name of Anthropic model to use. e.g. ``'claude-3-sonnet-20240229'``.
503
504
temperature: float
504
- Sampling temperature. Ranges from 0.0 to 1.0.
505
+ Sampling temperature. Ranges from ``0.0`` to ``1.0``.
505
506
max_tokens: int
506
507
Max number of tokens to generate.
507
508
@@ -511,7 +512,8 @@ class ChatAnthropic(BaseChatModel):
511
512
max_retries: int
512
513
Max number of retries if a request fails.
513
514
api_key: Optional[str]
514
- Anthropic API key. If not passed in will be read from env var ANTHROPIC_API_KEY.
515
+ Anthropic API key. If not passed in will be read from env var
516
+ ``ANTHROPIC_API_KEY``.
515
517
base_url: Optional[str]
516
518
Base URL for API requests. Only specify if using a proxy or service
517
519
emulator.
@@ -1183,17 +1185,16 @@ def get_weather(location: str) -> str:
1183
1185
"""Base URL for API requests. Only specify if using a proxy or service emulator.
1184
1186
1185
1187
If a value isn't passed in, will attempt to read the value first from
1186
- ANTHROPIC_API_URL and if that is not set, ANTHROPIC_BASE_URL.
1187
- If neither are set, the default value of 'https://api.anthropic.com' will
1188
+ ``ANTHROPIC_API_URL`` and if that is not set, ``ANTHROPIC_BASE_URL``.
1189
+ If neither are set, the default value of ``https://api.anthropic.com`` will
1188
1190
be used.
1189
1191
"""
1190
1192
1191
1193
anthropic_api_key: SecretStr = Field(
1192
1194
alias="api_key",
1193
1195
default_factory=secret_from_env("ANTHROPIC_API_KEY", default=""),
1194
1196
)
1195
-
1196
- """Automatically read from env var `ANTHROPIC_API_KEY` if not provided."""
1197
+ """Automatically read from env var ``ANTHROPIC_API_KEY`` if not provided."""
1197
1198
1198
1199
default_headers: Optional[Mapping[str, str]] = None
1199
1200
"""Headers to pass to the Anthropic clients, will be used for every API call."""
@@ -1211,7 +1212,7 @@ def get_weather(location: str) -> str:
1211
1212
"""Whether to use streaming or not."""
1212
1213
1213
1214
stream_usage: bool = True
1214
- """Whether to include usage metadata in streaming output. If True, additional
1215
+ """Whether to include usage metadata in streaming output. If ``True``, additional
1215
1216
message chunks will be generated during the stream including usage metadata.
1216
1217
"""
1217
1218
@@ -1570,7 +1571,7 @@ def bind_tools(
1570
1571
tool_choice: Which tool to require the model to call. Options are:
1571
1572
1572
1573
- name of the tool as a string or as dict ``{"type": "tool", "name": "<<tool_name>>"}``: calls corresponding tool;
1573
- - ``"auto"``, ``{"type: "auto"}``, or None: automatically selects a tool (including no tool);
1574
+ - ``"auto"``, ``{"type: "auto"}``, or ``None``: automatically selects a tool (including no tool);
1574
1575
- ``"any"`` or ``{"type: "any"}``: force at least one tool to be called;
1575
1576
parallel_tool_calls: Set to ``False`` to disable parallel tool use.
1576
1577
Defaults to ``None`` (no specification, which allows parallel tool use).
@@ -1580,6 +1581,7 @@ def bind_tools(
1580
1581
:meth:`~langchain_anthropic.chat_models.ChatAnthropic.bind`.
1581
1582
1582
1583
Example:
1584
+
1583
1585
.. code-block:: python
1584
1586
1585
1587
from langchain_anthropic import ChatAnthropic
@@ -1608,7 +1610,8 @@ class GetPrice(BaseModel):
1608
1610
# id='run-87b1331e-9251-4a68-acef-f0a018b639cc-0'
1609
1611
# )
1610
1612
1611
- Example — force tool call with tool_choice 'any':
1613
+ Example — force tool call with tool_choice ``'any'``:
1614
+
1612
1615
.. code-block:: python
1613
1616
1614
1617
from langchain_anthropic import ChatAnthropic
@@ -1630,7 +1633,8 @@ class GetPrice(BaseModel):
1630
1633
llm_with_tools.invoke("what is the weather like in San Francisco",)
1631
1634
1632
1635
1633
- Example — force specific tool call with tool_choice '<name_of_tool>':
1636
+ Example — force specific tool call with tool_choice ``'<name_of_tool>'``:
1637
+
1634
1638
.. code-block:: python
1635
1639
1636
1640
from langchain_anthropic import ChatAnthropic
@@ -1652,6 +1656,7 @@ class GetPrice(BaseModel):
1652
1656
llm_with_tools.invoke("what is the weather like in San Francisco",)
1653
1657
1654
1658
Example — cache specific tools:
1659
+
1655
1660
.. code-block:: python
1656
1661
1657
1662
from langchain_anthropic import ChatAnthropic, convert_to_anthropic_tool
@@ -1754,28 +1759,29 @@ def with_structured_output(
1754
1759
for more on how to properly specify types and descriptions of
1755
1760
schema fields when specifying a Pydantic or TypedDict class.
1756
1761
include_raw:
1757
- If False then only the parsed structured output is returned. If
1758
- an error occurs during model output parsing it will be raised. If True
1762
+ If ``False`` then only the parsed structured output is returned. If
1763
+ an error occurs during model output parsing it will be raised. If ``True``
1759
1764
then both the raw model response (a BaseMessage) and the parsed model
1760
1765
response will be returned. If an error occurs during output parsing it
1761
1766
will be caught and returned as well. The final output is always a dict
1762
- with keys "raw", "parsed", and "parsing_error".
1767
+ with keys ``raw``, ``parsed``, and ``parsing_error``.
1763
1768
kwargs: Additional keyword arguments are ignored.
1764
1769
1765
1770
Returns:
1766
1771
A Runnable that takes same inputs as a :class:`~langchain_core.language_models.chat.BaseChatModel`.
1767
1772
1768
- If ``include_raw`` is False and ``schema`` is a Pydantic class, Runnable outputs
1773
+ If ``include_raw`` is ``False`` and ``schema`` is a Pydantic class, Runnable outputs
1769
1774
an instance of ``schema`` (i.e., a Pydantic object).
1770
1775
1771
- Otherwise, if ``include_raw`` is False then Runnable outputs a dict.
1776
+ Otherwise, if ``include_raw`` is ``False`` then Runnable outputs a dict.
1772
1777
1773
1778
If ``include_raw`` is True, then Runnable outputs a dict with keys:
1774
- - ``"raw"``: BaseMessage
1775
- - ``"parsed"``: None if there was a parsing error, otherwise the type depends on the ``schema`` as described above.
1776
- - ``"parsing_error"``: Optional[BaseException]
1779
+ - ``raw``: BaseMessage
1780
+ - ``parsed``: None if there was a parsing error, otherwise the type depends on the ``schema`` as described above.
1781
+ - ``parsing_error``: Optional[BaseException]
1777
1782
1778
1783
Example: Pydantic schema (include_raw=False):
1784
+
1779
1785
.. code-block:: python
1780
1786
1781
1787
from langchain_anthropic import ChatAnthropic
@@ -1797,6 +1803,7 @@ class AnswerWithJustification(BaseModel):
1797
1803
# )
1798
1804
1799
1805
Example: Pydantic schema (include_raw=True):
1806
+
1800
1807
.. code-block:: python
1801
1808
1802
1809
from langchain_anthropic import ChatAnthropic
@@ -1818,6 +1825,7 @@ class AnswerWithJustification(BaseModel):
1818
1825
# }
1819
1826
1820
1827
Example: Dict schema (include_raw=False):
1828
+
1821
1829
.. code-block:: python
1822
1830
1823
1831
from langchain_anthropic import ChatAnthropic
@@ -1902,6 +1910,7 @@ def get_num_tokens_from_messages(
1902
1910
to be converted to tool schemas.
1903
1911
1904
1912
Basic usage:
1913
+
1905
1914
.. code-block:: python
1906
1915
1907
1916
from langchain_anthropic import ChatAnthropic
@@ -1920,6 +1929,7 @@ def get_num_tokens_from_messages(
1920
1929
14
1921
1930
1922
1931
Pass tool schemas:
1932
+
1923
1933
.. code-block:: python
1924
1934
1925
1935
from langchain_anthropic import ChatAnthropic
@@ -1948,9 +1958,9 @@ def get_weather(location: str) -> str:
1948
1958
1949
1959
.. versionchanged:: 0.3.0
1950
1960
1951
- Uses Anthropic's token counting API to count tokens in messages. See:
1952
- https://docs.anthropic.com/en/docs/build-with-claude/token-counting
1953
- """
1961
+ Uses Anthropic's `token counting API <https://docs.anthropic.com/en/docs/build-with-claude/token-counting>`__ to count tokens in messages.
1962
+
1963
+ """ # noqa: E501
1954
1964
formatted_system, formatted_messages = _format_messages(messages)
1955
1965
if isinstance(formatted_system, str):
1956
1966
kwargs["system"] = formatted_system
@@ -2044,7 +2054,7 @@ def _make_message_chunk_from_anthropic_event(
2044
2054
"""Convert Anthropic event to AIMessageChunk.
2045
2055
2046
2056
Note that not all events will result in a message chunk. In these cases
2047
- we return None.
2057
+ we return ``None``.
2048
2058
"""
2049
2059
message_chunk: Optional[AIMessageChunk] = None
2050
2060
# See https://github.com/anthropics/anthropic-sdk-python/blob/main/src/anthropic/lib/streaming/_messages.py # noqa: E501
0 commit comments