|
325 | 325 | "ai_msg.tool_calls"
|
326 | 326 | ]
|
327 | 327 | },
|
| 328 | + { |
| 329 | + "cell_type": "markdown", |
| 330 | + "id": "535a16e4-cd5a-479f-b315-37c816ec4387", |
| 331 | + "metadata": {}, |
| 332 | + "source": [ |
| 333 | + "## Multimodal\n", |
| 334 | + "\n", |
| 335 | + "Claude supports image and PDF inputs as content blocks, both in Anthropic's native format (see docs for [vision](https://docs.anthropic.com/en/docs/build-with-claude/vision#base64-encoded-image-example) and [PDF support](https://docs.anthropic.com/en/docs/build-with-claude/pdf-support)) as well as LangChain's [standard format](/docs/how_to/multimodal_inputs/).\n", |
| 336 | + "\n", |
| 337 | + "### Files API\n", |
| 338 | + "\n", |
| 339 | + "Claude also supports interactions with files through its managed [Files API](https://docs.anthropic.com/en/docs/build-with-claude/files). See examples below.\n", |
| 340 | + "\n", |
| 341 | + "The Files API can also be used to upload files to a container for use with Claude's built-in code-execution tools. See the [code execution](#code-execution) section below, for details.\n", |
| 342 | + "\n", |
| 343 | + "<details>\n", |
| 344 | + "<summary>Images</summary>\n", |
| 345 | + "\n", |
| 346 | + "```python\n", |
| 347 | + "# Upload image\n", |
| 348 | + "\n", |
| 349 | + "import anthropic\n", |
| 350 | + "\n", |
| 351 | + "client = anthropic.Anthropic()\n", |
| 352 | + "file = client.beta.files.upload(\n", |
| 353 | + " # Supports image/jpeg, image/png, image/gif, image/webp\n", |
| 354 | + " file=(\"image.png\", open(\"/path/to/image.png\", \"rb\"), \"image/png\"),\n", |
| 355 | + ")\n", |
| 356 | + "image_file_id = file.id\n", |
| 357 | + "\n", |
| 358 | + "\n", |
| 359 | + "# Run inference\n", |
| 360 | + "from langchain_anthropic import ChatAnthropic\n", |
| 361 | + "\n", |
| 362 | + "llm = ChatAnthropic(\n", |
| 363 | + " model=\"claude-sonnet-4-20250514\",\n", |
| 364 | + " betas=[\"files-api-2025-04-14\"],\n", |
| 365 | + ")\n", |
| 366 | + "\n", |
| 367 | + "input_message = {\n", |
| 368 | + " \"role\": \"user\",\n", |
| 369 | + " \"content\": [\n", |
| 370 | + " {\n", |
| 371 | + " \"type\": \"text\",\n", |
| 372 | + " \"text\": \"Describe this image.\",\n", |
| 373 | + " },\n", |
| 374 | + " {\n", |
| 375 | + " \"type\": \"image\",\n", |
| 376 | + " \"source\": {\n", |
| 377 | + " \"type\": \"file\",\n", |
| 378 | + " \"file_id\": image_file_id,\n", |
| 379 | + " },\n", |
| 380 | + " },\n", |
| 381 | + " ],\n", |
| 382 | + "}\n", |
| 383 | + "llm.invoke([input_message])\n", |
| 384 | + "```\n", |
| 385 | + "\n", |
| 386 | + "</details>\n", |
| 387 | + "\n", |
| 388 | + "<details>\n", |
| 389 | + "<summary>PDFs</summary>\n", |
| 390 | + "\n", |
| 391 | + "```python\n", |
| 392 | + "# Upload document\n", |
| 393 | + "\n", |
| 394 | + "import anthropic\n", |
| 395 | + "\n", |
| 396 | + "client = anthropic.Anthropic()\n", |
| 397 | + "file = client.beta.files.upload(\n", |
| 398 | + " file=(\"document.pdf\", open(\"/path/to/document.pdf\", \"rb\"), \"application/pdf\"),\n", |
| 399 | + ")\n", |
| 400 | + "pdf_file_id = file.id\n", |
| 401 | + "\n", |
| 402 | + "\n", |
| 403 | + "# Run inference\n", |
| 404 | + "from langchain_anthropic import ChatAnthropic\n", |
| 405 | + "\n", |
| 406 | + "llm = ChatAnthropic(\n", |
| 407 | + " model=\"claude-sonnet-4-20250514\",\n", |
| 408 | + " betas=[\"files-api-2025-04-14\"],\n", |
| 409 | + ")\n", |
| 410 | + "\n", |
| 411 | + "input_message = {\n", |
| 412 | + " \"role\": \"user\",\n", |
| 413 | + " \"content\": [\n", |
| 414 | + " {\"type\": \"text\", \"text\": \"Describe this document.\"},\n", |
| 415 | + " {\"type\": \"document\", \"source\": {\"type\": \"file\", \"file_id\": pdf_file_id}}\n", |
| 416 | + " ],\n", |
| 417 | + "}\n", |
| 418 | + "llm.invoke([input_message])\n", |
| 419 | + "```\n", |
| 420 | + "\n", |
| 421 | + "</details>" |
| 422 | + ] |
| 423 | + }, |
328 | 424 | {
|
329 | 425 | "cell_type": "markdown",
|
330 | 426 | "id": "6e36d25c-f358-49e5-aefa-b99fbd3fec6b",
|
|
454 | 550 | "print(f\"\\nSecond:\\n{usage_2}\")"
|
455 | 551 | ]
|
456 | 552 | },
|
| 553 | + { |
| 554 | + "cell_type": "markdown", |
| 555 | + "id": "9678656f-1ec4-4bf1-bf62-bbd49eb5c4e7", |
| 556 | + "metadata": {}, |
| 557 | + "source": [ |
| 558 | + ":::tip Extended caching\n", |
| 559 | + "\n", |
| 560 | + " The cache lifetime is 5 minutes by default. If this is too short, you can apply one hour caching by enabling the `\"extended-cache-ttl-2025-04-11\"` beta header:\n", |
| 561 | + "\n", |
| 562 | + " ```python\n", |
| 563 | + " llm = ChatAnthropic(\n", |
| 564 | + " model=\"claude-3-7-sonnet-20250219\",\n", |
| 565 | + " # highlight-next-line\n", |
| 566 | + " betas=[\"extended-cache-ttl-2025-04-11\"],\n", |
| 567 | + " )\n", |
| 568 | + " ```\n", |
| 569 | + " and specifying `\"cache_control\": {\"type\": \"ephemeral\", \"ttl\": \"1h\"}`.\n", |
| 570 | + "\n", |
| 571 | + ":::" |
| 572 | + ] |
| 573 | + }, |
457 | 574 | {
|
458 | 575 | "cell_type": "markdown",
|
459 | 576 | "id": "141ce9c5-012d-4502-9d61-4a413b5d959a",
|
|
953 | 1070 | "response = llm_with_tools.invoke(\"How do I update a web app to TypeScript 5.5?\")"
|
954 | 1071 | ]
|
955 | 1072 | },
|
| 1073 | + { |
| 1074 | + "cell_type": "markdown", |
| 1075 | + "id": "1478cdc6-2e52-4870-80f9-b4ddf88f2db2", |
| 1076 | + "metadata": {}, |
| 1077 | + "source": [ |
| 1078 | + "### Code execution\n", |
| 1079 | + "\n", |
| 1080 | + "Claude can use a [code execution tool](https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/code-execution-tool) to execute Python code in a sandboxed environment.\n", |
| 1081 | + "\n", |
| 1082 | + ":::info Code execution is supported since ``langchain-anthropic>=0.3.14``\n", |
| 1083 | + "\n", |
| 1084 | + ":::" |
| 1085 | + ] |
| 1086 | + }, |
| 1087 | + { |
| 1088 | + "cell_type": "code", |
| 1089 | + "execution_count": 1, |
| 1090 | + "id": "2ce13632-a2da-439f-a429-f66481501630", |
| 1091 | + "metadata": {}, |
| 1092 | + "outputs": [], |
| 1093 | + "source": [ |
| 1094 | + "from langchain_anthropic import ChatAnthropic\n", |
| 1095 | + "\n", |
| 1096 | + "llm = ChatAnthropic(\n", |
| 1097 | + " model=\"claude-sonnet-4-20250514\",\n", |
| 1098 | + " betas=[\"code-execution-2025-05-22\"],\n", |
| 1099 | + ")\n", |
| 1100 | + "\n", |
| 1101 | + "tool = {\"type\": \"code_execution_20250522\", \"name\": \"code_execution\"}\n", |
| 1102 | + "llm_with_tools = llm.bind_tools([tool])\n", |
| 1103 | + "\n", |
| 1104 | + "response = llm_with_tools.invoke(\n", |
| 1105 | + " \"Calculate the mean and standard deviation of \" \"[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\"\n", |
| 1106 | + ")" |
| 1107 | + ] |
| 1108 | + }, |
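| | + { |
| | + "cell_type": "markdown", |
| | + "id": "f0a9c2d4-1b3e-4c5f-8a7d-6e2b9c0d1f84", |
| | + "metadata": {}, |
| | + "source": [ |
| | + "The response content interleaves the model's text with Anthropic server-tool blocks. A minimal sketch for pulling out the text and the code execution results (the `code_execution_tool_result` block type follows Anthropic's format, as in the Files API example below):\n", |
| | + "\n", |
| | + "```python\n", |
| | + "# Collect the model's text and any code execution result blocks\n", |
| | + "text = \"\".join(\n", |
| | + "    block[\"text\"]\n", |
| | + "    for block in response.content\n", |
| | + "    if isinstance(block, dict) and block.get(\"type\") == \"text\"\n", |
| | + ")\n", |
| | + "results = [\n", |
| | + "    block\n", |
| | + "    for block in response.content\n", |
| | + "    if isinstance(block, dict) and block.get(\"type\") == \"code_execution_tool_result\"\n", |
| | + "]\n", |
| | + "print(text)\n", |
| | + "```" |
| | + ] |
| | + }, |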
| 1109 | + { |
| 1110 | + "cell_type": "markdown", |
| 1111 | + "id": "24076f91-3a3d-4e53-9618-429888197061", |
| 1112 | + "metadata": {}, |
| 1113 | + "source": [ |
| 1114 | + "<details>\n", |
| 1115 | + "<summary>Use with Files API</summary>\n", |
| 1116 | + "\n", |
| 1117 | + "Using the Files API, Claude can write code to access files for data analysis and other purposes. See example below:\n", |
| 1118 | + "\n", |
| 1119 | + "```python\n", |
| 1120 | + "# Upload file\n", |
| 1121 | + "\n", |
| 1122 | + "import anthropic\n", |
| 1123 | + "\n", |
| 1124 | + "client = anthropic.Anthropic()\n", |
| 1125 | + "file = client.beta.files.upload(\n", |
| 1126 | + " file=open(\"/path/to/sample_data.csv\", \"rb\")\n", |
| 1127 | + ")\n", |
| 1128 | + "file_id = file.id\n", |
| 1129 | + "\n", |
| 1130 | + "\n", |
| 1131 | + "# Run inference\n", |
| 1132 | + "from langchain_anthropic import ChatAnthropic\n", |
| 1133 | + "\n", |
| 1134 | + "llm = ChatAnthropic(\n", |
| 1135 | + " model=\"claude-sonnet-4-20250514\",\n", |
| 1136 | + " betas=[\"code-execution-2025-05-22\"],\n", |
| 1137 | + ")\n", |
| 1138 | + "\n", |
| 1139 | + "tool = {\"type\": \"code_execution_20250522\", \"name\": \"code_execution\"}\n", |
| 1140 | + "llm_with_tools = llm.bind_tools([tool])\n", |
| 1141 | + "\n", |
| 1142 | + "input_message = {\n", |
| 1143 | + " \"role\": \"user\",\n", |
| 1144 | + " \"content\": [\n", |
| 1145 | + " {\n", |
| 1146 | + " \"type\": \"text\",\n", |
| 1147 | + " \"text\": \"Please plot these data and tell me what you see.\",\n", |
| 1148 | + " },\n", |
| 1149 | + " {\n", |
| 1150 | + " \"type\": \"container_upload\",\n", |
| 1151 | + " \"file_id\": file_id,\n", |
| 1152 | + " },\n", |
| 1153 | + " ]\n", |
| 1154 | + "}\n", |
| 1155 | + "llm_with_tools.invoke([input_message])\n", |
| 1156 | + "```\n", |
| 1157 | + "\n", |
| 1158 | + "Note that Claude may generate files as part of its code execution. You can access these files using the Files API:\n", |
| 1159 | + "```python\n", |
| 1160 | + "# Take all file outputs for demonstration purposes\n", |
| 1161 | + "file_ids = []\n", |
| 1162 | + "for block in response.content:\n", |
| 1163 | + " if block[\"type\"] == \"code_execution_tool_result\":\n", |
| 1164 | + " file_ids.extend(\n", |
| 1165 | + " content[\"file_id\"]\n", |
| 1166 | + " for content in block.get(\"content\", {}).get(\"content\", [])\n", |
| 1167 | + " if \"file_id\" in content\n", |
| 1168 | + " )\n", |
| 1169 | + "\n", |
| 1170 | + "for i, file_id in enumerate(file_ids):\n", |
| 1171 | + " file_content = client.beta.files.download(file_id)\n", |
| 1172 | + " file_content.write_to_file(f\"/path/to/file_{i}.png\")\n", |
| 1173 | + "```\n", |
| 1174 | + "\n", |
| 1175 | + "</details>" |
| 1176 | + ] |
| 1177 | + }, |
| 1178 | + { |
| 1179 | + "cell_type": "markdown", |
| 1180 | + "id": "040f381a-1768-479a-9a5e-aa2d7d77e0d5", |
| 1181 | + "metadata": {}, |
| 1182 | + "source": [ |
| 1183 | + "### Remote MCP\n", |
| 1184 | + "\n", |
| 1185 | + "Claude can use a [MCP connector tool](https://docs.anthropic.com/en/docs/agents-and-tools/mcp-connector) for model-generated calls to remote MCP servers.\n", |
| 1186 | + "\n", |
| 1187 | + ":::info Remote MCP is supported since ``langchain-anthropic>=0.3.14``\n", |
| 1188 | + "\n", |
| 1189 | + ":::" |
| 1190 | + ] |
| 1191 | + }, |
| 1192 | + { |
| 1193 | + "cell_type": "code", |
| 1194 | + "execution_count": 1, |
| 1195 | + "id": "22fc4a89-e6d8-4615-96cb-2e117349aebf", |
| 1196 | + "metadata": {}, |
| 1197 | + "outputs": [], |
| 1198 | + "source": [ |
| 1199 | + "from langchain_anthropic import ChatAnthropic\n", |
| 1200 | + "\n", |
| 1201 | + "mcp_servers = [\n", |
| 1202 | + " {\n", |
| 1203 | + " \"type\": \"url\",\n", |
| 1204 | + " \"url\": \"https://mcp.deepwiki.com/mcp\",\n", |
| 1205 | + " \"name\": \"deepwiki\",\n", |
| 1206 | + " \"tool_configuration\": { # optional configuration\n", |
| 1207 | + " \"enabled\": True,\n", |
| 1208 | + " \"allowed_tools\": [\"ask_question\"],\n", |
| 1209 | + " },\n", |
| 1210 | + " \"authorization_token\": \"PLACEHOLDER\", # optional authorization\n", |
| 1211 | + " }\n", |
| 1212 | + "]\n", |
| 1213 | + "\n", |
| 1214 | + "llm = ChatAnthropic(\n", |
| 1215 | + " model=\"claude-sonnet-4-20250514\",\n", |
| 1216 | + " betas=[\"mcp-client-2025-04-04\"],\n", |
| 1217 | + " mcp_servers=mcp_servers,\n", |
| 1218 | + ")\n", |
| 1219 | + "\n", |
| 1220 | + "response = llm.invoke(\n", |
| 1221 | + " \"What transport protocols does the 2025-03-26 version of the MCP \"\n", |
| 1222 | + " \"spec (modelcontextprotocol/modelcontextprotocol) support?\"\n", |
| 1223 | + ")" |
| 1224 | + ] |
| 1225 | + }, |
956 | 1226 | {
|
957 | 1227 | "cell_type": "markdown",
|
958 | 1228 | "id": "2fd5d545-a40d-42b1-ad0c-0a79e2536c9b",
|
|