Skip to content

Commit ec2b34a

Browse files
authored
feat(openai): custom tools (#32449)
1 parent 145d38f commit ec2b34a

File tree

11 files changed

+488
-4
lines changed

11 files changed

+488
-4
lines changed

docs/docs/integrations/chat/openai.ipynb

Lines changed: 157 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -447,6 +447,163 @@
447447
")"
448448
]
449449
},
450+
{
451+
"cell_type": "markdown",
452+
"id": "c5d9d19d-8ab1-4d9d-b3a0-56ee4e89c528",
453+
"metadata": {},
454+
"source": [
455+
"### Custom tools\n",
456+
"\n",
457+
":::info Requires ``langchain-openai>=0.3.29``\n",
458+
"\n",
459+
":::\n",
460+
"\n",
461+
"[Custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools) support tools with arbitrary string inputs. They can be particularly useful when you expect your string arguments to be long or complex."
462+
]
463+
},
464+
{
465+
"cell_type": "code",
466+
"execution_count": 1,
467+
"id": "a47c809b-852f-46bd-8b9e-d9534c17213d",
468+
"metadata": {},
469+
"outputs": [
470+
{
471+
"name": "stdout",
472+
"output_type": "stream",
473+
"text": [
474+
"================================\u001b[1m Human Message \u001b[0m=================================\n",
475+
"\n",
476+
"Use the tool to calculate 3^3.\n",
477+
"==================================\u001b[1m Ai Message \u001b[0m==================================\n",
478+
"\n",
479+
"[{'id': 'rs_6894ff5747c0819d9b02fc5645b0be9c000169fd9fb68d99', 'summary': [], 'type': 'reasoning'}, {'call_id': 'call_7SYwMSQPbbEqFcKlKOpXeEux', 'input': 'print(3**3)', 'name': 'execute_code', 'type': 'custom_tool_call', 'id': 'ctc_6894ff5b9f54819d8155a63638d34103000169fd9fb68d99', 'status': 'completed'}]\n",
480+
"Tool Calls:\n",
481+
" execute_code (call_7SYwMSQPbbEqFcKlKOpXeEux)\n",
482+
" Call ID: call_7SYwMSQPbbEqFcKlKOpXeEux\n",
483+
" Args:\n",
484+
" __arg1: print(3**3)\n",
485+
"=================================\u001b[1m Tool Message \u001b[0m=================================\n",
486+
"Name: execute_code\n",
487+
"\n",
488+
"[{'type': 'custom_tool_call_output', 'output': '27'}]\n",
489+
"==================================\u001b[1m Ai Message \u001b[0m==================================\n",
490+
"\n",
491+
"[{'type': 'text', 'text': '27', 'annotations': [], 'id': 'msg_6894ff5db3b8819d9159b3a370a25843000169fd9fb68d99'}]\n"
492+
]
493+
}
494+
],
495+
"source": [
496+
"from langchain_openai import ChatOpenAI, custom_tool\n",
497+
"from langgraph.prebuilt import create_react_agent\n",
498+
"\n",
499+
"\n",
500+
"@custom_tool\n",
501+
"def execute_code(code: str) -> str:\n",
502+
" \"\"\"Execute python code.\"\"\"\n",
503+
" return \"27\"\n",
504+
"\n",
505+
"\n",
506+
"llm = ChatOpenAI(model=\"gpt-5\", output_version=\"responses/v1\")\n",
507+
"\n",
508+
"agent = create_react_agent(llm, [execute_code])\n",
509+
"\n",
510+
"input_message = {\"role\": \"user\", \"content\": \"Use the tool to calculate 3^3.\"}\n",
511+
"for step in agent.stream(\n",
512+
" {\"messages\": [input_message]},\n",
513+
" stream_mode=\"values\",\n",
514+
"):\n",
515+
" step[\"messages\"][-1].pretty_print()"
516+
]
517+
},
518+
{
519+
"cell_type": "markdown",
520+
"id": "5ef93be6-6d4c-4eea-acfd-248774074082",
521+
"metadata": {},
522+
"source": [
523+
"<details>\n",
524+
"<summary>Context-free grammars</summary>\n",
525+
"\n",
526+
"OpenAI supports the specification of a [context-free grammar](https://platform.openai.com/docs/guides/function-calling#context-free-grammars) for custom tool inputs in `lark` or `regex` format. See [OpenAI docs](https://platform.openai.com/docs/guides/function-calling#context-free-grammars) for details. The `format` parameter can be passed into `@custom_tool` as shown below:"
527+
]
528+
},
529+
{
530+
"cell_type": "code",
531+
"execution_count": 3,
532+
"id": "2ae04586-be33-49c6-8947-7867801d868f",
533+
"metadata": {},
534+
"outputs": [
535+
{
536+
"name": "stdout",
537+
"output_type": "stream",
538+
"text": [
539+
"================================\u001b[1m Human Message \u001b[0m=================================\n",
540+
"\n",
541+
"Use the tool to calculate 3^3.\n",
542+
"==================================\u001b[1m Ai Message \u001b[0m==================================\n",
543+
"\n",
544+
"[{'id': 'rs_689500828a8481a297ff0f98e328689c0681550c89797f43', 'summary': [], 'type': 'reasoning'}, {'call_id': 'call_jzH01RVhu6EFz7yUrOFXX55s', 'input': '3 * 3 * 3', 'name': 'do_math', 'type': 'custom_tool_call', 'id': 'ctc_6895008d57bc81a2b84d0993517a66b90681550c89797f43', 'status': 'completed'}]\n",
545+
"Tool Calls:\n",
546+
" do_math (call_jzH01RVhu6EFz7yUrOFXX55s)\n",
547+
" Call ID: call_jzH01RVhu6EFz7yUrOFXX55s\n",
548+
" Args:\n",
549+
" __arg1: 3 * 3 * 3\n",
550+
"=================================\u001b[1m Tool Message \u001b[0m=================================\n",
551+
"Name: do_math\n",
552+
"\n",
553+
"[{'type': 'custom_tool_call_output', 'output': '27'}]\n",
554+
"==================================\u001b[1m Ai Message \u001b[0m==================================\n",
555+
"\n",
556+
"[{'type': 'text', 'text': '27', 'annotations': [], 'id': 'msg_6895009776b881a2a25f0be8507d08f20681550c89797f43'}]\n"
557+
]
558+
}
559+
],
560+
"source": [
561+
"from langchain_openai import ChatOpenAI, custom_tool\n",
562+
"from langgraph.prebuilt import create_react_agent\n",
563+
"\n",
564+
"grammar = \"\"\"\n",
565+
"start: expr\n",
566+
"expr: term (SP ADD SP term)* -> add\n",
567+
"| term\n",
568+
"term: factor (SP MUL SP factor)* -> mul\n",
569+
"| factor\n",
570+
"factor: INT\n",
571+
"SP: \" \"\n",
572+
"ADD: \"+\"\n",
573+
"MUL: \"*\"\n",
574+
"%import common.INT\n",
575+
"\"\"\"\n",
576+
"\n",
577+
"format_ = {\"type\": \"grammar\", \"syntax\": \"lark\", \"definition\": grammar}\n",
578+
"\n",
579+
"\n",
580+
"# highlight-next-line\n",
581+
"@custom_tool(format=format_)\n",
582+
"def do_math(input_string: str) -> str:\n",
583+
" \"\"\"Do a mathematical operation.\"\"\"\n",
584+
" return \"27\"\n",
585+
"\n",
586+
"\n",
587+
"llm = ChatOpenAI(model=\"gpt-5\", output_version=\"responses/v1\")\n",
588+
"\n",
589+
"agent = create_react_agent(llm, [do_math])\n",
590+
"\n",
591+
"input_message = {\"role\": \"user\", \"content\": \"Use the tool to calculate 3^3.\"}\n",
592+
"for step in agent.stream(\n",
593+
" {\"messages\": [input_message]},\n",
594+
" stream_mode=\"values\",\n",
595+
"):\n",
596+
" step[\"messages\"][-1].pretty_print()"
597+
]
598+
},
599+
{
600+
"cell_type": "markdown",
601+
"id": "c63430c9-c7b0-4e92-a491-3f165dddeb8f",
602+
"metadata": {},
603+
"source": [
604+
"</details>"
605+
]
606+
},
450607
{
451608
"cell_type": "markdown",
452609
"id": "84833dd0-17e9-4269-82ed-550639d65751",

libs/core/langchain_core/tools/base.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,14 @@
7474
from collections.abc import Sequence
7575

7676
FILTERED_ARGS = ("run_manager", "callbacks")
77-
TOOL_MESSAGE_BLOCK_TYPES = ("text", "image_url", "image", "json", "search_result")
77+
# Content-block ``type`` values recognized inside a ToolMessage's content list.
# "custom_tool_call_output" was added for OpenAI custom tools, which return a
# raw string result rather than a JSON function payload.
# NOTE(review): presumably consulted when filtering/validating ToolMessage
# content blocks elsewhere in this module — confirm against the call sites.
TOOL_MESSAGE_BLOCK_TYPES = (
    "text",
    "image_url",
    "image",
    "json",
    "search_result",
    "custom_tool_call_output",
)
7885

7986

8087
class SchemaAnnotationError(TypeError):

libs/core/langchain_core/utils/function_calling.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -575,12 +575,23 @@ def convert_to_openai_tool(
575575
576576
Added support for OpenAI's image generation built-in tool.
577577
"""
578+
from langchain_core.tools import Tool
579+
578580
if isinstance(tool, dict):
579581
if tool.get("type") in _WellKnownOpenAITools:
580582
return tool
581583
# As of 03.12.25 can be "web_search_preview" or "web_search_preview_2025_03_11"
582584
if (tool.get("type") or "").startswith("web_search_preview"):
583585
return tool
586+
if isinstance(tool, Tool) and (tool.metadata or {}).get("type") == "custom_tool":
587+
oai_tool = {
588+
"type": "custom",
589+
"name": tool.name,
590+
"description": tool.description,
591+
}
592+
if tool.metadata is not None and "format" in tool.metadata:
593+
oai_tool["format"] = tool.metadata["format"]
594+
return oai_tool
584595
oai_function = convert_to_openai_function(tool, strict=strict)
585596
return {"type": "function", "function": oai_function}
586597

libs/partners/openai/langchain_openai/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
from langchain_openai.chat_models import AzureChatOpenAI, ChatOpenAI
22
from langchain_openai.embeddings import AzureOpenAIEmbeddings, OpenAIEmbeddings
33
from langchain_openai.llms import AzureOpenAI, OpenAI
4+
from langchain_openai.tools import custom_tool
45

56
__all__ = [
67
"OpenAI",
@@ -9,4 +10,5 @@
910
"AzureOpenAI",
1011
"AzureChatOpenAI",
1112
"AzureOpenAIEmbeddings",
13+
"custom_tool",
1214
]

libs/partners/openai/langchain_openai/chat_models/base.py

Lines changed: 47 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3582,6 +3582,20 @@ def _make_computer_call_output_from_message(message: ToolMessage) -> dict:
35823582
return computer_call_output
35833583

35843584

3585+
def _make_custom_tool_output_from_message(message: ToolMessage) -> Optional[dict]:
    """Build a Responses API ``custom_tool_call_output`` item from a ToolMessage.

    Scans ``message.content`` for the first dict block whose ``type`` is
    ``"custom_tool_call_output"``. If one is found, returns the corresponding
    Responses API payload (carrying the message's ``tool_call_id``); otherwise
    returns ``None`` so the caller can fall back to other tool-output formats.
    """
    matches = (
        block
        for block in message.content
        if isinstance(block, dict) and block.get("type") == "custom_tool_call_output"
    )
    first_block = next(matches, None)
    if first_block is None:
        return None
    return {
        "type": "custom_tool_call_output",
        "call_id": message.tool_call_id,
        # Coerce falsy/missing outputs (None, "") to an empty string, matching
        # the API's requirement for a string ``output`` field.
        "output": first_block.get("output") or "",
    }
35853599
def _pop_index_and_sub_index(block: dict) -> dict:
35863600
"""When streaming, langchain-core uses the ``index`` key to aggregate
35873601
text blocks. OpenAI API does not support this key, so we need to remove it.
@@ -3608,7 +3622,10 @@ def _construct_responses_api_input(messages: Sequence[BaseMessage]) -> list:
36083622
msg.pop("name")
36093623
if msg["role"] == "tool":
36103624
tool_output = msg["content"]
3611-
if lc_msg.additional_kwargs.get("type") == "computer_call_output":
3625+
custom_tool_output = _make_custom_tool_output_from_message(lc_msg) # type: ignore[arg-type]
3626+
if custom_tool_output:
3627+
input_.append(custom_tool_output)
3628+
elif lc_msg.additional_kwargs.get("type") == "computer_call_output":
36123629
computer_call_output = _make_computer_call_output_from_message(
36133630
cast(ToolMessage, lc_msg)
36143631
)
@@ -3663,6 +3680,7 @@ def _construct_responses_api_input(messages: Sequence[BaseMessage]) -> list:
36633680
"file_search_call",
36643681
"function_call",
36653682
"computer_call",
3683+
"custom_tool_call",
36663684
"code_interpreter_call",
36673685
"mcp_call",
36683686
"mcp_list_tools",
@@ -3690,7 +3708,8 @@ def _construct_responses_api_input(messages: Sequence[BaseMessage]) -> list:
36903708
content_call_ids = {
36913709
block["call_id"]
36923710
for block in input_
3693-
if block.get("type") == "function_call" and "call_id" in block
3711+
if block.get("type") in ("function_call", "custom_tool_call")
3712+
and "call_id" in block
36943713
}
36953714
for tool_call in tool_calls:
36963715
if tool_call["id"] not in content_call_ids:
@@ -3841,6 +3860,15 @@ def _construct_lc_result_from_responses_api(
38413860
"error": error,
38423861
}
38433862
invalid_tool_calls.append(tool_call)
3863+
elif output.type == "custom_tool_call":
3864+
content_blocks.append(output.model_dump(exclude_none=True, mode="json"))
3865+
tool_call = {
3866+
"type": "tool_call",
3867+
"name": output.name,
3868+
"args": {"__arg1": output.input},
3869+
"id": output.call_id,
3870+
}
3871+
tool_calls.append(tool_call)
38443872
elif output.type in (
38453873
"reasoning",
38463874
"web_search_call",
@@ -4044,6 +4072,23 @@ def _advance(output_idx: int, sub_idx: Optional[int] = None) -> None:
40444072
tool_output = chunk.item.model_dump(exclude_none=True, mode="json")
40454073
tool_output["index"] = current_index
40464074
content.append(tool_output)
4075+
elif (
4076+
chunk.type == "response.output_item.done"
4077+
and chunk.item.type == "custom_tool_call"
4078+
):
4079+
_advance(chunk.output_index)
4080+
tool_output = chunk.item.model_dump(exclude_none=True, mode="json")
4081+
tool_output["index"] = current_index
4082+
content.append(tool_output)
4083+
tool_call_chunks.append(
4084+
{
4085+
"type": "tool_call_chunk",
4086+
"name": chunk.item.name,
4087+
"args": json.dumps({"__arg1": chunk.item.input}),
4088+
"id": chunk.item.call_id,
4089+
"index": current_index,
4090+
}
4091+
)
40474092
elif chunk.type == "response.function_call_arguments.delta":
40484093
_advance(chunk.output_index)
40494094
tool_call_chunks.append(
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
"""Tool helpers for ``langchain_openai``.

Re-exports :func:`custom_tool` so callers can write
``from langchain_openai.tools import custom_tool``.
"""

from langchain_openai.tools.custom_tool import custom_tool

__all__ = ["custom_tool"]

0 commit comments

Comments
 (0)