2 files changed (+88 -0) in libs/partners/openai/langchain_openai/chat_models

@@ -809,6 +809,50 @@ def with_structured_output(
                 Note: ``strict`` can only be non-null if ``method`` is
                 ``"json_schema"`` or ``"function_calling"``.
+            tools:
+                A list of tool-like objects to bind to the chat model. Requires that:
+
+                - ``method`` is ``"json_schema"`` (default).
+                - ``strict=True``
+                - ``include_raw=True``
+
+                If a model elects to call a tool, the resulting ``AIMessage`` in
+                ``"raw"`` will include tool calls.
+
+                .. dropdown:: Example
+
+                    .. code-block:: python
+
+                        from langchain.chat_models import init_chat_model
+                        from pydantic import BaseModel
+
+
+                        class ResponseSchema(BaseModel):
+                            response: str
+
+
+                        def get_weather(location: str) -> str:
+                            """Get weather at a location."""
+                            pass
+
+                        llm = init_chat_model("openai:gpt-4o-mini")
+
+                        structured_llm = llm.with_structured_output(
+                            ResponseSchema,
+                            tools=[get_weather],
+                            strict=True,
+                            include_raw=True,
+                        )
+
+                        structured_llm.invoke("What's the weather in Boston?")
+
+                    .. code-block:: python
+
+                        {
+                            "raw": AIMessage(content="", tool_calls=[...], ...),
+                            "parsing_error": None,
+                            "parsed": None,
+                        }

             kwargs: Additional keyword args aren't supported.
@@ -2552,6 +2552,50 @@ def with_structured_output(
                 Note: ``strict`` can only be non-null if ``method`` is
                 ``"json_schema"`` or ``"function_calling"``.
+            tools:
+                A list of tool-like objects to bind to the chat model. Requires that:
+
+                - ``method`` is ``"json_schema"`` (default).
+                - ``strict=True``
+                - ``include_raw=True``
+
+                If a model elects to call a tool, the resulting ``AIMessage`` in
+                ``"raw"`` will include tool calls.
+
+                .. dropdown:: Example
+
+                    .. code-block:: python
+
+                        from langchain.chat_models import init_chat_model
+                        from pydantic import BaseModel
+
+
+                        class ResponseSchema(BaseModel):
+                            response: str
+
+
+                        def get_weather(location: str) -> str:
+                            """Get weather at a location."""
+                            pass
+
+                        llm = init_chat_model("openai:gpt-4o-mini")
+
+                        structured_llm = llm.with_structured_output(
+                            ResponseSchema,
+                            tools=[get_weather],
+                            strict=True,
+                            include_raw=True,
+                        )
+
+                        structured_llm.invoke("What's the weather in Boston?")
+
+                    .. code-block:: python
+
+                        {
+                            "raw": AIMessage(content="", tool_calls=[...], ...),
+                            "parsing_error": None,
+                            "parsed": None,
+                        }

             kwargs: Additional keyword args aren't supported.
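As context for the added docstring, here is a minimal sketch of how a caller might branch on the returned dict under the documented contract. It assumes the ``ResponseSchema``, ``get_weather``, and ``structured_llm`` objects from the example above; the tool-execution and follow-up steps are illustrative, not part of the API.

    # Sketch only: branch on the structured-output result when tools are bound.
    # Assumes ResponseSchema, get_weather, and structured_llm from the example
    # in the docstring above; the re-invocation step is illustrative.
    result = structured_llm.invoke("What's the weather in Boston?")

    if result["parsed"] is not None:
        # The model answered directly with the structured schema.
        answer: ResponseSchema = result["parsed"]
        print(answer.response)
    elif result["raw"].tool_calls:
        # The model chose to call a tool instead: "parsed" is None and the
        # calls are available on the raw AIMessage.
        for tool_call in result["raw"].tool_calls:
            if tool_call["name"] == "get_weather":
                observation = get_weather(**tool_call["args"])
                # Feed the observation back to the model (e.g. as a ToolMessage)
                # and invoke again if a final structured answer is still needed.

This is presumably why the ``tools`` option requires ``include_raw=True``: when the model opts for a tool call, ``parsed`` is ``None`` and the tool calls are only reachable through the raw message.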