diff --git a/libs/core/langchain_core/language_models/chat_models.py b/libs/core/langchain_core/language_models/chat_models.py
index 76fb4e8a75398..b473a0b235837 100644
--- a/libs/core/langchain_core/language_models/chat_models.py
+++ b/libs/core/langchain_core/language_models/chat_models.py
@@ -69,7 +69,7 @@
     convert_to_json_schema,
     convert_to_openai_tool,
 )
-from langchain_core.utils.pydantic import TypeBaseModel, is_basemodel_subclass
+from langchain_core.utils.pydantic import is_basemodel_subclass
 from langchain_core.utils.utils import LC_ID_PREFIX, from_env
 
 if TYPE_CHECKING:
@@ -1650,7 +1650,7 @@ class AnswerWithJustification(BaseModel):
             )
             if isinstance(schema, type) and is_basemodel_subclass(schema):
                 output_parser: OutputParserLike = PydanticToolsParser(
-                    tools=[cast("TypeBaseModel", schema)], first_tool_only=True
+                    tools=[schema], first_tool_only=True
                 )
             else:
                 key_name = convert_to_openai_tool(schema)["function"]["name"]
diff --git a/libs/core/langchain_core/runnables/base.py b/libs/core/langchain_core/runnables/base.py
index 9ca9321965e93..3a8a056071c98 100644
--- a/libs/core/langchain_core/runnables/base.py
+++ b/libs/core/langchain_core/runnables/base.py
@@ -96,7 +96,7 @@
 )
 from langchain_core.utils.aiter import aclosing, atee, py_anext
 from langchain_core.utils.iter import safetee
-from langchain_core.utils.pydantic import create_model_v2
+from langchain_core.utils.pydantic import TypeBaseModel, create_model_v2, get_fields
 
 if TYPE_CHECKING:
     from langchain_core.callbacks.manager import (
@@ -355,14 +355,14 @@ def OutputType(self) -> type[Output]:  # noqa: N802
         raise TypeError(msg)
 
     @property
-    def input_schema(self) -> type[BaseModel]:
+    def input_schema(self) -> TypeBaseModel:
         """The type of input this `Runnable` accepts specified as a Pydantic model."""
         return self.get_input_schema()
 
     def get_input_schema(
         self,
         config: RunnableConfig | None = None,  # noqa: ARG002
-    ) -> type[BaseModel]:
+    ) -> TypeBaseModel:
         """Get a Pydantic model that can be used to validate input to the `Runnable`.
 
         `Runnable` objects that leverage the `configurable_fields` and
@@ -427,10 +427,13 @@ def add_one(x: int) -> int:
             !!! version-added "Added in version 0.3.0"
         """
-        return self.get_input_schema(config).model_json_schema()
+        schema = self.get_input_schema(config)
+        if issubclass(schema, BaseModel):
+            return schema.model_json_schema()
+        return schema.schema()
 
     @property
-    def output_schema(self) -> type[BaseModel]:
+    def output_schema(self) -> TypeBaseModel:
         """Output schema.
 
         The type of output this `Runnable` produces specified as a Pydantic model.
@@ -440,7 +443,7 @@ def output_schema(self) -> type[BaseModel]:
     def get_output_schema(
         self,
         config: RunnableConfig | None = None,  # noqa: ARG002
-    ) -> type[BaseModel]:
+    ) -> TypeBaseModel:
         """Get a Pydantic model that can be used to validate output to the `Runnable`.
 
         `Runnable` objects that leverage the `configurable_fields` and
@@ -505,7 +508,10 @@ def add_one(x: int) -> int:
             !!! version-added "Added in version 0.3.0"
         """
-        return self.get_output_schema(config).model_json_schema()
+        schema = self.get_output_schema(config)
+        if issubclass(schema, BaseModel):
+            return schema.model_json_schema()
+        return schema.schema()
 
     @property
     def config_specs(self) -> list[ConfigurableFieldSpec]:
@@ -2671,7 +2677,7 @@ def configurable_alternatives(
 
 def _seq_input_schema(
     steps: list[Runnable[Any, Any]], config: RunnableConfig | None
-) -> type[BaseModel]:
+) -> TypeBaseModel:
     # Import locally to prevent circular import
     from langchain_core.runnables.passthrough import (  # noqa: PLC0415
         RunnableAssign,
@@ -2689,7 +2695,7 @@ def _seq_input_schema(
             "RunnableSequenceInput",
             field_definitions={
                 k: (v.annotation, v.default)
-                for k, v in next_input_schema.model_fields.items()
+                for k, v in get_fields(next_input_schema).items()
                 if k not in first.mapper.steps__
             },
         )
@@ -2701,7 +2707,7 @@ def _seq_input_schema(
 
 def _seq_output_schema(
     steps: list[Runnable[Any, Any]], config: RunnableConfig | None
-) -> type[BaseModel]:
+) -> TypeBaseModel:
     # Import locally to prevent circular import
     from langchain_core.runnables.passthrough import (  # noqa: PLC0415
         RunnableAssign,
@@ -2721,7 +2727,7 @@ def _seq_output_schema(
                 field_definitions={
                     **{
                         k: (v.annotation, v.default)
-                        for k, v in prev_output_schema.model_fields.items()
+                        for k, v in get_fields(prev_output_schema).items()
                     },
                     **{
                         k: (v.annotation, v.default)
@@ -2738,11 +2744,11 @@ def _seq_output_schema(
                 "RunnableSequenceOutput",
                 field_definitions={
                     k: (v.annotation, v.default)
-                    for k, v in prev_output_schema.model_fields.items()
+                    for k, v in get_fields(prev_output_schema).items()
                     if k in last.keys
                 },
             )
-        field = prev_output_schema.model_fields[last.keys]
+        field = get_fields(prev_output_schema)[last.keys]
         return create_model_v2(
             "RunnableSequenceOutput", root=(field.annotation, field.default)
         )
@@ -2924,7 +2930,7 @@ def OutputType(self) -> type[Output]:
         return self.last.OutputType
 
     @override
-    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
+    def get_input_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         """Get the input schema of the `Runnable`.
 
         Args:
@@ -2937,9 +2943,7 @@ def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseMod
         return _seq_input_schema(self.steps, config)
 
     @override
-    def get_output_schema(
-        self, config: RunnableConfig | None = None
-    ) -> type[BaseModel]:
+    def get_output_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         """Get the output schema of the `Runnable`.
 
         Args:
@@ -3653,7 +3657,7 @@ def InputType(self) -> Any:
         return Any
 
     @override
-    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
+    def get_input_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         """Get the input schema of the `Runnable`.
 
         Args:
@@ -3664,8 +3668,7 @@ def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseMod
         """
         if all(
-            s.get_input_schema(config).model_json_schema().get("type", "object")
-            == "object"
+            s.get_input_jsonschema(config).get("type", "object") == "object"
             for s in self.steps__.values()
         ):
             # This is correct, but pydantic typings/mypy don't think so.
@@ -3674,7 +3677,7 @@ def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseMod
                 field_definitions={
                     k: (v.annotation, v.default)
                     for step in self.steps__.values()
-                    for k, v in step.get_input_schema(config).model_fields.items()
+                    for k, v in get_fields(step.get_input_schema(config)).items()
                     if k != "__root__"
                 },
             )
@@ -4460,7 +4463,7 @@ def InputType(self) -> Any:
         return Any
 
     @override
-    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
+    def get_input_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         """The Pydantic schema for the input to this `Runnable`.
 
         Args:
@@ -5437,15 +5440,13 @@ def OutputType(self) -> type[Output]:
         )
 
     @override
-    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
+    def get_input_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         if self.custom_input_type is not None:
             return super().get_input_schema(config)
         return self.bound.get_input_schema(merge_configs(self.config, config))
 
     @override
-    def get_output_schema(
-        self, config: RunnableConfig | None = None
-    ) -> type[BaseModel]:
+    def get_output_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         if self.custom_output_type is not None:
             return super().get_output_schema(config)
         return self.bound.get_output_schema(merge_configs(self.config, config))
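The core behavioral change above is the JSON-schema dispatch in `get_input_jsonschema` / `get_output_jsonschema`: pydantic v2 classes expose `model_json_schema()`, while v1-style classes only provide `.schema()`. A minimal sketch of that dispatch, using illustrative models and a standalone helper (none of this is code from the patch):

```python
from pydantic import BaseModel
from pydantic.v1 import BaseModel as BaseModelV1


class V2Input(BaseModel):  # illustrative v2 model
    question: str


class V1Input(BaseModelV1):  # illustrative v1-style model
    question: str


def to_json_schema(schema: type) -> dict:
    # Mirrors the branch added to get_input_jsonschema/get_output_jsonschema:
    # v2 models -> model_json_schema(), v1 models -> schema().
    if issubclass(schema, BaseModel):
        return schema.model_json_schema()
    return schema.schema()


print(to_json_schema(V2Input)["title"])  # V2Input
print(to_json_schema(V1Input)["title"])  # V1Input
```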
diff --git a/libs/core/langchain_core/runnables/branch.py b/libs/core/langchain_core/runnables/branch.py
index e7d40151f11e3..f526a09fb7783 100644
--- a/libs/core/langchain_core/runnables/branch.py
+++ b/libs/core/langchain_core/runnables/branch.py
@@ -13,7 +13,7 @@
     cast,
 )
 
-from pydantic import BaseModel, ConfigDict
+from pydantic import ConfigDict
 from typing_extensions import override
 
 from langchain_core.runnables.base import (
@@ -35,6 +35,7 @@
     Output,
     get_unique_config_specs,
 )
+from langchain_core.utils.pydantic import TypeBaseModel
 
 
 class RunnableBranch(RunnableSerializable[Input, Output]):
@@ -154,7 +155,7 @@ def get_lc_namespace(cls) -> list[str]:
         return ["langchain", "schema", "runnable"]
 
     @override
-    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
+    def get_input_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         runnables = (
             [self.default]
             + [r for _, r in self.branches]
@@ -162,10 +163,7 @@ def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseMod
         )
 
         for runnable in runnables:
-            if (
-                runnable.get_input_schema(config).model_json_schema().get("type")
-                is not None
-            ):
+            if runnable.get_input_jsonschema(config).get("type") is not None:
                 return runnable.get_input_schema(config)
 
         return super().get_input_schema(config)
diff --git a/libs/core/langchain_core/runnables/configurable.py b/libs/core/langchain_core/runnables/configurable.py
index 693c68b7f37e6..586186ad39c32 100644
--- a/libs/core/langchain_core/runnables/configurable.py
+++ b/libs/core/langchain_core/runnables/configurable.py
@@ -19,7 +19,7 @@
 )
 from weakref import WeakValueDictionary
 
-from pydantic import BaseModel, ConfigDict
+from pydantic import ConfigDict
 from typing_extensions import override
 
 from langchain_core.runnables.base import Runnable, RunnableSerializable
@@ -41,6 +41,7 @@
     gather_with_concurrency,
     get_unique_config_specs,
 )
+from langchain_core.utils.pydantic import TypeBaseModel
 
 if TYPE_CHECKING:
     from langchain_core.runnables.graph import Graph
@@ -90,14 +91,12 @@ def OutputType(self) -> type[Output]:
         return self.default.OutputType
 
     @override
-    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
+    def get_input_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         runnable, config = self.prepare(config)
         return runnable.get_input_schema(config)
 
     @override
-    def get_output_schema(
-        self, config: RunnableConfig | None = None
-    ) -> type[BaseModel]:
+    def get_output_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         runnable, config = self.prepare(config)
         return runnable.get_output_schema(config)
diff --git a/libs/core/langchain_core/runnables/fallbacks.py b/libs/core/langchain_core/runnables/fallbacks.py
index 2e76a9b81f68e..2c5cd491ea5b2 100644
--- a/libs/core/langchain_core/runnables/fallbacks.py
+++ b/libs/core/langchain_core/runnables/fallbacks.py
@@ -7,7 +7,7 @@
 from functools import wraps
 from typing import TYPE_CHECKING, Any, cast
 
-from pydantic import BaseModel, ConfigDict
+from pydantic import ConfigDict
 from typing_extensions import override
 
 from langchain_core.callbacks.manager import AsyncCallbackManager, CallbackManager
@@ -29,6 +29,7 @@
     get_unique_config_specs,
 )
 from langchain_core.utils.aiter import py_anext
+from langchain_core.utils.pydantic import TypeBaseModel
 
 if TYPE_CHECKING:
     from langchain_core.callbacks.manager import AsyncCallbackManagerForChainRun
@@ -116,13 +117,11 @@ def OutputType(self) -> type[Output]:
         return self.runnable.OutputType
 
     @override
-    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
+    def get_input_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         return self.runnable.get_input_schema(config)
 
     @override
-    def get_output_schema(
-        self, config: RunnableConfig | None = None
-    ) -> type[BaseModel]:
+    def get_output_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         return self.runnable.get_output_schema(config)
 
     @property
diff --git a/libs/core/langchain_core/runnables/graph.py b/libs/core/langchain_core/runnables/graph.py
index bf1d071bf209f..de48aae241987 100644
--- a/libs/core/langchain_core/runnables/graph.py
+++ b/libs/core/langchain_core/runnables/graph.py
@@ -19,13 +19,15 @@
 
 from langchain_core.load.serializable import to_json_not_implemented
 from langchain_core.runnables.base import Runnable, RunnableSerializable
-from langchain_core.utils.pydantic import _IgnoreUnserializable, is_basemodel_subclass
+from langchain_core.utils.pydantic import (
+    TypeBaseModel,
+    _IgnoreUnserializable,
+    is_basemodel_subclass,
+)
 
 if TYPE_CHECKING:
     from collections.abc import Sequence
 
-    from pydantic import BaseModel
-
     from langchain_core.runnables.base import Runnable as RunnableType
 
 
@@ -98,7 +100,7 @@ class Node(NamedTuple):
     """The unique identifier of the node."""
     name: str
     """The name of the node."""
-    data: type[BaseModel] | RunnableType | None
+    data: TypeBaseModel | RunnableType | None
     """The data of the node."""
     metadata: dict[str, Any] | None
     """Optional metadata for the node. """
@@ -178,7 +180,7 @@ class MermaidDrawMethod(Enum):
 
 def node_data_str(
     id: str,
-    data: type[BaseModel] | RunnableType | None,
+    data: TypeBaseModel | RunnableType | None,
 ) -> str:
     """Convert the data of a node to a string.
 
@@ -312,7 +314,7 @@ def next_id(self) -> str:
 
     def add_node(
         self,
-        data: type[BaseModel] | RunnableType | None,
+        data: TypeBaseModel | RunnableType | None,
         id: str | None = None,
         *,
         metadata: dict[str, Any] | None = None,
diff --git a/libs/core/langchain_core/runnables/passthrough.py b/libs/core/langchain_core/runnables/passthrough.py
index 329e3a174848e..7046cb49ef9ef 100644
--- a/libs/core/langchain_core/runnables/passthrough.py
+++ b/libs/core/langchain_core/runnables/passthrough.py
@@ -12,7 +12,7 @@
     cast,
 )
 
-from pydantic import BaseModel, RootModel
+from pydantic import RootModel
 from typing_extensions import override
 
 from langchain_core.runnables.base import (
@@ -35,7 +35,7 @@
 )
 from langchain_core.utils.aiter import atee, py_anext
 from langchain_core.utils.iter import safetee
-from langchain_core.utils.pydantic import create_model_v2
+from langchain_core.utils.pydantic import TypeBaseModel, create_model_v2, get_fields
 
 if TYPE_CHECKING:
     from collections.abc import AsyncIterator, Iterator, Mapping
@@ -426,7 +426,7 @@ def get_name(self, suffix: str | None = None, *, name: str | None = None) -> str
         return super().get_name(suffix, name=name)
 
     @override
-    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
+    def get_input_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         map_input_schema = self.mapper.get_input_schema(config)
         if not issubclass(map_input_schema, RootModel):
             # ie. it's a dict
@@ -435,9 +435,7 @@ def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseMod
         return super().get_input_schema(config)
 
     @override
-    def get_output_schema(
-        self, config: RunnableConfig | None = None
-    ) -> type[BaseModel]:
+    def get_output_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         map_input_schema = self.mapper.get_input_schema(config)
         map_output_schema = self.mapper.get_output_schema(config)
         if not issubclass(map_input_schema, RootModel) and not issubclass(
@@ -445,7 +443,7 @@ def get_output_schema(
         ):
             fields = {}
 
-            for name, field_info in map_input_schema.model_fields.items():
+            for name, field_info in get_fields(map_input_schema).items():
                 fields[name] = (field_info.annotation, field_info.default)
 
             for name, field_info in map_output_schema.model_fields.items():
diff --git a/libs/core/langchain_core/tools/base.py b/libs/core/langchain_core/tools/base.py
index def3b2270897d..67f1ffb71dd98 100644
--- a/libs/core/langchain_core/tools/base.py
+++ b/libs/core/langchain_core/tools/base.py
@@ -284,7 +284,7 @@ def create_schema_from_function(
     parse_docstring: bool = False,
     error_on_invalid_docstring: bool = False,
     include_injected: bool = True,
-) -> type[BaseModel]:
+) -> TypeBaseModel:
     """Create a Pydantic schema from a function's signature.
 
     Args:
@@ -530,11 +530,8 @@ def args(self) -> dict:
         """
         if isinstance(self.args_schema, dict):
             json_schema = self.args_schema
-        elif self.args_schema and issubclass(self.args_schema, BaseModelV1):
-            json_schema = self.args_schema.schema()
         else:
-            input_schema = self.get_input_schema()
-            json_schema = input_schema.model_json_schema()
+            json_schema = self.get_input_jsonschema()
         return json_schema["properties"]
 
     @property
@@ -565,7 +562,7 @@ def tool_call_schema(self) -> ArgsSchema:
     # --- Runnable ---
 
     @override
-    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
+    def get_input_schema(self, config: RunnableConfig | None = None) -> TypeBaseModel:
         """The tool's input schema.
 
         Args:
@@ -656,7 +653,12 @@ def _parse_input(
                     tool_input[k] = tool_call_id
             result = input_args.model_validate(tool_input)
             result_dict = result.model_dump()
-        elif issubclass(input_args, BaseModelV1):
+            return {
+                k: getattr(result, k)
+                for k, v in result_dict.items()
+                if k in tool_input
+            }
+        if issubclass(input_args, BaseModelV1):
             for k, v in get_all_basemodel_annotations(input_args).items():
                 if _is_injected_arg_type(v, injected_type=InjectedToolCallId):
                     if tool_call_id is None:
@@ -669,16 +671,15 @@ def _parse_input(
                         )
                         raise ValueError(msg)
                     tool_input[k] = tool_call_id
-            result = input_args.parse_obj(tool_input)
-            result_dict = result.dict()
-        else:
-            msg = (
-                f"args_schema must be a Pydantic BaseModel, got {self.args_schema}"
-            )
-            raise NotImplementedError(msg)
-        return {
-            k: getattr(result, k) for k, v in result_dict.items() if k in tool_input
-        }
+            result_v1 = input_args.parse_obj(tool_input)
+            result_dict = result_v1.dict()
+            return {
+                k: getattr(result_v1, k)
+                for k, v in result_dict.items()
+                if k in tool_input
+            }
+        msg = f"args_schema must be a Pydantic BaseModel, got {self.args_schema}"
+        raise NotImplementedError(msg)
         return tool_input
 
     @abstractmethod
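The tools change keeps a dedicated pydantic-v1 branch in `BaseTool._parse_input` and routes the `.args` property through `get_input_jsonschema()`. A usage sketch of why that matters, with a hypothetical v1-style `args_schema` (assuming v1 schemas remain accepted for `args_schema`, as the retained v1 branch suggests):

```python
from pydantic.v1 import BaseModel as BaseModelV1, Field

from langchain_core.tools import StructuredTool


class SearchArgsV1(BaseModelV1):  # hypothetical v1-style args schema
    query: str = Field(..., description="What to search for")


def search(query: str) -> str:
    return f"results for {query}"


tool = StructuredTool.from_function(
    func=search,
    name="search",
    description="Search for things.",
    args_schema=SearchArgsV1,
)

# .args is now derived via get_input_jsonschema(), which handles v1 and v2;
# invoking the tool exercises the parse_obj()/.dict() path in _parse_input.
print(tool.args)
print(tool.invoke({"query": "langchain"}))
```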
diff --git a/libs/core/langchain_core/tools/convert.py b/libs/core/langchain_core/tools/convert.py
index cf16aef45af18..6678b909f2fe3 100644
--- a/libs/core/langchain_core/tools/convert.py
+++ b/libs/core/langchain_core/tools/convert.py
@@ -11,6 +11,7 @@
 from langchain_core.tools.base import ArgsSchema, BaseTool
 from langchain_core.tools.simple import Tool
 from langchain_core.tools.structured import StructuredTool
+from langchain_core.utils.pydantic import TypeBaseModel
 
 
 @overload
@@ -238,7 +239,7 @@ def _tool_factory(dec_func: Callable | Runnable) -> BaseTool:
         if isinstance(dec_func, Runnable):
             runnable = dec_func
 
-            if runnable.input_schema.model_json_schema().get("type") != "object":
+            if runnable.get_input_jsonschema().get("type") != "object":
                 msg = "Runnable must have an object schema."
                 raise ValueError(msg)
 
@@ -355,7 +356,7 @@ def _partial(func: Callable | Runnable) -> BaseTool:
 
 def _get_description_from_runnable(runnable: Runnable) -> str:
     """Generate a placeholder description of a runnable."""
-    input_schema = runnable.input_schema.model_json_schema()
+    input_schema = runnable.get_input_jsonschema()
     return f"Takes {input_schema}."
 
 
@@ -381,7 +382,7 @@ def _get_schema_from_runnable_and_arg_types(
 
 def convert_runnable_to_tool(
     runnable: Runnable,
-    args_schema: type[BaseModel] | None = None,
+    args_schema: TypeBaseModel | None = None,
     *,
     name: str | None = None,
     description: str | None = None,
@@ -404,7 +405,7 @@ def convert_runnable_to_tool(
     description = description or _get_description_from_runnable(runnable)
     name = name or runnable.get_name()
 
-    schema = runnable.input_schema.model_json_schema()
+    schema = runnable.get_input_jsonschema()
     if schema.get("type") == "string":
         return Tool(
             name=name,
diff --git a/libs/core/langchain_core/utils/pydantic.py b/libs/core/langchain_core/utils/pydantic.py
index 9d3b228a59197..40aa2033c9851 100644
--- a/libs/core/langchain_core/utils/pydantic.py
+++ b/libs/core/langchain_core/utils/pydantic.py
@@ -68,8 +68,8 @@ def get_pydantic_major_version() -> int:
 IS_PYDANTIC_V1 = PYDANTIC_VERSION.major == 1
 IS_PYDANTIC_V2 = PYDANTIC_VERSION.major == 2
 
-PydanticBaseModel = BaseModel
-TypeBaseModel = type[BaseModel]
+PydanticBaseModel = BaseModel | BaseModelV1
+TypeBaseModel = type[BaseModel] | type[BaseModelV1]
 
 TBaseModel = TypeVar("TBaseModel", bound=PydanticBaseModel)
 
@@ -205,7 +205,7 @@ def _create_subset_model_v1(
     *,
     descriptions: dict | None = None,
     fn_description: str | None = None,
-) -> type[BaseModel]:
+) -> type[BaseModelV1]:
     """Create a Pydantic model with only a subset of model's fields."""
     fields = {}
 
@@ -276,7 +276,7 @@ def _create_subset_model(
     *,
     descriptions: dict | None = None,
     fn_description: str | None = None,
-) -> type[BaseModel]:
+) -> TypeBaseModel:
     """Create subset model using the same pydantic version as the input model.
 
     Returns:
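The `utils/pydantic.py` hunk is what makes the annotation changes elsewhere meaningful: `TypeBaseModel` is now a union over v1 and v2 model classes, and call sites read fields through `get_fields()` instead of touching `model_fields` or `__fields__` directly. A short sketch with assumed example models:

```python
from pydantic import BaseModel
from pydantic.v1 import BaseModel as BaseModelV1

from langchain_core.utils.pydantic import TypeBaseModel, get_fields


class V2Model(BaseModel):  # illustrative
    x: int = 1


class V1Model(BaseModelV1):  # illustrative
    x: int = 1


def field_names(model: TypeBaseModel) -> list[str]:
    # get_fields() returns a name -> field-info mapping for either version.
    return list(get_fields(model))


print(field_names(V2Model))  # ['x']
print(field_names(V1Model))  # ['x']
```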
diff --git a/libs/core/tests/unit_tests/output_parsers/test_pydantic_parser.py b/libs/core/tests/unit_tests/output_parsers/test_pydantic_parser.py
index 2df7de3575665..6573450ba73d8 100644
--- a/libs/core/tests/unit_tests/output_parsers/test_pydantic_parser.py
+++ b/libs/core/tests/unit_tests/output_parsers/test_pydantic_parser.py
@@ -179,7 +179,7 @@ class SampleModel(BaseModel):
     # Ignoring mypy error that appears in python 3.8, but not 3.11.
     # This seems to be functionally correct, so we'll ignore the error.
     pydantic_parser = PydanticOutputParser[SampleModel](pydantic_object=SampleModel)
-    schema = pydantic_parser.get_output_schema().model_json_schema()
+    schema = pydantic_parser.get_output_jsonschema()
 
     assert schema == {
         "properties": {
diff --git a/libs/core/tests/unit_tests/runnables/test_runnable.py b/libs/core/tests/unit_tests/runnables/test_runnable.py
index c8afa1eea5d62..246548fdb62d6 100644
--- a/libs/core/tests/unit_tests/runnables/test_runnable.py
+++ b/libs/core/tests/unit_tests/runnables/test_runnable.py
@@ -485,7 +485,7 @@ def foo(x: int) -> int:
 
     foo_ = RunnableLambda(foo)
 
-    assert foo_.assign(bar=lambda _: "foo").get_output_schema().model_json_schema() == {
+    assert foo_.assign(bar=lambda _: "foo").get_output_jsonschema() == {
         "properties": {"bar": {"title": "Bar"}, "root": {"title": "Root"}},
         "required": ["root", "bar"],
         "title": "RunnableAssignOutput",
diff --git a/libs/core/tests/unit_tests/test_tools.py b/libs/core/tests/unit_tests/test_tools.py
index 9fdd2afc8199e..39326390dc9a9 100644
--- a/libs/core/tests/unit_tests/test_tools.py
+++ b/libs/core/tests/unit_tests/test_tools.py
@@ -1793,7 +1793,7 @@ def _run(self, x: int, y: str) -> Any:
             return y
 
     tool_ = InheritedInjectedArgTool()
-    assert tool_.get_input_schema().model_json_schema() == {
+    assert tool_.get_input_jsonschema() == {
         "title": "FooSchema",  # Matches the title from the provided schema
         "description": "foo.",
         "type": "object",
@@ -1970,7 +1970,7 @@ def _run(self, *args: Any, **kwargs: Any) -> str:
 
     tool = SomeTool(name="some_tool", description="some description")
 
-    assert tool.get_input_schema().model_json_schema() == {
+    assert tool.get_input_jsonschema() == {
         "properties": {
             "a": {"title": "A", "type": "integer"},
             "b": {"title": "B", "type": "string"},
@@ -2468,7 +2468,7 @@ def test_structured_tool_args_schema_dict() -> None:
     assert _get_tool_call_json_schema(tool) == args_schema
     # test that the input schema is the same as the parent (Runnable) input schema
     assert (
-        tool.get_input_schema().model_json_schema()
+        tool.get_input_jsonschema()
         == create_model_v2(
             tool.get_name("Input"),
             root=tool.InputType,
@@ -2504,7 +2504,7 @@ def test_simple_tool_args_schema_dict() -> None:
     assert _get_tool_call_json_schema(tool) == args_schema
     # test that the input schema is the same as the parent (Runnable) input schema
     assert (
-        tool.get_input_schema().model_json_schema()
+        tool.get_input_jsonschema()
         == create_model_v2(
             tool.get_name("Input"),
             root=tool.InputType,
diff --git a/libs/langchain/langchain_classic/chains/hyde/base.py b/libs/langchain/langchain_classic/chains/hyde/base.py
index 7ee9780b4ab85..e4b4a43ef7b16 100644
--- a/libs/langchain/langchain_classic/chains/hyde/base.py
+++ b/libs/langchain/langchain_classic/chains/hyde/base.py
@@ -40,7 +40,7 @@ class HypotheticalDocumentEmbedder(Chain, Embeddings):
     @property
     def input_keys(self) -> list[str]:
         """Input keys for Hyde's LLM chain."""
-        return self.llm_chain.input_schema.model_json_schema()["required"]
+        return self.llm_chain.get_input_jsonschema()["required"]
 
     @property
     def output_keys(self) -> list[str]:
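For callers, the updated tests show the intended migration: ask the `Runnable` for JSON Schema directly instead of chaining `.get_input_schema().model_json_schema()`. A small usage sketch (`add_one` is just a stand-in; with a v2-backed schema the two spellings should agree):

```python
from langchain_core.runnables import RunnableLambda


def add_one(x: int) -> int:
    return x + 1


runnable = RunnableLambda(add_one)

old_style = runnable.get_input_schema().model_json_schema()
new_style = runnable.get_input_jsonschema()

assert old_style == new_style
print(new_style.get("title"), new_style.get("type"))
```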