Skip to content

Commit 915c446

Browse files
cbornet and mdrxy authored
chore(core): add ruff rule PLR2004 (#33706)
Co-authored-by: Mason Daugherty <[email protected]>
1 parent d1e2099 commit 915c446

File tree

10 files changed

+93
-52
lines changed

10 files changed

+93
-52
lines changed

libs/core/langchain_core/language_models/chat_models.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1763,9 +1763,12 @@ def _gen_info_and_msg_metadata(
17631763
}
17641764

17651765

1766+
_MAX_CLEANUP_DEPTH = 100
1767+
1768+
17661769
def _cleanup_llm_representation(serialized: Any, depth: int) -> None:
17671770
"""Remove non-serializable objects from a serialized object."""
1768-
if depth > 100: # Don't cooperate for pathological cases
1771+
if depth > _MAX_CLEANUP_DEPTH: # Don't cooperate for pathological cases
17691772
return
17701773

17711774
if not isinstance(serialized, dict):

libs/core/langchain_core/messages/utils.py

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -328,12 +328,16 @@ def _convert_to_message(message: MessageLikeRepresentation) -> BaseMessage:
328328
"""
329329
if isinstance(message, BaseMessage):
330330
message_ = message
331-
elif isinstance(message, str):
332-
message_ = _create_message_from_message_type("human", message)
333-
elif isinstance(message, Sequence) and len(message) == 2:
334-
# mypy doesn't realise this can't be a string given the previous branch
335-
message_type_str, template = message # type: ignore[misc]
336-
message_ = _create_message_from_message_type(message_type_str, template)
331+
elif isinstance(message, Sequence):
332+
if isinstance(message, str):
333+
message_ = _create_message_from_message_type("human", message)
334+
else:
335+
try:
336+
message_type_str, template = message
337+
except ValueError as e:
338+
msg = "Message as a sequence must be (role string, template)"
339+
raise NotImplementedError(msg) from e
340+
message_ = _create_message_from_message_type(message_type_str, template)
337341
elif isinstance(message, dict):
338342
msg_kwargs = message.copy()
339343
try:

libs/core/langchain_core/prompts/chat.py

Lines changed: 21 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1343,11 +1343,25 @@ def _create_template_from_message_type(
13431343
raise ValueError(msg)
13441344
var_name = template[1:-1]
13451345
message = MessagesPlaceholder(variable_name=var_name, optional=True)
1346-
elif len(template) == 2 and isinstance(template[1], bool):
1347-
var_name_wrapped, is_optional = template
1346+
else:
1347+
try:
1348+
var_name_wrapped, is_optional = template
1349+
except ValueError as e:
1350+
msg = (
1351+
"Unexpected arguments for placeholder message type."
1352+
" Expected either a single string variable name"
1353+
" or a list of [variable_name: str, is_optional: bool]."
1354+
f" Got: {template}"
1355+
)
1356+
raise ValueError(msg) from e
1357+
1358+
if not isinstance(is_optional, bool):
1359+
msg = f"Expected is_optional to be a boolean. Got: {is_optional}"
1360+
raise ValueError(msg) # noqa: TRY004
1361+
13481362
if not isinstance(var_name_wrapped, str):
13491363
msg = f"Expected variable name to be a string. Got: {var_name_wrapped}"
1350-
raise ValueError(msg) # noqa:TRY004
1364+
raise ValueError(msg) # noqa: TRY004
13511365
if var_name_wrapped[0] != "{" or var_name_wrapped[-1] != "}":
13521366
msg = (
13531367
f"Invalid placeholder template: {var_name_wrapped}."
@@ -1357,14 +1371,6 @@ def _create_template_from_message_type(
13571371
var_name = var_name_wrapped[1:-1]
13581372

13591373
message = MessagesPlaceholder(variable_name=var_name, optional=is_optional)
1360-
else:
1361-
msg = (
1362-
"Unexpected arguments for placeholder message type."
1363-
" Expected either a single string variable name"
1364-
" or a list of [variable_name: str, is_optional: bool]."
1365-
f" Got: {template}"
1366-
)
1367-
raise ValueError(msg)
13681374
else:
13691375
msg = (
13701376
f"Unexpected message type: {message_type}. Use one of 'human',"
@@ -1418,10 +1424,11 @@ def _convert_to_message_template(
14181424
)
14191425
raise ValueError(msg)
14201426
message = (message["role"], message["content"])
1421-
if len(message) != 2:
1427+
try:
1428+
message_type_str, template = message
1429+
except ValueError as e:
14221430
msg = f"Expected 2-tuple of (role, template), got {message}"
1423-
raise ValueError(msg)
1424-
message_type_str, template = message
1431+
raise ValueError(msg) from e
14251432
if isinstance(message_type_str, str):
14261433
message_ = _create_template_from_message_type(
14271434
message_type_str, template, template_format=template_format

libs/core/langchain_core/runnables/base.py

Lines changed: 21 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -118,6 +118,8 @@
118118

119119
Other = TypeVar("Other")
120120

121+
_RUNNABLE_GENERIC_NUM_ARGS = 2 # Input and Output
122+
121123

122124
class Runnable(ABC, Generic[Input, Output]):
123125
"""A unit of work that can be invoked, batched, streamed, transformed and composed.
@@ -309,15 +311,18 @@ def InputType(self) -> type[Input]: # noqa: N802
309311
for base in self.__class__.mro():
310312
if hasattr(base, "__pydantic_generic_metadata__"):
311313
metadata = base.__pydantic_generic_metadata__
312-
if "args" in metadata and len(metadata["args"]) == 2:
314+
if (
315+
"args" in metadata
316+
and len(metadata["args"]) == _RUNNABLE_GENERIC_NUM_ARGS
317+
):
313318
return metadata["args"][0]
314319

315320
# If we didn't find a Pydantic model in the parent classes,
316321
# then loop through __orig_bases__. This corresponds to
317322
# Runnables that are not pydantic models.
318323
for cls in self.__class__.__orig_bases__: # type: ignore[attr-defined]
319324
type_args = get_args(cls)
320-
if type_args and len(type_args) == 2:
325+
if type_args and len(type_args) == _RUNNABLE_GENERIC_NUM_ARGS:
321326
return type_args[0]
322327

323328
msg = (
@@ -340,12 +345,15 @@ def OutputType(self) -> type[Output]: # noqa: N802
340345
for base in self.__class__.mro():
341346
if hasattr(base, "__pydantic_generic_metadata__"):
342347
metadata = base.__pydantic_generic_metadata__
343-
if "args" in metadata and len(metadata["args"]) == 2:
348+
if (
349+
"args" in metadata
350+
and len(metadata["args"]) == _RUNNABLE_GENERIC_NUM_ARGS
351+
):
344352
return metadata["args"][1]
345353

346354
for cls in self.__class__.__orig_bases__: # type: ignore[attr-defined]
347355
type_args = get_args(cls)
348-
if type_args and len(type_args) == 2:
356+
if type_args and len(type_args) == _RUNNABLE_GENERIC_NUM_ARGS:
349357
return type_args[1]
350358

351359
msg = (
@@ -2750,6 +2758,9 @@ def _seq_output_schema(
27502758
return last.get_output_schema(config)
27512759

27522760

2761+
_RUNNABLE_SEQUENCE_MIN_STEPS = 2
2762+
2763+
27532764
class RunnableSequence(RunnableSerializable[Input, Output]):
27542765
"""Sequence of `Runnable` objects, where the output of one is the input of the next.
27552766
@@ -2872,8 +2883,11 @@ def __init__(
28722883
steps_flat.extend(step.steps)
28732884
else:
28742885
steps_flat.append(coerce_to_runnable(step))
2875-
if len(steps_flat) < 2:
2876-
msg = f"RunnableSequence must have at least 2 steps, got {len(steps_flat)}"
2886+
if len(steps_flat) < _RUNNABLE_SEQUENCE_MIN_STEPS:
2887+
msg = (
2888+
f"RunnableSequence must have at least {_RUNNABLE_SEQUENCE_MIN_STEPS} "
2889+
f"steps, got {len(steps_flat)}"
2890+
)
28772891
raise ValueError(msg)
28782892
super().__init__(
28792893
first=steps_flat[0],
@@ -4477,7 +4491,7 @@ def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseMod
44774491
# on itemgetter objects, so we have to parse the repr
44784492
items = str(func).replace("operator.itemgetter(", "")[:-1].split(", ")
44794493
if all(
4480-
item[0] == "'" and item[-1] == "'" and len(item) > 2 for item in items
4494+
item[0] == "'" and item[-1] == "'" and item != "''" for item in items
44814495
):
44824496
fields = {item[1:-1]: (Any, ...) for item in items}
44834497
# It's a dict, lol

libs/core/langchain_core/runnables/branch.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,8 @@
3636
get_unique_config_specs,
3737
)
3838

39+
_MIN_BRANCHES = 2
40+
3941

4042
class RunnableBranch(RunnableSerializable[Input, Output]):
4143
"""`Runnable` that selects which branch to run based on a condition.
@@ -91,7 +93,7 @@ def __init__(
9193
TypeError: If a branch is not a `tuple` or `list`.
9294
ValueError: If a branch is not of length `2`.
9395
"""
94-
if len(branches) < 2:
96+
if len(branches) < _MIN_BRANCHES:
9597
msg = "RunnableBranch requires at least two branches"
9698
raise ValueError(msg)
9799

@@ -118,7 +120,7 @@ def __init__(
118120
)
119121
raise TypeError(msg)
120122

121-
if len(branch) != 2:
123+
if len(branch) != _MIN_BRANCHES:
122124
msg = (
123125
f"RunnableBranch branches must be "
124126
f"tuples or lists of length 2, not {len(branch)}"

libs/core/langchain_core/runnables/graph_mermaid.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -454,7 +454,10 @@ def _render_mermaid_using_api(
454454
return img_bytes
455455

456456
# If we get a server error (5xx), retry
457-
if 500 <= response.status_code < 600 and attempt < max_retries:
457+
if (
458+
requests.codes.internal_server_error <= response.status_code
459+
and attempt < max_retries
460+
):
458461
# Exponential backoff with jitter
459462
sleep_time = retry_delay * (2**attempt) * (0.5 + 0.5 * random.random()) # noqa: S311 not used for crypto
460463
time.sleep(sleep_time)

libs/core/langchain_core/tools/base.py

Lines changed: 22 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -872,16 +872,19 @@ def run(
872872
tool_kwargs |= {config_param: config}
873873
response = context.run(self._run, *tool_args, **tool_kwargs)
874874
if self.response_format == "content_and_artifact":
875-
if not isinstance(response, tuple) or len(response) != 2:
876-
msg = (
877-
"Since response_format='content_and_artifact' "
878-
"a two-tuple of the message content and raw tool output is "
879-
f"expected. Instead generated response of type: "
880-
f"{type(response)}."
881-
)
875+
msg = (
876+
"Since response_format='content_and_artifact' "
877+
"a two-tuple of the message content and raw tool output is "
878+
f"expected. Instead, generated response is of type: "
879+
f"{type(response)}."
880+
)
881+
if not isinstance(response, tuple):
882882
error_to_raise = ValueError(msg)
883883
else:
884-
content, artifact = response
884+
try:
885+
content, artifact = response
886+
except ValueError:
887+
error_to_raise = ValueError(msg)
885888
else:
886889
content = response
887890
except (ValidationError, ValidationErrorV1) as e:
@@ -998,16 +1001,19 @@ async def arun(
9981001
coro = self._arun(*tool_args, **tool_kwargs)
9991002
response = await coro_with_context(coro, context)
10001003
if self.response_format == "content_and_artifact":
1001-
if not isinstance(response, tuple) or len(response) != 2:
1002-
msg = (
1003-
"Since response_format='content_and_artifact' "
1004-
"a two-tuple of the message content and raw tool output is "
1005-
f"expected. Instead generated response of type: "
1006-
f"{type(response)}."
1007-
)
1004+
msg = (
1005+
"Since response_format='content_and_artifact' "
1006+
"a two-tuple of the message content and raw tool output is "
1007+
f"expected. Instead, generated response is of type: "
1008+
f"{type(response)}."
1009+
)
1010+
if not isinstance(response, tuple):
10081011
error_to_raise = ValueError(msg)
10091012
else:
1010-
content, artifact = response
1013+
try:
1014+
content, artifact = response
1015+
except ValueError:
1016+
error_to_raise = ValueError(msg)
10111017
else:
10121018
content = response
10131019
except ValidationError as e:

libs/core/langchain_core/utils/function_calling.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -653,6 +653,9 @@ class Person(BaseModel):
653653
return messages
654654

655655

656+
_MIN_DOCSTRING_BLOCKS = 2
657+
658+
656659
def _parse_google_docstring(
657660
docstring: str | None,
658661
args: list[str],
@@ -671,7 +674,7 @@ def _parse_google_docstring(
671674
arg for arg in args if arg not in {"run_manager", "callbacks", "return"}
672675
}
673676
if filtered_annotations and (
674-
len(docstring_blocks) < 2
677+
len(docstring_blocks) < _MIN_DOCSTRING_BLOCKS
675678
or not any(block.startswith("Args:") for block in docstring_blocks[1:])
676679
):
677680
msg = "Found invalid Google-Style docstring."

libs/core/langchain_core/utils/pydantic.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -65,8 +65,8 @@ def get_pydantic_major_version() -> int:
6565
PYDANTIC_MAJOR_VERSION = PYDANTIC_VERSION.major
6666
PYDANTIC_MINOR_VERSION = PYDANTIC_VERSION.minor
6767

68-
IS_PYDANTIC_V1 = PYDANTIC_VERSION.major == 1
69-
IS_PYDANTIC_V2 = PYDANTIC_VERSION.major == 2
68+
IS_PYDANTIC_V1 = False
69+
IS_PYDANTIC_V2 = True
7070

7171
PydanticBaseModel = BaseModel
7272
TypeBaseModel = type[BaseModel]

libs/core/pyproject.toml

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,6 @@ ignore = [
104104
"ANN401", # No Any types
105105
"BLE", # Blind exceptions
106106
"ERA", # No commented-out code
107-
"PLR2004", # Comparison to magic number
108107
]
109108
unfixable = [
110109
"B028", # People should intentionally tune the stacklevel
@@ -125,7 +124,7 @@ ignore-var-parameters = true # ignore missing documentation for *args and **kwa
125124
"langchain_core/utils/mustache.py" = [ "PLW0603",]
126125
"langchain_core/sys_info.py" = [ "T201",]
127126
"tests/unit_tests/test_tools.py" = [ "ARG",]
128-
"tests/**" = [ "D1", "S", "SLF",]
127+
"tests/**" = [ "D1", "PLR2004", "S", "SLF",]
129128
"scripts/**" = [ "INP", "S",]
130129

131130
[tool.coverage.run]

0 commit comments

Comments (0)