Commit 03e8327

cbornet and mdrxy authored
core: Ruff preview fixes (#31877)
Auto-fixes from `uv run ruff check --fix --unsafe-fixes --preview`

Co-authored-by: Mason Daugherty <[email protected]>
1 parent ba144c9 commit 03e8327
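Every hunk below is a mechanical auto-fix produced by the command quoted in the commit message. A minimal sketch of reproducing it locally, assuming `uv` manages the repository environment and Ruff is configured in the repo's `pyproject.toml`:

    # Enable preview (not-yet-stable) rules and apply fixes, including
    # "unsafe" ones that Ruff cannot guarantee are behavior-preserving.
    uv run ruff check --fix --unsafe-fixes --preview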


49 files changed: +267 -271 lines

libs/core/langchain_core/_api/deprecation.py

Lines changed: 3 additions & 3 deletions
@@ -469,7 +469,7 @@ def warn_deprecated(
 
     if not message:
         message = ""
-        _package = (
+        package_ = (
             package or name.split(".")[0].replace("_", "-")
             if "." in name
             else "LangChain"
@@ -483,14 +483,14 @@ def warn_deprecated(
         if pending:
             message += " will be deprecated in a future version"
         else:
-            message += f" was deprecated in {_package} {since}"
+            message += f" was deprecated in {package_} {since}"
 
         if removal:
             message += f" and will be removed {removal}"
 
         if alternative_import:
             alt_package = alternative_import.split(".")[0].replace("_", "-")
-            if alt_package == _package:
+            if alt_package == package_:
                 message += f". Use {alternative_import} instead."
             else:
                 alt_module, alt_name = alternative_import.rsplit(".", 1)
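The `_package` to `package_` rename (repeated below for `_text_splitter`, `_mimetype`, `_stop`, and others) follows the PEP 8 convention that a trailing underscore avoids a clash with an existing name, here the `package` parameter, whereas a leading underscore conventionally marks private API, which a throwaway local is not. A minimal illustration with hypothetical names:

    def describe(package: str | None, name: str) -> str:
        # Trailing underscore: distinct from the `package` argument,
        # without the "private" connotation of `_package`.
        package_ = package or name.split(".")[0]
        return f"{name} lives in {package_}"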

libs/core/langchain_core/caches.py

Lines changed: 1 addition & 1 deletion
@@ -194,7 +194,7 @@ def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> N
         """
         if self._maxsize is not None and len(self._cache) == self._maxsize:
             del self._cache[next(iter(self._cache))]
-        self._cache[(prompt, llm_string)] = return_val
+        self._cache[prompt, llm_string] = return_val
 
     @override
     def clear(self, **kwargs: Any) -> None:
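The parentheses dropped here are redundant: a comma-separated subscript already forms a tuple, so `d[a, b]` and `d[(a, b)]` are the same operation. A quick check (illustrative, not from the commit):

    cache: dict[tuple[str, str], int] = {}
    cache["prompt", "llm"] = 1            # key is the tuple ("prompt", "llm")
    assert cache[("prompt", "llm")] == 1  # identical lookup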

libs/core/langchain_core/document_loaders/base.py

Lines changed: 3 additions & 3 deletions
@@ -60,11 +60,11 @@ def load_and_split(
                 )
                 raise ImportError(msg) from e
 
-            _text_splitter: TextSplitter = RecursiveCharacterTextSplitter()
+            text_splitter_: TextSplitter = RecursiveCharacterTextSplitter()
         else:
-            _text_splitter = text_splitter
+            text_splitter_ = text_splitter
         docs = self.load()
-        return _text_splitter.split_documents(docs)
+        return text_splitter_.split_documents(docs)
 
     # Attention: This method will be upgraded into an abstractmethod once it's
     # implemented in all the existing subclasses.

libs/core/langchain_core/documents/base.py

Lines changed: 3 additions & 3 deletions
@@ -201,14 +201,14 @@ def from_path(
             Blob instance
         """
         if mime_type is None and guess_type:
-            _mimetype = mimetypes.guess_type(path)[0] if guess_type else None
+            mimetype = mimetypes.guess_type(path)[0] if guess_type else None
         else:
-            _mimetype = mime_type
+            mimetype = mime_type
         # We do not load the data immediately, instead we treat the blob as a
         # reference to the underlying data.
         return cls(
             data=None,
-            mimetype=_mimetype,
+            mimetype=mimetype,
             encoding=encoding,
             path=path,
             metadata=metadata if metadata is not None else {},

libs/core/langchain_core/indexing/api.py

Lines changed: 6 additions & 6 deletions
@@ -273,7 +273,7 @@ def index(
     vector_store: Union[VectorStore, DocumentIndex],
     *,
     batch_size: int = 100,
-    cleanup: Literal["incremental", "full", "scoped_full", None] = None,
+    cleanup: Optional[Literal["incremental", "full", "scoped_full"]] = None,
     source_id_key: Union[str, Callable[[Document], str], None] = None,
     cleanup_batch_size: int = 1_000,
     force_update: bool = False,
@@ -540,10 +540,10 @@ def index(
             )
             raise AssertionError(msg)
 
-        _source_ids = cast("Sequence[str]", source_ids)
+        source_ids_ = cast("Sequence[str]", source_ids)
 
         while uids_to_delete := record_manager.list_keys(
-            group_ids=_source_ids, before=index_start_dt, limit=cleanup_batch_size
+            group_ids=source_ids_, before=index_start_dt, limit=cleanup_batch_size
         ):
             # Then delete from vector store.
             _delete(destination, uids_to_delete)
@@ -609,7 +609,7 @@ async def aindex(
     vector_store: Union[VectorStore, DocumentIndex],
     *,
     batch_size: int = 100,
-    cleanup: Literal["incremental", "full", "scoped_full", None] = None,
+    cleanup: Optional[Literal["incremental", "full", "scoped_full"]] = None,
     source_id_key: Union[str, Callable[[Document], str], None] = None,
     cleanup_batch_size: int = 1_000,
     force_update: bool = False,
@@ -881,10 +881,10 @@ async def aindex(
             )
             raise AssertionError(msg)
 
-        _source_ids = cast("Sequence[str]", source_ids)
+        source_ids_ = cast("Sequence[str]", source_ids)
 
         while uids_to_delete := await record_manager.alist_keys(
-            group_ids=_source_ids, before=index_start_dt, limit=cleanup_batch_size
+            group_ids=source_ids_, before=index_start_dt, limit=cleanup_batch_size
         ):
             # Then delete from vector store.
             await _adelete(destination, uids_to_delete)
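The `cleanup` annotation change is purely stylistic: `Literal[..., None]` and `Optional[Literal[...]]` accept exactly the same values, since `Optional[X]` is `Union[X, None]`. A small demonstration (illustrative, not from the commit):

    from typing import Literal, Optional, Union, get_args

    A = Literal["incremental", "full", "scoped_full", None]
    B = Optional[Literal["incremental", "full", "scoped_full"]]
    assert B == Union[Literal["incremental", "full", "scoped_full"], None]
    print(get_args(A))  # ('incremental', 'full', 'scoped_full', None)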

libs/core/langchain_core/language_models/_utils.py

Lines changed: 1 addition & 1 deletion
@@ -123,7 +123,7 @@ def _normalize_messages(messages: Sequence[BaseMessage]) -> list[BaseMessage]:
                 # Subset to (PDF) files and audio, as most relevant chat models
                 # support images in OAI format (and some may not yet support the
                 # standard data block format)
-                and block.get("type") in ("file", "input_audio")
+                and block.get("type") in {"file", "input_audio"}
                 and _is_openai_data_block(block)
             ):
                 if formatted_message is message:
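Swapping the tuple for a set literal in membership tests is a Ruff preview preference; for hashable constants the result is identical, and CPython compiles a constant set literal on the right of `in` into a frozenset constant, giving average O(1) lookup. Illustrative:

    block_type = "input_audio"
    in_tuple = block_type in ("file", "input_audio")
    in_set = block_type in {"file", "input_audio"}
    assert in_tuple == in_set  # same answer for hashable values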

libs/core/langchain_core/language_models/chat_models.py

Lines changed: 8 additions & 8 deletions
@@ -1263,8 +1263,8 @@ def predict(
         Returns:
             The predicted output string.
         """
-        _stop = None if stop is None else list(stop)
-        result = self([HumanMessage(content=text)], stop=_stop, **kwargs)
+        stop_ = None if stop is None else list(stop)
+        result = self([HumanMessage(content=text)], stop=stop_, **kwargs)
         if isinstance(result.content, str):
             return result.content
         msg = "Cannot use predict when output is not a string."
@@ -1279,17 +1279,17 @@ def predict_messages(
         stop: Optional[Sequence[str]] = None,
         **kwargs: Any,
     ) -> BaseMessage:
-        _stop = None if stop is None else list(stop)
-        return self(messages, stop=_stop, **kwargs)
+        stop_ = None if stop is None else list(stop)
+        return self(messages, stop=stop_, **kwargs)
 
     @deprecated("0.1.7", alternative="ainvoke", removal="1.0")
     @override
     async def apredict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
     ) -> str:
-        _stop = None if stop is None else list(stop)
+        stop_ = None if stop is None else list(stop)
         result = await self._call_async(
-            [HumanMessage(content=text)], stop=_stop, **kwargs
+            [HumanMessage(content=text)], stop=stop_, **kwargs
         )
         if isinstance(result.content, str):
             return result.content
@@ -1305,8 +1305,8 @@ async def apredict_messages(
         stop: Optional[Sequence[str]] = None,
         **kwargs: Any,
     ) -> BaseMessage:
-        _stop = None if stop is None else list(stop)
-        return await self._call_async(messages, stop=_stop, **kwargs)
+        stop_ = None if stop is None else list(stop)
+        return await self._call_async(messages, stop=stop_, **kwargs)
 
     @property
     @abstractmethod

libs/core/langchain_core/language_models/llms.py

Lines changed: 13 additions & 17 deletions
@@ -93,10 +93,10 @@ def create_base_retry_decorator(
     Raises:
         ValueError: If the cache is not set and cache is True.
     """
-    _logging = before_sleep_log(logger, logging.WARNING)
+    logging_ = before_sleep_log(logger, logging.WARNING)
 
     def _before_sleep(retry_state: RetryCallState) -> None:
-        _logging(retry_state)
+        logging_(retry_state)
         if run_manager:
             if isinstance(run_manager, AsyncCallbackManagerForLLMRun):
                 coro = run_manager.on_retry(retry_state)
@@ -119,7 +119,7 @@ def _before_sleep(retry_state: RetryCallState) -> None:
     # 4 seconds, then up to 10 seconds, then 10 seconds afterwards
     retry_instance: retry_base = retry_if_exception_type(error_types[0])
     for error in error_types[1:]:
-        retry_instance = retry_instance | retry_if_exception_type(error)
+        retry_instance |= retry_if_exception_type(error)
     return retry(
         reraise=True,
         stop=stop_after_attempt(max_retries),
@@ -155,7 +155,7 @@ def _resolve_cache(*, cache: Union[BaseCache, bool, None]) -> Optional[BaseCache
 def get_prompts(
     params: dict[str, Any],
     prompts: list[str],
-    cache: Optional[Union[BaseCache, bool, None]] = None,  # noqa: FBT001
+    cache: Union[BaseCache, bool, None] = None,  # noqa: FBT001
 ) -> tuple[dict[int, list], str, list[int], list[str]]:
     """Get prompts that are already cached.
 
@@ -191,7 +191,7 @@ def get_prompts(
 async def aget_prompts(
     params: dict[str, Any],
     prompts: list[str],
-    cache: Optional[Union[BaseCache, bool, None]] = None,  # noqa: FBT001
+    cache: Union[BaseCache, bool, None] = None,  # noqa: FBT001
 ) -> tuple[dict[int, list], str, list[int], list[str]]:
     """Get prompts that are already cached. Async version.
 
@@ -877,8 +877,6 @@ def generate(
                 **(metadata or {}),
                 **self._get_ls_params(stop=stop, **kwargs),
             }
-        else:
-            pass
         if (
             isinstance(callbacks, list)
             and callbacks
@@ -1132,8 +1130,6 @@ async def agenerate(
                 **(metadata or {}),
                 **self._get_ls_params(stop=stop, **kwargs),
             }
-        else:
-            pass
         # Create callback managers
         if isinstance(callbacks, list) and (
             isinstance(callbacks[0], (list, BaseCallbackManager))
@@ -1352,8 +1348,8 @@ async def _call_async(
     def predict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
     ) -> str:
-        _stop = None if stop is None else list(stop)
-        return self(text, stop=_stop, **kwargs)
+        stop_ = None if stop is None else list(stop)
+        return self(text, stop=stop_, **kwargs)
 
     @deprecated("0.1.7", alternative="invoke", removal="1.0")
     @override
@@ -1365,17 +1361,17 @@ def predict_messages(
         **kwargs: Any,
     ) -> BaseMessage:
         text = get_buffer_string(messages)
-        _stop = None if stop is None else list(stop)
-        content = self(text, stop=_stop, **kwargs)
+        stop_ = None if stop is None else list(stop)
+        content = self(text, stop=stop_, **kwargs)
         return AIMessage(content=content)
 
     @deprecated("0.1.7", alternative="ainvoke", removal="1.0")
     @override
     async def apredict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
     ) -> str:
-        _stop = None if stop is None else list(stop)
-        return await self._call_async(text, stop=_stop, **kwargs)
+        stop_ = None if stop is None else list(stop)
+        return await self._call_async(text, stop=stop_, **kwargs)
 
     @deprecated("0.1.7", alternative="ainvoke", removal="1.0")
     @override
@@ -1387,8 +1383,8 @@ async def apredict_messages(
         **kwargs: Any,
     ) -> BaseMessage:
         text = get_buffer_string(messages)
-        _stop = None if stop is None else list(stop)
-        content = await self._call_async(text, stop=_stop, **kwargs)
+        stop_ = None if stop is None else list(stop)
+        content = await self._call_async(text, stop=stop_, **kwargs)
         return AIMessage(content=content)
 
     def __str__(self) -> str:
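Beyond the renames, this file picks up three other mechanical fixes: `Optional[Union[BaseCache, bool, None]]` drops the outer `Optional` because the `Union` already includes `None`; `retry_instance = retry_instance | ...` collapses to the augmented `retry_instance |= ...`; and empty `else: pass` branches are deleted outright since they do nothing. A sketch of the augmented-assignment pattern using plain sets (illustrative, not from the commit):

    conditions = {"TimeoutError"}
    conditions |= {"RateLimitError"}  # same as: conditions = conditions | {"RateLimitError"}
    assert conditions == {"TimeoutError", "RateLimitError"}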

libs/core/langchain_core/load/serializable.py

Lines changed: 4 additions & 4 deletions
@@ -355,19 +355,19 @@ def to_json_not_implemented(obj: object) -> SerializedNotImplemented:
     Returns:
         SerializedNotImplemented
     """
-    _id: list[str] = []
+    id_: list[str] = []
     try:
         if hasattr(obj, "__name__"):
-            _id = [*obj.__module__.split("."), obj.__name__]
+            id_ = [*obj.__module__.split("."), obj.__name__]
         elif hasattr(obj, "__class__"):
-            _id = [*obj.__class__.__module__.split("."), obj.__class__.__name__]
+            id_ = [*obj.__class__.__module__.split("."), obj.__class__.__name__]
     except Exception:
         logger.debug("Failed to serialize object", exc_info=True)
 
     result: SerializedNotImplemented = {
         "lc": 1,
         "type": "not_implemented",
-        "id": _id,
+        "id": id_,
         "repr": None,
     }
     with contextlib.suppress(Exception):

libs/core/langchain_core/messages/tool.py

Lines changed: 1 addition & 3 deletions
@@ -126,8 +126,6 @@ def coerce_args(cls, values: dict) -> dict:
                     raise ValueError(msg) from e
                 else:
                     values["content"].append(x)
-            else:
-                pass
 
         tool_call_id = values["tool_call_id"]
         if isinstance(tool_call_id, (UUID, int, float)):
@@ -366,4 +364,4 @@ def default_tool_chunk_parser(raw_tool_calls: list[dict]) -> list[ToolCallChunk]
 def _merge_status(
     left: Literal["success", "error"], right: Literal["success", "error"]
 ) -> Literal["success", "error"]:
-    return "error" if "error" in (left, right) else "success"
+    return "error" if "error" in {left, right} else "success"
