
[pre-commit.ci] pre-commit autoupdate #59419

Closed · wants to merge 3 commits
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml

@@ -19,7 +19,7 @@ ci:
     skip: [pyright, mypy]
 repos:
 -   repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.0
+    rev: v0.5.6
     hooks:
     -   id: ruff
         args: [--exit-non-zero-on-fix]
@@ -73,7 +73,7 @@ repos:
     hooks:
     -   id: isort
 -   repo: https://github.com/asottile/pyupgrade
-    rev: v3.16.0
+    rev: v3.17.0
     hooks:
     -   id: pyupgrade
         args: [--py310-plus]
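The hook bumps above (ruff v0.5.0 to v0.5.6, pyupgrade v3.16.0 to v3.17.0) appear to drive the rest of this diff: the updated hooks rewrite return annotations of the form Generator[X, None, None] to Generator[X], dropping the explicit send and return type arguments, which both default to None.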
4 changes: 2 additions & 2 deletions pandas/_config/config.py

@@ -411,7 +411,7 @@ def __dir__(self) -> list[str]:
 
 
 @contextmanager
-def option_context(*args) -> Generator[None, None, None]:
+def option_context(*args) -> Generator[None]:
     """
     Context manager to temporarily set options in a ``with`` statement.
 
@@ -713,7 +713,7 @@ def _build_option_description(k: str) -> str:
 
 
 @contextmanager
-def config_prefix(prefix: str) -> Generator[None, None, None]:
+def config_prefix(prefix: str) -> Generator[None]:
     """
     contextmanager for multiple invocations of API with a common prefix
 
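For context, a minimal sketch of the equivalence these rewrites rely on; the tmp_option helper below is hypothetical and not part of pandas. With the type-parameter defaults standardized by PEP 696 (Generator's send and return parameters default to None), the one-argument spelling annotates the same generator as the fully spelled-out form, and deferred annotation evaluation keeps the shorter spelling harmless on older interpreters. Type checkers that understand the defaults treat both spellings identically.

from __future__ import annotations  # annotations stay strings, so older interpreters accept the syntax

from collections.abc import Generator
from contextlib import contextmanager


@contextmanager
def tmp_option(value: int) -> Generator[int]:  # hypothetical; equivalent to Generator[int, None, None]
    # Yields an int, accepts nothing via send(), and returns None:
    # the omitted send/return type arguments default to None.
    yield value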
2 changes: 1 addition & 1 deletion pandas/_config/localization.py

@@ -25,7 +25,7 @@
 @contextmanager
 def set_locale(
     new_locale: str | tuple[str, str], lc_var: int = locale.LC_ALL
-) -> Generator[str | tuple[str, str], None, None]:
+) -> Generator[str | tuple[str, str]]:
     """
     Context manager for temporarily setting a locale.
 
2 changes: 1 addition & 1 deletion pandas/_testing/_warnings.py

@@ -35,7 +35,7 @@ def assert_produces_warning(
     raise_on_extra_warnings: bool = True,
     match: str | tuple[str | None, ...] | None = None,
     must_find_all_warnings: bool = True,
-) -> Generator[list[warnings.WarningMessage], None, None]:
+) -> Generator[list[warnings.WarningMessage]]:
     """
     Context manager for running code expected to either raise a specific warning,
     multiple specific warnings, or not raise any warnings. Verifies that the code
8 changes: 4 additions & 4 deletions pandas/_testing/contexts.py

@@ -29,7 +29,7 @@
 @contextmanager
 def decompress_file(
     path: FilePath | BaseBuffer, compression: CompressionOptions
-) -> Generator[IO[bytes], None, None]:
+) -> Generator[IO[bytes]]:
     """
     Open a compressed file and return a file object.
 
@@ -50,7 +50,7 @@ def decompress_file(
 
 
 @contextmanager
-def set_timezone(tz: str) -> Generator[None, None, None]:
+def set_timezone(tz: str) -> Generator[None]:
     """
     Context manager for temporarily setting a timezone.
 
@@ -91,7 +91,7 @@ def setTZ(tz) -> None:
 
 
 @contextmanager
-def ensure_clean(filename=None) -> Generator[Any, None, None]:
+def ensure_clean(filename=None) -> Generator[Any]:
     """
     Gets a temporary path and agrees to remove on close.
 
@@ -123,7 +123,7 @@ def ensure_clean(filename=None) -> Generator[Any, None, None]:
 
 
 @contextmanager
-def with_csv_dialect(name: str, **kwargs) -> Generator[None, None, None]:
+def with_csv_dialect(name: str, **kwargs) -> Generator[None]:
     """
     Context manager to temporarily register a CSV dialect for parsing CSV.
 
2 changes: 1 addition & 1 deletion pandas/compat/pickle_compat.py

@@ -131,7 +131,7 @@ def loads(
 
 
 @contextlib.contextmanager
-def patch_pickle() -> Generator[None, None, None]:
+def patch_pickle() -> Generator[None]:
     """
     Temporarily patch pickle to use our unpickler.
     """
6 changes: 3 additions & 3 deletions pandas/core/apply.py

@@ -806,7 +806,7 @@ def result_columns(self) -> Index:
 
     @property
     @abc.abstractmethod
-    def series_generator(self) -> Generator[Series, None, None]:
+    def series_generator(self) -> Generator[Series]:
         pass
 
     @staticmethod
@@ -1131,7 +1131,7 @@ class FrameRowApply(FrameApply):
     axis: AxisInt = 0
 
     @property
-    def series_generator(self) -> Generator[Series, None, None]:
+    def series_generator(self) -> Generator[Series]:
         return (self.obj._ixs(i, axis=1) for i in range(len(self.columns)))
 
     @staticmethod
@@ -1243,7 +1243,7 @@ def apply_broadcast(self, target: DataFrame) -> DataFrame:
         return result.T
 
     @property
-    def series_generator(self) -> Generator[Series, None, None]:
+    def series_generator(self) -> Generator[Series]:
         values = self.values
         values = ensure_wrapped_if_datetimelike(values)
         assert len(values) > 0
2 changes: 1 addition & 1 deletion pandas/core/arrays/datetimes.py

@@ -2906,7 +2906,7 @@ def _generate_range(
     offset: BaseOffset,
     *,
     unit: str,
-) -> Generator[Timestamp, None, None]:
+) -> Generator[Timestamp]:
     """
     Generates a sequence of dates corresponding to the specified time
     offset. Similar to dateutil.rrule except uses pandas DateOffset
2 changes: 1 addition & 1 deletion pandas/core/common.py

@@ -562,7 +562,7 @@ def convert_to_list_like(
 @contextlib.contextmanager
 def temp_setattr(
     obj, attr: str, value, condition: bool = True
-) -> Generator[None, None, None]:
+) -> Generator[None]:
     """
     Temporarily set attribute on an object.
 
2 changes: 1 addition & 1 deletion pandas/core/groupby/ops.py

@@ -865,7 +865,7 @@ def _unob_index_and_ids(
         return unob_index, unob_ids
 
     @final
-    def get_group_levels(self) -> Generator[Index, None, None]:
+    def get_group_levels(self) -> Generator[Index]:
         # Note: only called from _insert_inaxis_grouper, which
         # is only called for BaseGrouper, never for BinGrouper
         result_index = self.result_index
2 changes: 1 addition & 1 deletion pandas/core/indexes/multi.py

@@ -2623,7 +2623,7 @@ def _reorder_ilevels(self, order) -> MultiIndex:
 
     def _recode_for_new_levels(
         self, new_levels, copy: bool = True
-    ) -> Generator[np.ndarray, None, None]:
+    ) -> Generator[np.ndarray]:
         if len(new_levels) > self.nlevels:
             raise AssertionError(
                 f"Length of new_levels ({len(new_levels)}) "
2 changes: 1 addition & 1 deletion pandas/core/internals/blocks.py

@@ -388,7 +388,7 @@ def _split_op_result(self, result: ArrayLike) -> list[Block]:
         return [nb]
 
     @final
-    def _split(self) -> Generator[Block, None, None]:
+    def _split(self) -> Generator[Block]:
         """
         Split a block into a list of single-column blocks.
         """
2 changes: 1 addition & 1 deletion pandas/core/internals/concat.py

@@ -250,7 +250,7 @@ def _concat_homogeneous_fastpath(
 
 def _get_combined_plan(
     mgrs: list[BlockManager],
-) -> Generator[tuple[BlockPlacement, list[JoinUnit]], None, None]:
+) -> Generator[tuple[BlockPlacement, list[JoinUnit]]]:
     max_len = mgrs[0].shape[0]
 
     blknos_list = [mgr.blknos for mgr in mgrs]
4 changes: 2 additions & 2 deletions pandas/core/internals/managers.py

@@ -856,7 +856,7 @@ def _slice_take_blocks_ax0(
         *,
         use_na_proxy: bool = False,
         ref_inplace_op: bool = False,
-    ) -> Generator[Block, None, None]:
+    ) -> Generator[Block]:
         """
         Slice/take blocks along axis=0.
 
@@ -1731,7 +1731,7 @@ def unstack(self, unstacker, fill_value) -> BlockManager:
         bm = BlockManager(new_blocks, [new_columns, new_index], verify_integrity=False)
         return bm
 
-    def to_iter_dict(self) -> Generator[tuple[str, Self], None, None]:
+    def to_iter_dict(self) -> Generator[tuple[str, Self]]:
         """
         Yield a tuple of (str(dtype), BlockManager)
 
2 changes: 1 addition & 1 deletion pandas/core/methods/to_dict.py

@@ -33,7 +33,7 @@
 
 def create_data_for_split(
     df: DataFrame, are_all_object_dtype_cols: bool, object_dtype_indices: list[int]
-) -> Generator[list, None, None]:
+) -> Generator[list]:
     """
     Simple helper method to create data for to ``to_dict(orient="split")``
     to create the main output data
6 changes: 3 additions & 3 deletions pandas/io/formats/css.py

@@ -36,7 +36,7 @@ def _side_expander(prop_fmt: str) -> Callable:
 
     def expand(
         self: CSSResolver, prop: str, value: str
-    ) -> Generator[tuple[str, str], None, None]:
+    ) -> Generator[tuple[str, str]]:
         """
         Expand shorthand property into side-specific property (top, right, bottom, left)
 
@@ -83,7 +83,7 @@ def _border_expander(side: str = "") -> Callable:
 
     def expand(
         self: CSSResolver, prop: str, value: str
-    ) -> Generator[tuple[str, str], None, None]:
+    ) -> Generator[tuple[str, str]]:
         """
         Expand border into color, style, and width tuples
 
@@ -392,7 +392,7 @@ def _error() -> str:
         size_fmt = f"{val:f}pt"
         return size_fmt
 
-    def atomize(self, declarations: Iterable) -> Generator[tuple[str, str], None, None]:
+    def atomize(self, declarations: Iterable) -> Generator[tuple[str, str]]:
         for prop, value in declarations:
             prop = prop.lower()
             value = value.lower()
2 changes: 1 addition & 1 deletion pandas/io/formats/format.py

@@ -1024,7 +1024,7 @@ def save_to_buffer(
 @contextmanager
 def _get_buffer(
     buf: FilePath | WriteBuffer[str] | None, encoding: str | None = None
-) -> Generator[WriteBuffer[str], None, None] | Generator[StringIO, None, None]:
+) -> Generator[WriteBuffer[str]] | Generator[StringIO]:
     """
     Context manager to open, yield and close buffer for filenames or Path-like
     objects, otherwise yield buf unchanged.
6 changes: 3 additions & 3 deletions pandas/io/sql.py

@@ -1116,7 +1116,7 @@ def _query_iterator(
         coerce_float: bool = True,
         parse_dates=None,
         dtype_backend: DtypeBackend | Literal["numpy"] = "numpy",
-    ) -> Generator[DataFrame, None, None]:
+    ) -> Generator[DataFrame]:
         """Return generator through chunked result set."""
         has_read_data = False
         with exit_stack:
@@ -1728,7 +1728,7 @@ def _query_iterator(
         parse_dates=None,
         dtype: DtypeArg | None = None,
         dtype_backend: DtypeBackend | Literal["numpy"] = "numpy",
-    ) -> Generator[DataFrame, None, None]:
+    ) -> Generator[DataFrame]:
         """Return generator through chunked result set"""
         has_read_data = False
         with exit_stack:
@@ -2677,7 +2677,7 @@ def _query_iterator(
         parse_dates=None,
         dtype: DtypeArg | None = None,
         dtype_backend: DtypeBackend | Literal["numpy"] = "numpy",
-    ) -> Generator[DataFrame, None, None]:
+    ) -> Generator[DataFrame]:
         """Return generator through chunked result set"""
        has_read_data = False
        while True:
2 changes: 1 addition & 1 deletion pandas/plotting/_matplotlib/converter.py

@@ -92,7 +92,7 @@ def wrapper(*args, **kwargs):
 
 
 @contextlib.contextmanager
-def pandas_converters() -> Generator[None, None, None]:
+def pandas_converters() -> Generator[None]:
     """
     Context manager registering pandas' converters for a plot.
 
2 changes: 1 addition & 1 deletion pandas/plotting/_matplotlib/tools.py

@@ -442,7 +442,7 @@ def handle_shared_axes(
         _remove_labels_from_axis(ax.yaxis)
 
 
-def flatten_axes(axes: Axes | Iterable[Axes]) -> Generator[Axes, None, None]:
+def flatten_axes(axes: Axes | Iterable[Axes]) -> Generator[Axes]:
     if not is_list_like(axes):
         yield axes  # type: ignore[misc]
     elif isinstance(axes, (np.ndarray, ABCIndex)):
2 changes: 1 addition & 1 deletion pandas/plotting/_misc.py

@@ -704,7 +704,7 @@ def _get_canonical_key(self, key: str) -> str:
         return self._ALIASES.get(key, key)
 
     @contextmanager
-    def use(self, key, value) -> Generator[_Options, None, None]:
+    def use(self, key, value) -> Generator[_Options]:
         """
         Temporarily set a parameter value using the with statement.
         Aliasing allowed.
4 changes: 2 additions & 2 deletions pandas/util/_exceptions.py

@@ -13,7 +13,7 @@
 
 
 @contextlib.contextmanager
-def rewrite_exception(old_name: str, new_name: str) -> Generator[None, None, None]:
+def rewrite_exception(old_name: str, new_name: str) -> Generator[None]:
     """
     Rewrite the message of an exception.
     """
@@ -66,7 +66,7 @@ def rewrite_warning(
     target_category: type[Warning],
     new_message: str,
     new_category: type[Warning] | None = None,
-) -> Generator[None, None, None]:
+) -> Generator[None]:
     """
     Rewrite the message of a warning.
 