10 changes: 6 additions & 4 deletions pandas/core/internals/blocks.py
@@ -564,7 +564,7 @@ def convert_dtypes(
rbs.append(blk.copy(deep=False))
continue

- for dtype, b in zip(dtypes, sub_blks):
+ for dtype, b in zip(dtypes, sub_blks, strict=True):
rbs.append(b.astype(dtype=dtype, squeeze=b.ndim != 1))
return rbs

@@ -803,7 +803,7 @@ def replace_list(
# Exclude anything that we know we won't contain
pairs = [
(x, y)
- for x, y in zip(src_list, dest_list)
+ for x, y in zip(src_list, dest_list, strict=False)
if (self._can_hold_element(x) or (self.dtype == "string" and is_re(x)))
]
if not pairs:
@@ -833,7 +833,7 @@ def replace_list(
# references when we check again later
rb = [self]

- for i, ((src, dest), mask) in enumerate(zip(pairs, masks)):
+ for i, ((src, dest), mask) in enumerate(zip(pairs, masks, strict=True)):
new_rb: list[Block] = []

# GH-39338: _replace_coerce can split a block into
@@ -2117,7 +2117,9 @@ def _unstack(
BlockPlacement(place),
ndim=2,
)
- for i, (indices, place) in enumerate(zip(new_values, new_placement))
+ for i, (indices, place) in enumerate(
+     zip(new_values, new_placement, strict=True)
+ )
]
return blocks, mask

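For context (not part of the patch): the strict= keyword was added to the zip() builtin in Python 3.10. With the default strict=False, zip() silently stops at the shortest iterable; strict=True turns a length mismatch into a ValueError, which is what the hunks above opt into wherever the paired iterables are expected to stay in lockstep. A minimal sketch of the difference, using throwaway data:

    dtypes = ["int64", "float64"]
    sub_blks = ["blk_a", "blk_b", "blk_c"]   # deliberately one element too many

    # default behaviour (strict=False): the extra element is silently dropped
    print(list(zip(dtypes, sub_blks)))
    # [('int64', 'blk_a'), ('float64', 'blk_b')]

    # strict=True surfaces the mismatch instead of hiding it
    try:
        list(zip(dtypes, sub_blks, strict=True))
    except ValueError as exc:
        print(exc)   # e.g. "zip() argument 2 is longer than argument 1"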
2 changes: 1 addition & 1 deletion pandas/core/internals/construction.py
@@ -302,7 +302,7 @@ def ndarray_to_mgr(
for x in obj_columns
]
# don't convert (and copy) the objects if no type inference occurs
- if any(x is not y for x, y in zip(obj_columns, maybe_datetime)):
+ if any(x is not y for x, y in zip(obj_columns, maybe_datetime, strict=True)):
block_values = [
new_block_2d(ensure_block_shape(dval, 2), placement=BlockPlacement(n))
for n, dval in enumerate(maybe_datetime)
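An aside on the pattern in that hunk: maybe_datetime is built element-for-element from obj_columns (see the comprehension just above the changed line), so the two lists have equal length by construction and strict=True simply documents that invariant; the identity comparison (x is not y) then detects whether inference actually produced any new objects. A rough sketch with hypothetical stand-ins (maybe_infer here is made up, not the pandas helper):

    def maybe_infer(col):
        # stand-in for datetime inference: returns its input unchanged
        # unless a conversion actually happened
        return col

    obj_columns = [["2021-01-01"], ["not a date"]]
    maybe_datetime = [maybe_infer(c) for c in obj_columns]

    # conversion (and a copy) is only needed if some element was replaced
    needs_convert = any(
        x is not y for x, y in zip(obj_columns, maybe_datetime, strict=True)
    )
    print(needs_convert)   # False: every element is still the same object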
10 changes: 6 additions & 4 deletions pandas/core/internals/managers.py
@@ -385,7 +385,9 @@ def equals(self, other: object) -> bool:
self_axes, other_axes = self.axes, other.axes
if len(self_axes) != len(other_axes):
return False
- if not all(ax1.equals(ax2) for ax1, ax2 in zip(self_axes, other_axes)):
+ if not all(
+     ax1.equals(ax2) for ax1, ax2 in zip(self_axes, other_axes, strict=True)
+ ):
return False

return self._equal_values(other)
@@ -989,7 +991,7 @@ def _slice_take_blocks_ax0(
elif only_slice:
# GH#33597 slice instead of take, so we get
# views instead of copies
- for i, ml in zip(taker, mgr_locs):
+ for i, ml in zip(taker, mgr_locs, strict=False):
slc = slice(i, i + 1)
bp = BlockPlacement(ml)
nb = blk.getitem_block_columns(slc, new_mgr_locs=bp)
@@ -2411,12 +2413,12 @@ def _tuples_to_blocks_no_consolidate(tuples, refs) -> list[Block]:
new_block_2d(
ensure_block_shape(arr, ndim=2), placement=BlockPlacement(i), refs=ref
)
- for ((i, arr), ref) in zip(tuples, refs)
+ for ((i, arr), ref) in zip(tuples, refs, strict=True)
]


def _stack_arrays(tuples, dtype: np.dtype):
- placement, arrays = zip(*tuples)
+ placement, arrays = zip(*tuples, strict=True)

first = arrays[0]
shape = (len(arrays),) + first.shape
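One detail from the last hunk: zip(*tuples, ...) is the usual unzip/transpose idiom. Each element of tuples is a fixed-size (placement, array) pair, so strict=True is trivially satisfied there and mainly spells out the intent. A minimal sketch with made-up values:

    tuples = [(0, "arr_a"), (1, "arr_b"), (2, "arr_c")]

    # transpose a list of pairs into two parallel tuples
    placement, arrays = zip(*tuples, strict=True)
    print(placement)   # (0, 1, 2)
    print(arrays)      # ('arr_a', 'arr_b', 'arr_c')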
3 changes: 0 additions & 3 deletions pyproject.toml
@@ -465,9 +465,6 @@ exclude = [
"pandas/core/indexes/interval.py" = ["B905"]
"pandas/core/indexes/multi.py" = ["B905"]
"pandas/core/indexing.py" = ["B905"]
"pandas/core/internals/blocks.py" = ["B905"]
"pandas/core/internals/construction.py" = ["B905"]
"pandas/core/internals/managers.py" = ["B905"]
"pandas/core/methods/to_dict.py" = ["B905"]
"pandas/core/reshape/concat.py" = ["B905"]
"pandas/core/reshape/encoding.py" = ["B905"]
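For reference, B905 is the zip-without-explicit-strict rule from flake8-bugbear, which ruff also implements; dropping the three per-file ignores above means every zip() call in these internals modules now has to state strict= explicitly. Roughly what the rule distinguishes, in a standalone example:

    a = [1, 2, 3]
    b = ["x", "y", "z"]

    pairs = list(zip(a, b))                # flagged by B905: no explicit strict=
    pairs = list(zip(a, b, strict=True))   # accepted: a length mismatch raises ValueError
    pairs = list(zip(a, b, strict=False))  # accepted: explicitly opts into truncation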