Commit 0d47990

Merge branch 'main' into main
2 parents c184ea5 + e97a56e

26 files changed: +138, -234 lines changed

doc/source/whatsnew/v0.21.0.rst

Lines changed: 6 additions & 11 deletions
@@ -635,22 +635,17 @@ Previous behavior:

 New behavior:

-.. code-block:: ipython
+.. ipython:: python

-   In [1]: pi = pd.period_range('2017-01', periods=12, freq='M')
+   pi = pd.period_range('2017-01', periods=12, freq='M')

-   In [2]: s = pd.Series(np.arange(12), index=pi)
+   s = pd.Series(np.arange(12), index=pi)

-   In [3]: resampled = s.resample('2Q').mean()
+   resampled = s.resample('2Q').mean()

-   In [4]: resampled
-   Out[4]:
-   2017Q1    2.5
-   2017Q3    8.5
-   Freq: 2Q-DEC, dtype: float64
+   resampled

-   In [5]: resampled.index
-   Out[5]: PeriodIndex(['2017Q1', '2017Q3'], dtype='period[2Q-DEC]')
+   resampled.index

 Upsampling and calling ``.ohlc()`` previously returned a ``Series``, basically identical to calling ``.asfreq()``. OHLC upsampling now returns a DataFrame with columns ``open``, ``high``, ``low`` and ``close`` (:issue:`13083`). This is consistent with downsampling and ``DatetimeIndex`` behavior.
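As a quick illustration of the ``.ohlc()`` behaviour described in the context line above, here is a minimal sketch; the data and the '2Q' frequency are borrowed from the example in the hunk, everything else is illustrative and not part of the commit:

    import numpy as np
    import pandas as pd

    pi = pd.period_range('2017-01', periods=12, freq='M')
    s = pd.Series(np.arange(12), index=pi)

    # Resampling with .ohlc() returns a DataFrame with open/high/low/close
    # columns, consistent with the DatetimeIndex behaviour the entry describes.
    ohlc = s.resample('2Q').ohlc()
    print(list(ohlc.columns))  # ['open', 'high', 'low', 'close']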

doc/source/whatsnew/v2.2.0.rst

Lines changed: 1 addition & 1 deletion
@@ -664,7 +664,7 @@ Other Deprecations
 - Deprecated :meth:`DatetimeArray.__init__` and :meth:`TimedeltaArray.__init__`, use :func:`array` instead (:issue:`55623`)
 - Deprecated :meth:`Index.format`, use ``index.astype(str)`` or ``index.map(formatter)`` instead (:issue:`55413`)
 - Deprecated :meth:`Series.ravel`, the underlying array is already 1D, so ravel is not necessary (:issue:`52511`)
-- Deprecated :meth:`Series.resample` and :meth:`DataFrame.resample` with a :class:`PeriodIndex` (and the 'convention' keyword), convert to :class:`DatetimeIndex` (with ``.to_timestamp()``) before resampling instead (:issue:`53481`)
+- Deprecated :meth:`Series.resample` and :meth:`DataFrame.resample` with a :class:`PeriodIndex` (and the 'convention' keyword), convert to :class:`DatetimeIndex` (with ``.to_timestamp()``) before resampling instead (:issue:`53481`). Note: this deprecation was later undone in pandas 2.3.3 (:issue:`57033`)
 - Deprecated :meth:`Series.view`, use :meth:`Series.astype` instead to change the dtype (:issue:`20251`)
 - Deprecated :meth:`offsets.Tick.is_anchored`, use ``False`` instead (:issue:`55388`)
 - Deprecated ``core.internals`` members ``Block``, ``ExtensionBlock``, and ``DatetimeTZBlock``, use public APIs instead (:issue:`55139`)
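The deprecation entry above names ``.to_timestamp()`` as the migration path; a minimal sketch of that workaround (the 'QS' and 'Q' frequencies are chosen for illustration, not taken from the commit):

    import numpy as np
    import pandas as pd

    pi = pd.period_range('2017-01', periods=12, freq='M')
    s = pd.Series(np.arange(12), index=pi)

    # Convert the PeriodIndex to a DatetimeIndex before resampling, as the
    # deprecation note suggests; converting back to periods is optional.
    resampled = s.to_timestamp().resample('QS').mean()
    as_periods = resampled.to_period('Q')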

doc/source/whatsnew/v2.3.3.rst

Lines changed: 8 additions & 0 deletions
@@ -57,6 +57,14 @@ Bug fixes

 - The :meth:`DataFrame.iloc` now works correctly with ``copy_on_write`` option when assigning values after subsetting the columns of a homogeneous DataFrame (:issue:`60309`)

+Other changes
+~~~~~~~~~~~~~
+
+- The deprecation of using :meth:`Series.resample` and :meth:`DataFrame.resample`
+  with a :class:`PeriodIndex` (and the 'convention' keyword) has been undone.
+  Resampling with a :class:`PeriodIndex` is supported again, but a subset of
+  methods that return incorrect results will raise an error in pandas 3.0 (:issue:`57033`)
+

 .. ---------------------------------------------------------------------------
 .. _whatsnew_233.contributors:
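For context on the ``DataFrame.iloc`` / ``copy_on_write`` bug-fix entry above, a rough sketch of the kind of pattern it covers; the exact failing case lives in the linked issue (:issue:`60309`) and this example is illustrative only:

    import pandas as pd

    pd.set_option('mode.copy_on_write', True)

    # Subset the columns of a homogeneous (single-dtype) DataFrame, then assign
    # through iloc; under copy-on-write the parent frame must stay unchanged.
    df = pd.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]})
    sub = df[['a', 'b']]
    sub.iloc[0, 0] = 100
    assert df.loc[0, 'a'] == 1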

pandas/_typing.py

Lines changed: 1 addition & 2 deletions
@@ -83,8 +83,7 @@

 # numpy compatible types
 NumpyValueArrayLike: TypeAlias = ScalarLike_co | npt.ArrayLike
-# Name "npt._ArrayLikeInt_co" is not defined [name-defined]
-NumpySorter: TypeAlias = npt._ArrayLikeInt_co | None  # type: ignore[name-defined]
+NumpySorter: TypeAlias = npt._ArrayLikeInt_co | None


 P = ParamSpec("P")

pandas/core/algorithms.py

Lines changed: 9 additions & 8 deletions
@@ -215,14 +215,15 @@ def _reconstruct_data(
         # that values.dtype == dtype
         cls = dtype.construct_array_type()

-        # error: Incompatible types in assignment (expression has type
-        # "ExtensionArray", variable has type "ndarray[Any, Any]")
-        values = cls._from_sequence(values, dtype=dtype)  # type: ignore[assignment]
-
-    else:
-        values = values.astype(dtype, copy=False)
-
-    return values
+        # error: Incompatible return value type
+        # (got "ExtensionArray",
+        # expected "ndarray[tuple[Any, ...], dtype[Any]]")
+        return cls._from_sequence(values, dtype=dtype)  # type: ignore[return-value]
+
+    # error: Incompatible return value type
+    # (got "ndarray[tuple[Any, ...], dtype[Any]]",
+    # expected "ExtensionArray")
+    return values.astype(dtype, copy=False)  # type: ignore[return-value]


 def _ensure_arraylike(values, func_name: str) -> ArrayLike:

pandas/core/array_algos/quantile.py

Lines changed: 1 addition & 1 deletion
@@ -102,7 +102,7 @@ def quantile_with_mask(
             interpolation=interpolation,
         )

-        result = np.asarray(result)  # type: ignore[assignment]
+        result = np.asarray(result)
         result = result.T

     return result

pandas/core/arrays/_mixins.py

Lines changed: 3 additions & 1 deletion
@@ -151,7 +151,9 @@ def view(self, dtype: Dtype | None = None) -> ArrayLike:

             td64_values = arr.view(dtype)
             return TimedeltaArray._simple_new(td64_values, dtype=dtype)
-        return arr.view(dtype=dtype)
+        # error: Argument "dtype" to "view" of "ndarray" has incompatible type
+        # "ExtensionDtype | dtype[Any]"; expected "dtype[Any] | _HasDType[dtype[Any]]"
+        return arr.view(dtype=dtype)  # type: ignore[arg-type]

     def take(
         self,

pandas/core/arrays/arrow/_arrow_utils.py

Lines changed: 1 addition & 1 deletion
@@ -44,7 +44,7 @@ def pyarrow_array_to_numpy_and_mask(
         mask = pyarrow.BooleanArray.from_buffers(
             pyarrow.bool_(), len(arr), [None, bitmask], offset=arr.offset
         )
-        mask = np.asarray(mask)  # type: ignore[assignment]
+        mask = np.asarray(mask)
     else:
         mask = np.ones(len(arr), dtype=bool)
     return data, mask

pandas/core/arrays/arrow/array.py

Lines changed: 2 additions & 2 deletions
@@ -657,7 +657,7 @@ def _box_pa_array(
             ):
                 arr_value = np.asarray(value, dtype=object)
                 # similar to isna(value) but exclude NaN, NaT, nat-like, nan-like
-                mask = is_pdna_or_none(arr_value)  # type: ignore[assignment]
+                mask = is_pdna_or_none(arr_value)

             try:
                 pa_array = pa.array(value, type=pa_type, mask=mask)
@@ -2738,7 +2738,7 @@ def _str_get_dummies(self, sep: str = "|", dtype: NpDtype | None = None):
         dummies_dtype = np.bool_
         dummies = np.zeros(n_rows * n_cols, dtype=dummies_dtype)
         dummies[indices] = True
-        dummies = dummies.reshape((n_rows, n_cols))  # type: ignore[assignment]
+        dummies = dummies.reshape((n_rows, n_cols))
         result = self._from_pyarrow_array(pa.array(list(dummies)))
         return result, uniques_sorted.to_pylist()

pandas/core/arrays/categorical.py

Lines changed: 1 addition & 1 deletion
@@ -1869,7 +1869,7 @@ def value_counts(self, dropna: bool = True) -> Series:
             count = np.bincount(obs, minlength=ncat or 0)
         else:
             count = np.bincount(np.where(mask, code, ncat))
-            ix = np.append(ix, -1)  # type: ignore[assignment]
+            ix = np.append(ix, -1)

         ix = coerce_indexer_dtype(ix, self.dtype.categories)
         ix_categorical = self._from_backing_data(ix)
