
Commit 03a1998

Revert "Address new ruff checks"
This reverts commit 50481ca.
1 parent 50481ca commit 03a1998

3 files changed (+52, -64 lines)

doc/source/user_guide/style.ipynb
Lines changed: 30 additions & 42 deletions

@@ -100,7 +100,7 @@
 "outputs": [],
 "source": [
 "weather_df = pd.DataFrame(\n",
-" np.random.default_rng(2).standard_normal(10, 2) * 5,\n",
+" np.random.rand(10, 2) * 5,\n",
 " index=pd.date_range(start=\"2021-01-01\", periods=10),\n",
 " columns=[\"Tokyo\", \"Beijing\"],\n",
 ")\n",
@@ -157,7 +157,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"df = pd.DataFrame(np.random.default_rng(2).standard_normal(5, 5))\n",
+"df = pd.DataFrame(np.random.randn(5, 5))\n",
 "df.style.hide(subset=[0, 2, 4], axis=0).hide(subset=[0, 2, 4], axis=1)"
 ]
 },
@@ -302,16 +302,9 @@
 " columns=df.columns,\n",
 " ),\n",
 " css_class=\"pd-tt\",\n",
-" props=\"visibility: hidden;\"\n",
-" \"position: absolute;\"\n",
-" \"z-index: 1;\"\n",
-" \"border: 1px solid #000066;\"\n",
-" \"background-color: white;\"\n",
-" \"color: #000066;\"\n",
-" \"font-size: 0.8em;\"\n",
-" \"transform: translate(0px, -24px);\"\n",
-" \"padding: 0.6em;\"\n",
-" \"border-radius: 0.5em;\",\n",
+" props=\"visibility: hidden; position: absolute; z-index: 1; border: 1px solid #000066;\"\n",
+" \"background-color: white; color: #000066; font-size: 0.8em;\"\n",
+" \"transform: translate(0px, -24px); padding: 0.6em; border-radius: 0.5em;\",\n",
 " )\n",
 ")"
 ]
@@ -609,9 +602,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"df2 = pd.DataFrame(\n",
-" np.random.default_rng(2).standard_normal(10, 4), columns=[\"A\", \"B\", \"C\", \"D\"]\n",
-")\n",
+"np.random.seed(0)\n",
+"df2 = pd.DataFrame(np.random.randn(10, 4), columns=[\"A\", \"B\", \"C\", \"D\"])\n",
 "df2.style"
 ]
 },
@@ -820,14 +812,9 @@
 ")\n",
 "s.set_tooltips(\n",
 " tt,\n",
-" props=\"visibility: hidden;\"\n",
-" \"position: absolute; z-index: 1;\"\n",
-" \"border: 1px solid #000066;\"\n",
-" \"background-color: white;\"\n",
-" \"color: #000066;\"\n",
-" \"font-size: 0.8em;\"\n",
-" \"transform: translate(0px, -24px);\"\n",
-" \"padding: 0.6em; border-radius: 0.5em;\",\n",
+" props=\"visibility: hidden; position: absolute; z-index: 1; border: 1px solid #000066;\"\n",
+" \"background-color: white; color: #000066; font-size: 0.8em;\"\n",
+" \"transform: translate(0px, -24px); padding: 0.6em; border-radius: 0.5em;\",\n",
 ")"
 ]
 },
@@ -907,7 +894,7 @@
 "outputs": [],
 "source": [
 "df3 = pd.DataFrame(\n",
-" np.random.default_rng(2).standard_normal(4, 4),\n",
+" np.random.randn(4, 4),\n",
 " pd.MultiIndex.from_product([[\"A\", \"B\"], [\"r1\", \"r2\"]]),\n",
 " columns=[\"c1\", \"c2\", \"c3\", \"c4\"],\n",
 ")\n",
@@ -1619,10 +1606,10 @@
 "\n",
 "\n",
 "@widgets.interact\n",
-"def f(h_neg=(0, 359, 1), h_pos=(0, 359), s=(0.0, 99.9), l_var=(0.0, 99.9)):\n",
+"def f(h_neg=(0, 359, 1), h_pos=(0, 359), s=(0.0, 99.9), l=(0.0, 99.9)):\n",
 " return df2.style.background_gradient(\n",
 " cmap=sns.palettes.diverging_palette(\n",
-" h_neg=h_neg, h_pos=h_pos, s=s, l=l_var, as_cmap=True\n",
+" h_neg=h_neg, h_pos=h_pos, s=s, l=l, as_cmap=True\n",
 " )\n",
 " )"
 ]
@@ -1642,13 +1629,13 @@
 "source": [
 "def magnify():\n",
 " return [\n",
-" {\"selector\": \"th\", \"props\": [(\"font-size\", \"4pt\")]},\n",
-" {\"selector\": \"td\", \"props\": [(\"padding\", \"0em 0em\")]},\n",
-" {\"selector\": \"th:hover\", \"props\": [(\"font-size\", \"12pt\")]},\n",
-" {\n",
-" \"selector\": \"tr:hover td:hover\",\n",
-" \"props\": [(\"max-width\", \"200px\"), (\"font-size\", \"12pt\")],\n",
-" },\n",
+" dict(selector=\"th\", props=[(\"font-size\", \"4pt\")]),\n",
+" dict(selector=\"td\", props=[(\"padding\", \"0em 0em\")]),\n",
+" dict(selector=\"th:hover\", props=[(\"font-size\", \"12pt\")]),\n",
+" dict(\n",
+" selector=\"tr:hover td:hover\",\n",
+" props=[(\"max-width\", \"200px\"), (\"font-size\", \"12pt\")],\n",
+" ),\n",
 " ]"
 ]
 },
@@ -1658,8 +1645,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"cmap = sns.diverging_palette(5, 250, as_cmap=True)\n",
-"bigdf = pd.DataFrame(np.random.default_rng(2).standard_normal(20, 25)).cumsum()\n",
+"np.random.seed(25)\n",
+"cmap = cmap = sns.diverging_palette(5, 250, as_cmap=True)\n",
+"bigdf = pd.DataFrame(np.random.randn(20, 25)).cumsum()\n",
 "\n",
 "bigdf.style.background_gradient(cmap, axis=1).set_properties(\n",
 " **{\"max-width\": \"80px\", \"font-size\": \"1pt\"}\n",
@@ -1683,7 +1671,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"bigdf = pd.DataFrame(np.random.default_rng(2).standard_normal(16, 100))\n",
+"bigdf = pd.DataFrame(np.random.randn(16, 100))\n",
 "bigdf.style.set_sticky(axis=\"index\")"
 ]
 },
@@ -2035,8 +2023,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"with open(\"templates/myhtml.tpl\") as fle:\n",
-" print(fle.read())"
+"with open(\"templates/myhtml.tpl\") as f:\n",
+" print(f.read())"
 ]
 },
 {
@@ -2142,8 +2130,8 @@
 },
 "outputs": [],
 "source": [
-"with open(\"templates/html_style_structure.html\") as fl:\n",
-" style_structure = fl.read()"
+"with open(\"templates/html_style_structure.html\") as f:\n",
+" style_structure = f.read()"
 ]
 },
 {
@@ -2170,8 +2158,8 @@
 },
 "outputs": [],
 "source": [
-"with open(\"templates/html_table_structure.html\") as f_tbl:\n",
-" table_structure = f_tbl.read()"
+"with open(\"templates/html_table_structure.html\") as f:\n",
+" table_structure = f.read()"
 ]
 },
 {
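
The notebook hunks above all revert the same kind of change: the reverted commit had moved the examples to NumPy's Generator API (np.random.default_rng), and this revert restores the legacy global-state functions (np.random.seed, np.random.rand, np.random.randn). As a minimal illustrative sketch (not part of the commit), the two styles differ like this:

    import numpy as np

    # Legacy global-state API, as restored by this revert: seed the shared
    # RandomState once, then draw from it implicitly.
    np.random.seed(0)
    legacy = np.random.randn(10, 4)        # standard normal, shape (10, 4)
    uniform = np.random.rand(10, 2) * 5    # uniform on [0, 5), shape (10, 2)

    # Generator API that the reverted commit had introduced.  Note that
    # Generator.standard_normal takes the shape as a single `size` argument,
    # so it needs a tuple; `standard_normal(10, 2)` as written in the reverted
    # lines would be rejected.
    rng = np.random.default_rng(2)
    modern = rng.standard_normal((10, 4))  # standard normal, shape (10, 4)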

pandas/core/arrays/categorical.py
Lines changed: 2 additions & 2 deletions

@@ -569,8 +569,8 @@ def astype(self, dtype: AstypeArg, copy: bool = True) -> ArrayLike:
         elif isinstance(dtype, CategoricalDtype):
             # GH 10696/18593/18630
             dtype = self.dtype.update_dtype(dtype)
-            result = self.copy() if copy else self
-            result = result._set_dtype(dtype)
+            self = self.copy() if copy else self
+            result = self._set_dtype(dtype)
 
         elif isinstance(dtype, ExtensionDtype):
             return super().astype(dtype, copy=copy)
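
The two-line change in pandas/core/arrays/categorical.py restores a rebinding of `self` that the reverted commit had replaced with a separate local `result`, presumably to satisfy ruff's PLW0642 rule (reassigned `self` in an instance method), the same rule silenced with noqa comments in the file below. A rough, self-contained illustration of the two spellings (hypothetical Box class, not pandas code):

    class Box:
        def __init__(self, values: list[int]) -> None:
            self.values = values

        def copy(self) -> "Box":
            return Box(list(self.values))

        def doubled(self, copy: bool = True) -> "Box":
            # Spelling restored by the revert: rebind the local name `self`.
            # This is the pattern ruff reports as PLW0642.
            self = self.copy() if copy else self
            self.values = [v * 2 for v in self.values]
            return self

        def doubled_without_rebinding(self, copy: bool = True) -> "Box":
            # Spelling from the reverted commit: leave `self` alone and work
            # through a separate local variable instead.
            result = self.copy() if copy else self
            result.values = [v * 2 for v in result.values]
            return result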

pandas/core/arrays/datetimelike.py
Lines changed: 20 additions & 20 deletions

@@ -454,7 +454,7 @@ def astype(self, dtype, copy: bool = True):
 
         if dtype == object:
             if self.dtype.kind == "M":
-                self = cast("DatetimeArray", self)  # noqa: PLW0642
+                self = cast("DatetimeArray", self)
                 # *much* faster than self._box_values
                 # for e.g. test_get_loc_tuple_monotonic_above_size_cutoff
                 i8data = self.asi8
@@ -776,7 +776,7 @@ def isin(self, values: ArrayLike) -> npt.NDArray[np.bool_]:
             return np.zeros(self.shape, dtype=bool)
 
         if self.dtype.kind in "mM":
-            self = cast("DatetimeArray | TimedeltaArray", self)  # noqa: PLW0642
+            self = cast("DatetimeArray | TimedeltaArray", self)
             # error: "DatetimeLikeArrayMixin" has no attribute "as_unit"
             values = values.as_unit(self.unit)  # type: ignore[attr-defined]
 
@@ -977,7 +977,7 @@ def _cmp_method(self, other, op):
             return result
 
         if not isinstance(self.dtype, PeriodDtype):
-            self = cast(TimelikeOps, self)  # noqa: PLW0642
+            self = cast(TimelikeOps, self)
             if self._creso != other._creso:
                 if not isinstance(other, type(self)):
                     # i.e. Timedelta/Timestamp, cast to ndarray and let
@@ -1063,7 +1063,7 @@ def _add_datetimelike_scalar(self, other) -> DatetimeArray:
                 f"cannot add {type(self).__name__} and {type(other).__name__}"
             )
 
-        self = cast("TimedeltaArray", self)  # noqa: PLW0642
+        self = cast("TimedeltaArray", self)
 
         from pandas.core.arrays import DatetimeArray
         from pandas.core.arrays.datetimes import tz_to_dtype
@@ -1078,8 +1078,8 @@ def _add_datetimelike_scalar(self, other) -> DatetimeArray:
             return DatetimeArray._simple_new(result, dtype=result.dtype)
 
         other = Timestamp(other)
-        self, other = self._ensure_matching_resos(other)  # noqa: PLW0642
-        self = cast("TimedeltaArray", self)  # noqa: PLW0642
+        self, other = self._ensure_matching_resos(other)
+        self = cast("TimedeltaArray", self)
 
         other_i8, o_mask = self._get_i8_values_and_mask(other)
         result = add_overflowsafe(self.asi8, np.asarray(other_i8, dtype="i8"))
@@ -1107,7 +1107,7 @@ def _sub_datetimelike_scalar(
         if self.dtype.kind != "M":
             raise TypeError(f"cannot subtract a datelike from a {type(self).__name__}")
 
-        self = cast("DatetimeArray", self)  # noqa: PLW0642
+        self = cast("DatetimeArray", self)
         # subtract a datetime from myself, yielding a ndarray[timedelta64[ns]]
 
         if isna(other):
@@ -1116,7 +1116,7 @@ def _sub_datetimelike_scalar(
 
         ts = Timestamp(other)
 
-        self, ts = self._ensure_matching_resos(ts)  # noqa: PLW0642
+        self, ts = self._ensure_matching_resos(ts)
         return self._sub_datetimelike(ts)
 
     @final
@@ -1127,14 +1127,14 @@ def _sub_datetime_arraylike(self, other: DatetimeArray) -> TimedeltaArray:
         if len(self) != len(other):
             raise ValueError("cannot add indices of unequal length")
 
-        self = cast("DatetimeArray", self)  # noqa: PLW0642
+        self = cast("DatetimeArray", self)
 
-        self, other = self._ensure_matching_resos(other)  # noqa: PLW0642
+        self, other = self._ensure_matching_resos(other)
         return self._sub_datetimelike(other)
 
     @final
     def _sub_datetimelike(self, other: Timestamp | DatetimeArray) -> TimedeltaArray:
-        self = cast("DatetimeArray", self)  # noqa: PLW0642
+        self = cast("DatetimeArray", self)
 
         from pandas.core.arrays import TimedeltaArray
 
@@ -1183,9 +1183,9 @@ def _add_timedeltalike_scalar(self, other):
             return type(self)._simple_new(new_values, dtype=self.dtype)
 
         # PeriodArray overrides, so we only get here with DTA/TDA
-        self = cast("DatetimeArray | TimedeltaArray", self)  # noqa: PLW0642
+        self = cast("DatetimeArray | TimedeltaArray", self)
         other = Timedelta(other)
-        self, other = self._ensure_matching_resos(other)  # noqa: PLW0642
+        self, other = self._ensure_matching_resos(other)
         return self._add_timedeltalike(other)
 
     def _add_timedelta_arraylike(self, other: TimedeltaArray) -> Self:
@@ -1201,7 +1201,7 @@ def _add_timedelta_arraylike(self, other: TimedeltaArray) -> Self:
         if len(self) != len(other):
             raise ValueError("cannot add indices of unequal length")
 
-        self, other = cast(  # noqa: PLW0642
+        self, other = cast(
             "DatetimeArray | TimedeltaArray", self
         )._ensure_matching_resos(other)
         return self._add_timedeltalike(other)
@@ -1258,7 +1258,7 @@ def _sub_nat(self) -> np.ndarray:
         result.fill(iNaT)
         if self.dtype.kind in "mM":
             # We can retain unit in dtype
-            self = cast("DatetimeArray| TimedeltaArray", self)  # noqa: PLW0642
+            self = cast("DatetimeArray| TimedeltaArray", self)
             return result.view(f"timedelta64[{self.unit}]")
         else:
             return result.view("timedelta64[ns]")
@@ -1272,7 +1272,7 @@ def _sub_periodlike(self, other: Period | PeriodArray) -> npt.NDArray[np.object_
                 f"cannot subtract {type(other).__name__} from {type(self).__name__}"
             )
 
-        self = cast("PeriodArray", self)  # noqa: PLW0642
+        self = cast("PeriodArray", self)
         self._check_compatible_with(other)
 
         other_i8, o_mask = self._get_i8_values_and_mask(other)
@@ -1478,7 +1478,7 @@ def __rsub__(self, other):
             # TODO: Can we simplify/generalize these cases at all?
             raise TypeError(f"cannot subtract {type(self).__name__} from {other.dtype}")
         elif lib.is_np_dtype(self.dtype, "m"):
-            self = cast("TimedeltaArray", self)  # noqa: PLW0642
+            self = cast("TimedeltaArray", self)
             return (-self) + other
 
         # We get here with e.g. datetime objects
@@ -1697,7 +1697,7 @@ def _groupby_op(
 
             if isinstance(self.dtype, PeriodDtype):
                 raise TypeError("'std' and 'sem' are not valid for PeriodDtype")
-            self = cast("DatetimeArray | TimedeltaArray", self)  # noqa: PLW0642
+            self = cast("DatetimeArray | TimedeltaArray", self)
             new_dtype = f"m8[{self.unit}]"
             res_values = res_values.view(new_dtype)
             return TimedeltaArray._simple_new(res_values, dtype=res_values.dtype)
@@ -2133,7 +2133,7 @@ def _ensure_matching_resos(self, other):
         if self._creso != other._creso:
             # Just as with Timestamp/Timedelta, we cast to the higher resolution
             if self._creso < other._creso:
-                self = self.as_unit(other.unit)  # noqa: PLW0642
+                self = self.as_unit(other.unit)
             else:
                 other = other.as_unit(self.unit)
         return self, other
@@ -2155,7 +2155,7 @@ def _round(self, freq, mode, ambiguous, nonexistent):
         # round the local times
         if isinstance(self.dtype, DatetimeTZDtype):
             # operate on naive timestamps, then convert back to aware
-            self = cast("DatetimeArray", self)  # noqa: PLW0642
+            self = cast("DatetimeArray", self)
             naive = self.tz_localize(None)
             result = naive._round(freq, mode, ambiguous, nonexistent)
             return result.tz_localize(
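
Every hunk in datetimelike.py drops a `# noqa: PLW0642` suppression that the reverted commit had attached to a line rebinding `self`. The underlying idiom is a typing.cast that narrows `self` for the static type checker inside a shared mixin method. A reduced sketch of that idiom (the class names here are placeholders, not the pandas hierarchy):

    from typing import cast


    class BaseArray:
        """Shared mixin; subclasses set `kind` and may add extra attributes."""

        kind: str = "?"

        def describe(self) -> str:
            if self.kind == "M":
                # The runtime check guarantees we really are a DatetimeLikeArray,
                # but a static checker cannot see that, so `self` is rebound via
                # cast() to unlock subclass-only attributes.  Ruff reports the
                # rebinding as PLW0642; the reverted commit silenced it with
                # `# noqa: PLW0642`, and this revert removes those comments.
                self = cast("DatetimeLikeArray", self)
                return f"datetime-like array with unit {self.unit}"
            return "generic array"


    class DatetimeLikeArray(BaseArray):
        kind = "M"
        unit = "ns"


    print(BaseArray().describe())          # -> generic array
    print(DatetimeLikeArray().describe())  # -> datetime-like array with unit ns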
