
Commit 50481ca

Address new ruff checks
1 parent ba16174 commit 50481ca

File tree

doc/source/user_guide/style.ipynb
pandas/core/arrays/categorical.py
pandas/core/arrays/datetimelike.py

3 files changed: +64 −52 lines

doc/source/user_guide/style.ipynb

Lines changed: 42 additions & 30 deletions
@@ -100,7 +100,7 @@
 "outputs": [],
 "source": [
 "weather_df = pd.DataFrame(\n",
-"    np.random.rand(10, 2) * 5,\n",
+"    np.random.default_rng(2).standard_normal((10, 2)) * 5,\n",
 "    index=pd.date_range(start=\"2021-01-01\", periods=10),\n",
 "    columns=[\"Tokyo\", \"Beijing\"],\n",
 ")\n",
@@ -157,7 +157,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"df = pd.DataFrame(np.random.randn(5, 5))\n",
+"df = pd.DataFrame(np.random.default_rng(2).standard_normal((5, 5)))\n",
 "df.style.hide(subset=[0, 2, 4], axis=0).hide(subset=[0, 2, 4], axis=1)"
 ]
 },
@@ -302,9 +302,16 @@
 "        columns=df.columns,\n",
 "    ),\n",
 "    css_class=\"pd-tt\",\n",
-"    props=\"visibility: hidden; position: absolute; z-index: 1; border: 1px solid #000066;\"\n",
-"    \"background-color: white; color: #000066; font-size: 0.8em;\"\n",
-"    \"transform: translate(0px, -24px); padding: 0.6em; border-radius: 0.5em;\",\n",
+"    props=\"visibility: hidden;\"\n",
+"    \"position: absolute;\"\n",
+"    \"z-index: 1;\"\n",
+"    \"border: 1px solid #000066;\"\n",
+"    \"background-color: white;\"\n",
+"    \"color: #000066;\"\n",
+"    \"font-size: 0.8em;\"\n",
+"    \"transform: translate(0px, -24px);\"\n",
+"    \"padding: 0.6em;\"\n",
+"    \"border-radius: 0.5em;\",\n",
 "    )\n",
 ")"
 ]
@@ -602,8 +609,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"np.random.seed(0)\n",
-"df2 = pd.DataFrame(np.random.randn(10, 4), columns=[\"A\", \"B\", \"C\", \"D\"])\n",
+"df2 = pd.DataFrame(\n",
+"    np.random.default_rng(2).standard_normal((10, 4)), columns=[\"A\", \"B\", \"C\", \"D\"]\n",
+")\n",
 "df2.style"
 ]
 },
@@ -812,9 +820,14 @@
 ")\n",
 "s.set_tooltips(\n",
 "    tt,\n",
-"    props=\"visibility: hidden; position: absolute; z-index: 1; border: 1px solid #000066;\"\n",
-"    \"background-color: white; color: #000066; font-size: 0.8em;\"\n",
-"    \"transform: translate(0px, -24px); padding: 0.6em; border-radius: 0.5em;\",\n",
+"    props=\"visibility: hidden;\"\n",
+"    \"position: absolute; z-index: 1;\"\n",
+"    \"border: 1px solid #000066;\"\n",
+"    \"background-color: white;\"\n",
+"    \"color: #000066;\"\n",
+"    \"font-size: 0.8em;\"\n",
+"    \"transform: translate(0px, -24px);\"\n",
+"    \"padding: 0.6em; border-radius: 0.5em;\",\n",
 ")"
 ]
 },
@@ -894,7 +907,7 @@
 "outputs": [],
 "source": [
 "df3 = pd.DataFrame(\n",
-"    np.random.randn(4, 4),\n",
+"    np.random.default_rng(2).standard_normal((4, 4)),\n",
 "    pd.MultiIndex.from_product([[\"A\", \"B\"], [\"r1\", \"r2\"]]),\n",
 "    columns=[\"c1\", \"c2\", \"c3\", \"c4\"],\n",
 ")\n",
@@ -1606,10 +1619,10 @@
 "\n",
 "\n",
 "@widgets.interact\n",
-"def f(h_neg=(0, 359, 1), h_pos=(0, 359), s=(0.0, 99.9), l=(0.0, 99.9)):\n",
+"def f(h_neg=(0, 359, 1), h_pos=(0, 359), s=(0.0, 99.9), l_var=(0.0, 99.9)):\n",
 "    return df2.style.background_gradient(\n",
 "        cmap=sns.palettes.diverging_palette(\n",
-"            h_neg=h_neg, h_pos=h_pos, s=s, l=l, as_cmap=True\n",
+"            h_neg=h_neg, h_pos=h_pos, s=s, l=l_var, as_cmap=True\n",
 "        )\n",
 "    )"
 ]
@@ -1629,13 +1642,13 @@
 "source": [
 "def magnify():\n",
 "    return [\n",
-"        dict(selector=\"th\", props=[(\"font-size\", \"4pt\")]),\n",
-"        dict(selector=\"td\", props=[(\"padding\", \"0em 0em\")]),\n",
-"        dict(selector=\"th:hover\", props=[(\"font-size\", \"12pt\")]),\n",
-"        dict(\n",
-"            selector=\"tr:hover td:hover\",\n",
-"            props=[(\"max-width\", \"200px\"), (\"font-size\", \"12pt\")],\n",
-"        ),\n",
+"        {\"selector\": \"th\", \"props\": [(\"font-size\", \"4pt\")]},\n",
+"        {\"selector\": \"td\", \"props\": [(\"padding\", \"0em 0em\")]},\n",
+"        {\"selector\": \"th:hover\", \"props\": [(\"font-size\", \"12pt\")]},\n",
+"        {\n",
+"            \"selector\": \"tr:hover td:hover\",\n",
+"            \"props\": [(\"max-width\", \"200px\"), (\"font-size\", \"12pt\")],\n",
+"        },\n",
 "    ]"
 ]
 },
@@ -1645,9 +1658,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"np.random.seed(25)\n",
-"cmap = cmap = sns.diverging_palette(5, 250, as_cmap=True)\n",
-"bigdf = pd.DataFrame(np.random.randn(20, 25)).cumsum()\n",
+"cmap = sns.diverging_palette(5, 250, as_cmap=True)\n",
+"bigdf = pd.DataFrame(np.random.default_rng(2).standard_normal((20, 25))).cumsum()\n",
 "\n",
 "bigdf.style.background_gradient(cmap, axis=1).set_properties(\n",
 "    **{\"max-width\": \"80px\", \"font-size\": \"1pt\"}\n",
@@ -1671,7 +1683,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"bigdf = pd.DataFrame(np.random.randn(16, 100))\n",
+"bigdf = pd.DataFrame(np.random.default_rng(2).standard_normal((16, 100)))\n",
 "bigdf.style.set_sticky(axis=\"index\")"
 ]
 },
@@ -2023,8 +2035,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"with open(\"templates/myhtml.tpl\") as f:\n",
-"    print(f.read())"
+"with open(\"templates/myhtml.tpl\") as fle:\n",
+"    print(fle.read())"
 ]
 },
 {
@@ -2130,8 +2142,8 @@
 },
 "outputs": [],
 "source": [
-"with open(\"templates/html_style_structure.html\") as f:\n",
-"    style_structure = f.read()"
+"with open(\"templates/html_style_structure.html\") as fl:\n",
+"    style_structure = fl.read()"
 ]
 },
 {
@@ -2158,8 +2170,8 @@
 },
 "outputs": [],
 "source": [
-"with open(\"templates/html_table_structure.html\") as f:\n",
-"    table_structure = f.read()"
+"with open(\"templates/html_table_structure.html\") as f_tbl:\n",
+"    table_structure = f_tbl.read()"
 ]
 },
 {
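The notebook edits above all follow the same migration that ruff's NPY002-style check encourages: drop the legacy global-state np.random functions in favour of an explicitly seeded Generator. A minimal before/after sketch, assuming only NumPy and pandas (the seed value 2 mirrors the diff; the frame itself is illustrative):

import numpy as np
import pandas as pd

# Legacy API: module-level functions draw from hidden global state,
# so reproducibility depends on a global np.random.seed(...) call.
np.random.seed(0)
legacy = pd.DataFrame(np.random.randn(10, 4), columns=["A", "B", "C", "D"])

# Modern API: an explicit, locally seeded Generator object.  Note that
# standard_normal takes the output shape as a single tuple argument.
rng = np.random.default_rng(2)
modern = pd.DataFrame(rng.standard_normal((10, 4)), columns=["A", "B", "C", "D"])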

pandas/core/arrays/categorical.py

Lines changed: 2 additions & 2 deletions
@@ -569,8 +569,8 @@ def astype(self, dtype: AstypeArg, copy: bool = True) -> ArrayLike:
         elif isinstance(dtype, CategoricalDtype):
             # GH 10696/18593/18630
             dtype = self.dtype.update_dtype(dtype)
-            self = self.copy() if copy else self
-            result = self._set_dtype(dtype)
+            result = self.copy() if copy else self
+            result = result._set_dtype(dtype)

         elif isinstance(dtype, ExtensionDtype):
             return super().astype(dtype, copy=copy)
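The categorical.py change is one of the two ways of satisfying ruff's PLW0642 ("reassigned self") check: instead of rebinding self, bind the (possibly copied) object to a local name and work on that. A small standalone sketch of the pattern, using an illustrative class rather than the real Categorical internals:

class Wrapper:
    # Illustrative stand-in, not pandas' Categorical.
    def __init__(self, values):
        self.values = list(values)

    def copy(self):
        return Wrapper(self.values)

    def scaled(self, factor, copy=True):
        # Rebinding ``self`` here would be flagged as PLW0642 by ruff:
        #     self = self.copy() if copy else self
        # Binding to a local name keeps the same copy-on-demand behaviour.
        result = self.copy() if copy else self
        result.values = [v * factor for v in result.values]
        return result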

pandas/core/arrays/datetimelike.py

Lines changed: 20 additions & 20 deletions
@@ -454,7 +454,7 @@ def astype(self, dtype, copy: bool = True):

         if dtype == object:
             if self.dtype.kind == "M":
-                self = cast("DatetimeArray", self)
+                self = cast("DatetimeArray", self)  # noqa: PLW0642
                 # *much* faster than self._box_values
                 # for e.g. test_get_loc_tuple_monotonic_above_size_cutoff
                 i8data = self.asi8
@@ -776,7 +776,7 @@ def isin(self, values: ArrayLike) -> npt.NDArray[np.bool_]:
             return np.zeros(self.shape, dtype=bool)

         if self.dtype.kind in "mM":
-            self = cast("DatetimeArray | TimedeltaArray", self)
+            self = cast("DatetimeArray | TimedeltaArray", self)  # noqa: PLW0642
             # error: "DatetimeLikeArrayMixin" has no attribute "as_unit"
             values = values.as_unit(self.unit)  # type: ignore[attr-defined]

@@ -977,7 +977,7 @@ def _cmp_method(self, other, op):
             return result

         if not isinstance(self.dtype, PeriodDtype):
-            self = cast(TimelikeOps, self)
+            self = cast(TimelikeOps, self)  # noqa: PLW0642
             if self._creso != other._creso:
                 if not isinstance(other, type(self)):
                     # i.e. Timedelta/Timestamp, cast to ndarray and let
@@ -1063,7 +1063,7 @@ def _add_datetimelike_scalar(self, other) -> DatetimeArray:
                 f"cannot add {type(self).__name__} and {type(other).__name__}"
             )

-        self = cast("TimedeltaArray", self)
+        self = cast("TimedeltaArray", self)  # noqa: PLW0642

         from pandas.core.arrays import DatetimeArray
         from pandas.core.arrays.datetimes import tz_to_dtype
@@ -1078,8 +1078,8 @@ def _add_datetimelike_scalar(self, other) -> DatetimeArray:
             return DatetimeArray._simple_new(result, dtype=result.dtype)

         other = Timestamp(other)
-        self, other = self._ensure_matching_resos(other)
-        self = cast("TimedeltaArray", self)
+        self, other = self._ensure_matching_resos(other)  # noqa: PLW0642
+        self = cast("TimedeltaArray", self)  # noqa: PLW0642

         other_i8, o_mask = self._get_i8_values_and_mask(other)
         result = add_overflowsafe(self.asi8, np.asarray(other_i8, dtype="i8"))
@@ -1107,7 +1107,7 @@ def _sub_datetimelike_scalar(
         if self.dtype.kind != "M":
             raise TypeError(f"cannot subtract a datelike from a {type(self).__name__}")

-        self = cast("DatetimeArray", self)
+        self = cast("DatetimeArray", self)  # noqa: PLW0642
         # subtract a datetime from myself, yielding a ndarray[timedelta64[ns]]

         if isna(other):
@@ -1116,7 +1116,7 @@

         ts = Timestamp(other)

-        self, ts = self._ensure_matching_resos(ts)
+        self, ts = self._ensure_matching_resos(ts)  # noqa: PLW0642
         return self._sub_datetimelike(ts)

     @final
@@ -1127,14 +1127,14 @@ def _sub_datetime_arraylike(self, other: DatetimeArray) -> TimedeltaArray:
         if len(self) != len(other):
             raise ValueError("cannot add indices of unequal length")

-        self = cast("DatetimeArray", self)
+        self = cast("DatetimeArray", self)  # noqa: PLW0642

-        self, other = self._ensure_matching_resos(other)
+        self, other = self._ensure_matching_resos(other)  # noqa: PLW0642
         return self._sub_datetimelike(other)

     @final
     def _sub_datetimelike(self, other: Timestamp | DatetimeArray) -> TimedeltaArray:
-        self = cast("DatetimeArray", self)
+        self = cast("DatetimeArray", self)  # noqa: PLW0642

         from pandas.core.arrays import TimedeltaArray

@@ -1183,9 +1183,9 @@ def _add_timedeltalike_scalar(self, other):
             return type(self)._simple_new(new_values, dtype=self.dtype)

         # PeriodArray overrides, so we only get here with DTA/TDA
-        self = cast("DatetimeArray | TimedeltaArray", self)
+        self = cast("DatetimeArray | TimedeltaArray", self)  # noqa: PLW0642
         other = Timedelta(other)
-        self, other = self._ensure_matching_resos(other)
+        self, other = self._ensure_matching_resos(other)  # noqa: PLW0642
         return self._add_timedeltalike(other)

     def _add_timedelta_arraylike(self, other: TimedeltaArray) -> Self:
@@ -1201,7 +1201,7 @@ def _add_timedelta_arraylike(self, other: TimedeltaArray) -> Self:
         if len(self) != len(other):
             raise ValueError("cannot add indices of unequal length")

-        self, other = cast(
+        self, other = cast(  # noqa: PLW0642
             "DatetimeArray | TimedeltaArray", self
         )._ensure_matching_resos(other)
         return self._add_timedeltalike(other)
@@ -1258,7 +1258,7 @@ def _sub_nat(self) -> np.ndarray:
         result.fill(iNaT)
         if self.dtype.kind in "mM":
             # We can retain unit in dtype
-            self = cast("DatetimeArray| TimedeltaArray", self)
+            self = cast("DatetimeArray| TimedeltaArray", self)  # noqa: PLW0642
             return result.view(f"timedelta64[{self.unit}]")
         else:
             return result.view("timedelta64[ns]")
@@ -1272,7 +1272,7 @@ def _sub_periodlike(self, other: Period | PeriodArray) -> npt.NDArray[np.object_
                 f"cannot subtract {type(other).__name__} from {type(self).__name__}"
             )

-        self = cast("PeriodArray", self)
+        self = cast("PeriodArray", self)  # noqa: PLW0642
         self._check_compatible_with(other)

         other_i8, o_mask = self._get_i8_values_and_mask(other)
@@ -1478,7 +1478,7 @@ def __rsub__(self, other):
             # TODO: Can we simplify/generalize these cases at all?
             raise TypeError(f"cannot subtract {type(self).__name__} from {other.dtype}")
         elif lib.is_np_dtype(self.dtype, "m"):
-            self = cast("TimedeltaArray", self)
+            self = cast("TimedeltaArray", self)  # noqa: PLW0642
             return (-self) + other

         # We get here with e.g. datetime objects
@@ -1697,7 +1697,7 @@ def _groupby_op(

             if isinstance(self.dtype, PeriodDtype):
                 raise TypeError("'std' and 'sem' are not valid for PeriodDtype")
-            self = cast("DatetimeArray | TimedeltaArray", self)
+            self = cast("DatetimeArray | TimedeltaArray", self)  # noqa: PLW0642
             new_dtype = f"m8[{self.unit}]"
             res_values = res_values.view(new_dtype)
             return TimedeltaArray._simple_new(res_values, dtype=res_values.dtype)
@@ -2133,7 +2133,7 @@ def _ensure_matching_resos(self, other):
         if self._creso != other._creso:
             # Just as with Timestamp/Timedelta, we cast to the higher resolution
             if self._creso < other._creso:
-                self = self.as_unit(other.unit)
+                self = self.as_unit(other.unit)  # noqa: PLW0642
             else:
                 other = other.as_unit(self.unit)
         return self, other
@@ -2155,7 +2155,7 @@ def _round(self, freq, mode, ambiguous, nonexistent):
         # round the local times
         if isinstance(self.dtype, DatetimeTZDtype):
             # operate on naive timestamps, then convert back to aware
-            self = cast("DatetimeArray", self)
+            self = cast("DatetimeArray", self)  # noqa: PLW0642
             naive = self.tz_localize(None)
             result = naive._round(freq, mode, ambiguous, nonexistent)
             return result.tz_localize(
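In datetimelike.py the self-reassignments are deliberate: they are typing.cast narrowings (or unit coercions) already guaranteed by the surrounding runtime checks, so the commit keeps them and silences PLW0642 with inline noqa comments rather than restructuring every method. A rough, self-contained sketch of that idiom, with hypothetical classes standing in for the real pandas array hierarchy:

from typing import cast


class DatetimeLikeBase:
    # Hypothetical stand-ins for the real pandas array hierarchy.
    kind = "M"

    def describe(self) -> str:
        if self.kind == "M":
            # The runtime check guarantees the narrower type, but only a cast
            # tells the type checker.  Rebinding ``self`` keeps the rest of the
            # method unchanged, so ruff's PLW0642 is silenced inline instead.
            self = cast("WithUnit", self)  # noqa: PLW0642
            return f"unit={self.unit}"
        return "no unit"


class WithUnit(DatetimeLikeBase):
    unit = "ns"

With these stand-ins, WithUnit().describe() returns "unit=ns"; the cast-and-rebind mirrors how the mixin methods assume the narrower subclasses at runtime.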
