Skip to content

Commit 992515f

Browse files
authored
chore: enable more Ruff rules (RET505, A005, ISC001, PD003) (#2923)
1 parent f76311d commit 992515f

39 files changed

+270
-347
lines changed

narwhals/__init__.py

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -181,6 +181,5 @@ def __getattr__(name: _t.Literal["__version__"]) -> str: # type: ignore[misc]
181181

182182
__version__ = metadata.version(__name__)
183183
return __version__
184-
else:
185-
msg = f"module {__name__!r} has no attribute {name!r}"
186-
raise AttributeError(msg)
184+
msg = f"module {__name__!r} has no attribute {name!r}"
185+
raise AttributeError(msg)

narwhals/_arrow/dataframe.py

Lines changed: 10 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -497,11 +497,10 @@ def head(self, n: int) -> Self:
497497
df = self.native
498498
if n >= 0:
499499
return self._with_native(df.slice(0, n), validate_column_names=False)
500-
else:
501-
num_rows = df.num_rows
502-
return self._with_native(
503-
df.slice(0, max(0, num_rows + n)), validate_column_names=False
504-
)
500+
num_rows = df.num_rows
501+
return self._with_native(
502+
df.slice(0, max(0, num_rows + n)), validate_column_names=False
503+
)
505504

506505
def tail(self, n: int) -> Self:
507506
df = self.native
@@ -510,13 +509,12 @@ def tail(self, n: int) -> Self:
510509
return self._with_native(
511510
df.slice(max(0, num_rows - n)), validate_column_names=False
512511
)
513-
else:
514-
return self._with_native(df.slice(abs(n)), validate_column_names=False)
512+
return self._with_native(df.slice(abs(n)), validate_column_names=False)
515513

516514
def lazy(self, *, backend: Implementation | None = None) -> CompliantLazyFrameAny:
517515
if backend is None:
518516
return self
519-
elif backend is Implementation.DUCKDB:
517+
if backend is Implementation.DUCKDB:
520518
import duckdb # ignore-banned-import
521519

522520
from narwhals._duckdb.dataframe import DuckDBLazyFrame
@@ -525,7 +523,7 @@ def lazy(self, *, backend: Implementation | None = None) -> CompliantLazyFrameAn
525523
return DuckDBLazyFrame(
526524
duckdb.table("df"), validate_backend_version=True, version=self._version
527525
)
528-
elif backend is Implementation.POLARS:
526+
if backend is Implementation.POLARS:
529527
import polars as pl # ignore-banned-import
530528

531529
from narwhals._polars.dataframe import PolarsLazyFrame
@@ -535,7 +533,7 @@ def lazy(self, *, backend: Implementation | None = None) -> CompliantLazyFrameAn
535533
validate_backend_version=True,
536534
version=self._version,
537535
)
538-
elif backend is Implementation.DASK:
536+
if backend is Implementation.DASK:
539537
import dask.dataframe as dd # ignore-banned-import
540538

541539
from narwhals._dask.dataframe import DaskLazyFrame
@@ -545,7 +543,7 @@ def lazy(self, *, backend: Implementation | None = None) -> CompliantLazyFrameAn
545543
validate_backend_version=True,
546544
version=self._version,
547545
)
548-
elif backend.is_ibis():
546+
if backend.is_ibis():
549547
import ibis # ignore-banned-import
550548

551549
from narwhals._ibis.dataframe import IbisLazyFrame
@@ -609,7 +607,7 @@ def item(self, row: int | None, column: int | str | None) -> Any:
609607
raise ValueError(msg)
610608
return maybe_extract_py_scalar(self.native[0][0], return_py_scalar=True)
611609

612-
elif row is None or column is None:
610+
if row is None or column is None:
613611
msg = "cannot call `.item()` with only one of `row` or `column`"
614612
raise ValueError(msg)
615613

narwhals/_arrow/series.py

Lines changed: 16 additions & 20 deletions
Original file line number | Diff line number | Diff line change
@@ -380,29 +380,27 @@ def skew(self, *, _return_py_scalar: bool = True) -> float | None:
380380
ser_not_null = self.native.drop_null()
381381
if len(ser_not_null) == 0:
382382
return None
383-
elif len(ser_not_null) == 1:
383+
if len(ser_not_null) == 1:
384384
return float("nan")
385-
elif len(ser_not_null) == 2:
385+
if len(ser_not_null) == 2:
386386
return 0.0
387-
else:
388-
m = pc.subtract(ser_not_null, pc.mean(ser_not_null))
389-
m2 = pc.mean(pc.power(m, lit(2)))
390-
m3 = pc.mean(pc.power(m, lit(3)))
391-
biased_population_skewness = pc.divide(m3, pc.power(m2, lit(1.5)))
392-
return maybe_extract_py_scalar(biased_population_skewness, _return_py_scalar)
387+
m = pc.subtract(ser_not_null, pc.mean(ser_not_null))
388+
m2 = pc.mean(pc.power(m, lit(2)))
389+
m3 = pc.mean(pc.power(m, lit(3)))
390+
biased_population_skewness = pc.divide(m3, pc.power(m2, lit(1.5)))
391+
return maybe_extract_py_scalar(biased_population_skewness, _return_py_scalar)
393392

394393
def kurtosis(self, *, _return_py_scalar: bool = True) -> float | None:
395394
ser_not_null = self.native.drop_null()
396395
if len(ser_not_null) == 0:
397396
return None
398-
elif len(ser_not_null) == 1:
397+
if len(ser_not_null) == 1:
399398
return float("nan")
400-
else:
401-
m = pc.subtract(ser_not_null, pc.mean(ser_not_null))
402-
m2 = pc.mean(pc.power(m, lit(2)))
403-
m4 = pc.mean(pc.power(m, lit(4)))
404-
k = pc.subtract(pc.divide(m4, pc.power(m2, lit(2))), lit(3))
405-
return maybe_extract_py_scalar(k, _return_py_scalar)
399+
m = pc.subtract(ser_not_null, pc.mean(ser_not_null))
400+
m2 = pc.mean(pc.power(m, lit(2)))
401+
m4 = pc.mean(pc.power(m, lit(4)))
402+
k = pc.subtract(pc.divide(m4, pc.power(m2, lit(2))), lit(3))
403+
return maybe_extract_py_scalar(k, _return_py_scalar)
406404

407405
def count(self, *, _return_py_scalar: bool = True) -> int:
408406
return maybe_extract_py_scalar(pc.count(self.native), _return_py_scalar)
@@ -563,16 +561,14 @@ def null_count(self, *, _return_py_scalar: bool = True) -> int:
563561
def head(self, n: int) -> Self:
564562
if n >= 0:
565563
return self._with_native(self.native.slice(0, n))
566-
else:
567-
num_rows = len(self)
568-
return self._with_native(self.native.slice(0, max(0, num_rows + n)))
564+
num_rows = len(self)
565+
return self._with_native(self.native.slice(0, max(0, num_rows + n)))
569566

570567
def tail(self, n: int) -> Self:
571568
if n >= 0:
572569
num_rows = len(self)
573570
return self._with_native(self.native.slice(max(0, num_rows - n)))
574-
else:
575-
return self._with_native(self.native.slice(abs(n)))
571+
return self._with_native(self.native.slice(abs(n)))
576572

577573
def is_in(self, other: Any) -> Self:
578574
if self._is_native(other):

narwhals/_arrow/series_dt.py

Lines changed: 3 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -98,13 +98,12 @@ def timestamp(self, time_unit: TimeUnit) -> ArrowSeries:
9898
msg = f"unexpected time unit {current}, please report an issue at https://github.com/narwhals-dev/narwhals"
9999
raise AssertionError(msg)
100100
return self.with_native(result)
101-
elif isinstance(ser.dtype, dtypes.Date):
101+
if isinstance(ser.dtype, dtypes.Date):
102102
time_s = pc.multiply(self.native.cast(pa.int32()), lit(SECONDS_PER_DAY))
103103
factor = self._TIMESTAMP_DATE_FACTOR[time_unit]
104104
return self.with_native(pc.multiply(time_s, lit(factor)))
105-
else:
106-
msg = "Input should be either of Date or Datetime type"
107-
raise TypeError(msg)
105+
msg = "Input should be either of Date or Datetime type"
106+
raise TypeError(msg)
108107

109108
def date(self) -> ArrowSeries:
110109
return self.with_native(self.native.cast(pa.date32()))

narwhals/_arrow/utils.py

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -88,8 +88,7 @@ def chunked_array(
8888
return arr
8989
if isinstance(arr, list):
9090
return pa.chunked_array(arr, dtype)
91-
else:
92-
return pa.chunked_array([arr], arr.type)
91+
return pa.chunked_array([arr], arr.type)
9392

9493

9594
def nulls_like(n: int, series: ArrowSeries) -> ArrayAny:
@@ -381,7 +380,7 @@ def _parse_date_format(arr: pc.StringArray) -> str:
381380
matches = pc.extract_regex(arr, pattern=date_rgx)
382381
if date_fmt == "%Y%m%d" and pc.all(matches.is_valid()).as_py():
383382
return date_fmt
384-
elif (
383+
if (
385384
pc.all(matches.is_valid()).as_py()
386385
and pc.count(pc.unique(sep1 := matches.field("sep1"))).as_py() == 1
387386
and pc.count(pc.unique(sep2 := matches.field("sep2"))).as_py() == 1

narwhals/_compliant/group_by.py

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -81,8 +81,7 @@ def _parse_keys(
8181
if is_sequence_of(keys, str):
8282
keys_str = list(keys)
8383
return compliant_frame, keys_str, keys_str.copy()
84-
else:
85-
return self._parse_expr_keys(compliant_frame, keys=keys)
84+
return self._parse_expr_keys(compliant_frame, keys=keys)
8685

8786
@staticmethod
8887
def _parse_expr_keys(

narwhals/_compliant/namespace.py

Lines changed: 3 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -135,9 +135,8 @@ def _lazyframe(self) -> type[CompliantLazyFrameT]: ...
135135
def from_native(self, data: NativeFrameT_co | Any, /) -> CompliantLazyFrameT:
136136
if self._lazyframe._is_native(data):
137137
return self._lazyframe.from_native(data, context=self)
138-
else: # pragma: no cover
139-
msg = f"Unsupported type: {type(data).__name__!r}"
140-
raise TypeError(msg)
138+
msg = f"Unsupported type: {type(data).__name__!r}" # pragma: no cover
139+
raise TypeError(msg)
141140

142141

143142
class EagerNamespace(
@@ -165,7 +164,7 @@ def from_native(
165164
) -> EagerDataFrameT | EagerSeriesT:
166165
if self._dataframe._is_native(data):
167166
return self._dataframe.from_native(data, context=self)
168-
elif self._series._is_native(data):
167+
if self._series._is_native(data):
169168
return self._series.from_native(data, context=self)
170169
msg = f"Unsupported type: {type(data).__name__!r}"
171170
raise TypeError(msg)

narwhals/_compliant/series.py

Lines changed: 3 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -352,12 +352,11 @@ def _gather_slice(self, rows: _SliceIndex | range) -> Self: ...
352352
def __getitem__(self, item: MultiIndexSelector[Self]) -> Self:
353353
if isinstance(item, (slice, range)):
354354
return self._gather_slice(item)
355-
elif is_compliant_series(item):
355+
if is_compliant_series(item):
356356
return self._gather(item.native)
357-
elif is_sized_multi_index_selector(item):
357+
elif is_sized_multi_index_selector(item): # noqa: RET505
358358
return self._gather(item)
359-
else:
360-
assert_never(item)
359+
assert_never(item)
361360

362361
@property
363362
def str(self) -> EagerSeriesStringNamespace[Self, NativeSeriesT]: ...

narwhals/_dask/dataframe.py

Lines changed: 12 additions & 16 deletions
Original file line number | Diff line number | Diff line change
@@ -219,19 +219,14 @@ def with_row_index(self, name: str, order_by: Sequence[str] | None) -> Self:
219219
# https://stackoverflow.com/questions/60831518/in-dask-how-does-one-add-a-range-of-integersauto-increment-to-a-new-column/60852409#60852409
220220
if order_by is None:
221221
return self._with_native(add_row_index(self.native, name))
222-
else:
223-
plx = self.__narwhals_namespace__()
224-
columns = self.columns
225-
const_expr = (
226-
plx.lit(value=1, dtype=None).alias(name).broadcast(ExprKind.LITERAL)
227-
)
228-
row_index_expr = (
229-
plx.col(name)
230-
.cum_sum(reverse=False)
231-
.over(partition_by=[], order_by=order_by)
232-
- 1
233-
)
234-
return self.with_columns(const_expr).select(row_index_expr, plx.col(*columns))
222+
plx = self.__narwhals_namespace__()
223+
columns = self.columns
224+
const_expr = plx.lit(value=1, dtype=None).alias(name).broadcast(ExprKind.LITERAL)
225+
row_index_expr = (
226+
plx.col(name).cum_sum(reverse=False).over(partition_by=[], order_by=order_by)
227+
- 1
228+
)
229+
return self.with_columns(const_expr).select(row_index_expr, plx.col(*columns))
235230

236231
def rename(self, mapping: Mapping[str, str]) -> Self:
237232
return self._with_native(self.native.rename(columns=mapping))
@@ -450,9 +445,10 @@ def tail(self, n: int) -> Self: # pragma: no cover
450445

451446
if n_partitions == 1:
452447
return self._with_native(self.native.tail(n=n, compute=False))
453-
else:
454-
msg = "`LazyFrame.tail` is not supported for Dask backend with multiple partitions."
455-
raise NotImplementedError(msg)
448+
msg = (
449+
"`LazyFrame.tail` is not supported for Dask backend with multiple partitions."
450+
)
451+
raise NotImplementedError(msg)
456452

457453
def gather_every(self, n: int, offset: int) -> Self:
458454
row_index_token = generate_temporary_column_name(n_bytes=8, columns=self.columns)

narwhals/_dask/expr.py

Lines changed: 6 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -397,9 +397,8 @@ def rolling_var(
397397
).var(),
398398
"rolling_var",
399399
)
400-
else:
401-
msg = "Dask backend only supports `ddof=1` for `rolling_var`"
402-
raise NotImplementedError(msg)
400+
msg = "Dask backend only supports `ddof=1` for `rolling_var`"
401+
raise NotImplementedError(msg)
403402

404403
def rolling_std(
405404
self, window_size: int, *, min_samples: int, center: bool, ddof: int
@@ -411,9 +410,8 @@ def rolling_std(
411410
).std(),
412411
"rolling_std",
413412
)
414-
else:
415-
msg = "Dask backend only supports `ddof=1` for `rolling_std`"
416-
raise NotImplementedError(msg)
413+
msg = "Dask backend only supports `ddof=1` for `rolling_std`"
414+
raise NotImplementedError(msg)
417415

418416
def sum(self) -> Self:
419417
return self._with_callable(lambda expr: expr.sum().to_series(), "sum")
@@ -520,9 +518,8 @@ def func(expr: dx.Series, quantile: float) -> dx.Series:
520518
).to_series() # pragma: no cover
521519

522520
return self._with_callable(func, "quantile", quantile=quantile)
523-
else:
524-
msg = "`higher`, `lower`, `midpoint`, `nearest` - interpolation methods are not supported by Dask. Please use `linear` instead."
525-
raise NotImplementedError(msg)
521+
msg = "`higher`, `lower`, `midpoint`, `nearest` - interpolation methods are not supported by Dask. Please use `linear` instead."
522+
raise NotImplementedError(msg)
526523

527524
def is_first_distinct(self) -> Self:
528525
def func(expr: dx.Series) -> dx.Series:

0 commit comments

Comments (0)