Skip to content

Commit ff8674a

Browse files
authored
Use Ruff instead of Black, pyupgrade and isort (#2171)
* Replace black with ruff format for code formatting Signed-off-by: Deepyaman Datta <deepyaman.datta@utexas.edu> * Replace pyupgrade with Ruff check, and select "UP" Signed-off-by: Deepyaman Datta <deepyaman.datta@utexas.edu> * Bump the target version for Ruff up to Python 3.10 Signed-off-by: Deepyaman Datta <deepyaman.datta@utexas.edu> * Ignore `UP045` in cases when used programmatically Signed-off-by: Deepyaman Datta <deepyaman.datta@utexas.edu> * Remedy a violation of Liskov substitution principle Signed-off-by: Deepyaman Datta <deepyaman.datta@utexas.edu> * Reformat the affected files using Ruff Black rules Signed-off-by: Deepyaman Datta <deepyaman.datta@utexas.edu> * Replace isort with Ruff for organizing the imports Signed-off-by: Deepyaman Datta <deepyaman.datta@utexas.edu> * Replace black with ruff format for code formatting Signed-off-by: Deepyaman Datta <deepyaman.datta@utexas.edu> --------- Signed-off-by: Deepyaman Datta <deepyaman.datta@utexas.edu>
1 parent 496a7cb commit ff8674a

File tree

169 files changed

+1215
-1204
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

169 files changed

+1215
-1204
lines changed

.github/workflows/ci-tests.yml

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -68,12 +68,13 @@ jobs:
6868
- name: Check requirements
6969
run: nox -db uv -r --non-interactive --python ${{ matrix.python-version }} --session requirements-${{ matrix.python-version }}
7070

71-
- name: Sort Imports
72-
run: pre-commit run isort --all-files
71+
- if: always()
72+
uses: astral-sh/ruff-action@v3
7373

74-
- name: Black
75-
if: always()
76-
run: pre-commit run black --all-files
74+
- if: always()
75+
uses: astral-sh/ruff-action@v3
76+
with:
77+
args: "format --check --diff"
7778

7879
- name: Mypy Type Checking
7980
if: always()

.pre-commit-config.yaml

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -32,10 +32,12 @@ repos:
3232
hooks:
3333
- id: flynt
3434

35-
- repo: https://github.com/psf/black
36-
rev: 24.4.2
35+
- repo: https://github.com/astral-sh/ruff-pre-commit
36+
rev: v0.14.5
3737
hooks:
38-
- id: black
38+
- id: ruff-check
39+
args: [--fix]
40+
- id: ruff-format
3941

4042
- repo: https://github.com/asottile/pyupgrade
4143
rev: v3.20.0

docs/source/notebooks/try_pandera.ipynb

Lines changed: 22 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -71,6 +71,7 @@
7171
"outputs": [],
7272
"source": [
7373
"import pandas as pd\n",
74+
"\n",
7475
"import pandera.pandas as pa\n",
7576
"from pandera.typing import DataFrame, Series\n",
7677
"\n",
@@ -127,7 +128,7 @@
127128
"@pa.check_types(lazy=True)\n",
128129
"def add_sales_tax(data: DataFrame[Schema]):\n",
129130
" # creates a new column in the data frame that calculates prices after sales tax\n",
130-
" data['after_tax'] = data['price'] + (data['price'] * .06)\n",
131+
" data[\"after_tax\"] = data[\"price\"] + (data[\"price\"] * 0.06)\n",
131132
" return data"
132133
]
133134
},
@@ -153,10 +154,9 @@
153154
"metadata": {},
154155
"outputs": [],
155156
"source": [
156-
"valid_data = pd.DataFrame.from_records([\n",
157-
" {\"item\": \"apple\", \"price\": 0.5},\n",
158-
" {\"item\": \"orange\", \"price\": 0.75}\n",
159-
"])\n",
157+
"valid_data = pd.DataFrame.from_records(\n",
158+
" [{\"item\": \"apple\", \"price\": 0.5}, {\"item\": \"orange\", \"price\": 0.75}]\n",
159+
")\n",
160160
"\n",
161161
"add_sales_tax(valid_data)"
162162
]
@@ -177,10 +177,9 @@
177177
"metadata": {},
178178
"outputs": [],
179179
"source": [
180-
"invalid_data = pd.DataFrame.from_records([\n",
181-
" {\"item\": \"applee\", \"price\": 0.5},\n",
182-
" {\"item\": \"orange\", \"price\": -1000}\n",
183-
"])\n",
180+
"invalid_data = pd.DataFrame.from_records(\n",
181+
" [{\"item\": \"applee\", \"price\": 0.5}, {\"item\": \"orange\", \"price\": -1000}]\n",
182+
")\n",
184183
"\n",
185184
"try:\n",
186185
" add_sales_tax(invalid_data)\n",
@@ -262,6 +261,7 @@
262261
" item: Series[str] = pa.Field(isin=[\"apple\", \"orange\"], coerce=True)\n",
263262
" price: Series[float] = pa.Field(gt=0, coerce=True)\n",
264263
"\n",
264+
"\n",
265265
"class TransformedSchema(Schema):\n",
266266
" expiry: Series[pd.Timestamp] = pa.Field(coerce=True)"
267267
]
@@ -289,13 +289,12 @@
289289
"outputs": [],
290290
"source": [
291291
"from datetime import datetime\n",
292-
"from typing import List\n",
293292
"\n",
294293
"\n",
295294
"@pa.check_types(lazy=True)\n",
296295
"def transform_data(\n",
297296
" data: DataFrame[Schema],\n",
298-
" expiry: List[datetime],\n",
297+
" expiry: list[datetime],\n",
299298
") -> DataFrame[TransformedSchema]:\n",
300299
" return data.assign(expiry=expiry)\n",
301300
"\n",
@@ -381,11 +380,16 @@
381380
" item: Series[str] = pa.Field(isin=[\"apple\", \"orange\"], coerce=True)\n",
382381
" price: Series[float] = pa.Field(gt=0, coerce=True)\n",
383382
"\n",
383+
"\n",
384384
"# object-based API\n",
385-
"schema = pa.DataFrameSchema({\n",
386-
" \"item\": pa.Column(str, pa.Check.isin([\"apple\", \"orange\"]), coerce=True),\n",
387-
" \"price\": pa.Column(float, pa.Check.gt(0), coerce=True),\n",
388-
"})"
385+
"schema = pa.DataFrameSchema(\n",
386+
" {\n",
387+
" \"item\": pa.Column(\n",
388+
" str, pa.Check.isin([\"apple\", \"orange\"]), coerce=True\n",
389+
" ),\n",
390+
" \"price\": pa.Column(float, pa.Check.gt(0), coerce=True),\n",
391+
" }\n",
392+
")"
389393
]
390394
},
391395
{
@@ -407,7 +411,9 @@
407411
"source": [
408412
"transformed_schema = schema.add_columns({\"expiry\": pa.Column(pd.Timestamp)})\n",
409413
"schema.remove_columns([\"item\"]) # remove the \"item\" column\n",
410-
"schema.update_column(\"price\", dtype=int) # update the datatype of the \"price\" column to integer"
414+
"schema.update_column(\n",
415+
" \"price\", dtype=int\n",
416+
") # update the datatype of the \"price\" column to integer"
411417
]
412418
},
413419
{

noxfile.py

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
import os
99
import shutil
1010
import sys
11-
from typing import Optional, Tuple, List
11+
from typing import Optional
1212

1313
import nox
1414
from nox import Session
@@ -115,12 +115,11 @@ def requirements(session: Session) -> None:
115115

116116
def _testing_requirements(
117117
session: Session,
118-
extra: Optional[str] = None,
119-
pandas: Optional[str] = None,
120-
pydantic: Optional[str] = None,
121-
polars: Optional[str] = None,
118+
extra: str | None = None,
119+
pandas: str | None = None,
120+
pydantic: str | None = None,
121+
polars: str | None = None,
122122
) -> list[str]:
123-
124123
pandas = pandas or PANDAS_VERSIONS[-1]
125124
pydantic = pydantic or PYDANTIC_VERSIONS[-1]
126125
polars = polars or POLARS_VERSIONS[-1]
@@ -137,7 +136,7 @@ def _testing_requirements(
137136

138137
_requirements = list(set(_requirements))
139138

140-
_numpy: Optional[str] = None
139+
_numpy: str | None = None
141140
if pandas != "2.3.3" or (
142141
extra == "pyspark" and session.python in ("3.10",)
143142
):
@@ -178,7 +177,7 @@ def _testing_requirements(
178177

179178

180179
# the base module with no extras
181-
EXTRA_PYTHON_PYDANTIC: list[tuple[Optional[str], ...]] = [
180+
EXTRA_PYTHON_PYDANTIC: list[tuple[str | None, ...]] = [
182181
(None, None, None, None)
183182
]
184183
DATAFRAME_EXTRAS = {
@@ -222,10 +221,10 @@ def _testing_requirements(
222221
@nox.parametrize("extra, pandas, pydantic, polars", EXTRA_PYTHON_PYDANTIC)
223222
def tests(
224223
session: Session,
225-
extra: Optional[str] = None,
226-
pandas: Optional[str] = None,
227-
pydantic: Optional[str] = None,
228-
polars: Optional[str] = None,
224+
extra: str | None = None,
225+
pandas: str | None = None,
226+
pydantic: str | None = None,
227+
polars: str | None = None,
229228
) -> None:
230229
"""Run the test suite."""
231230

pandera/__init__.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,17 +2,15 @@
22

33
from pandera._version import __version__
44

5-
65
try:
76
# Only add pandas to the top-level pandera namespace
87
# if pandas and numpy are installed
9-
import pandas as pd
108
import numpy as np
9+
import pandas as pd
1110

11+
from pandera import dtypes, typing
1212
from pandera._pandas_deprecated import *
1313
from pandera._pandas_deprecated import __all__ as _pandas_deprecated_all
14-
from pandera import dtypes
15-
from pandera import typing
1614

1715
__all__ = [
1816
"__version__",
@@ -33,8 +31,7 @@
3331
else:
3432
raise # Re-raise any other `ImportError` exceptions
3533

36-
from pandera import dtypes
37-
from pandera import typing
34+
from pandera import dtypes, typing
3835
from pandera.api.checks import Check
3936
from pandera.api.dataframe.model_components import (
4037
Field,

pandera/_pandas_deprecated.py

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -8,11 +8,9 @@
88
import platform
99
import warnings
1010

11-
from packaging.version import parse
12-
13-
import pandas as pd
1411
import numpy as np
15-
12+
import pandas as pd
13+
from packaging.version import parse
1614

1715
_min_pandas_version = parse("2.1.1")
1816
_min_numpy_version = parse("1.24.4")
@@ -105,7 +103,6 @@
105103
)
106104
from pandera.schema_inference.pandas import infer_schema
107105

108-
109106
_future_warning = """Importing pandas-specific classes and functions from the
110107
top-level pandera module will be **removed in a future version of pandera**.
111108
If you're using pandera to validate pandas objects, we highly recommend updating
@@ -248,7 +245,6 @@ def __init_subclass__(cls, **kwargs):
248245

249246

250247
if platform.system() != "Windows":
251-
252248
from pandera.dtypes import Complex256, Float128
253249

254250
__all__.extend(["Complex256", "Float128"])

pandera/accessors/pandas_accessor.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ class PanderaAccessor:
1616
def __init__(self, pandas_obj):
1717
"""Initialize the pandera accessor."""
1818
self._pandas_obj = pandas_obj
19-
self._schema: Optional[Schemas] = None
19+
self._schema: Schemas | None = None
2020

2121
@staticmethod
2222
def check_schema_type(schema: Schemas):
@@ -30,7 +30,7 @@ def add_schema(self, schema):
3030
return self._pandas_obj
3131

3232
@property
33-
def schema(self) -> Optional[Schemas]:
33+
def schema(self) -> Schemas | None:
3434
"""Access schema metadata."""
3535
return self._schema
3636

pandera/accessors/pyspark_sql_accessor.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,10 @@
22

33
import warnings
44
from typing import Optional
5-
from packaging import version
65

76
import pyspark
7+
from packaging import version
8+
89
from pandera.api.base.error_handler import ErrorHandler
910
from pandera.api.pyspark.container import DataFrameSchema
1011

@@ -18,8 +19,8 @@ class PanderaAccessor:
1819
def __init__(self, pyspark_obj):
1920
"""Initialize the pandera accessor."""
2021
self._pyspark_obj = pyspark_obj
21-
self._schema: Optional[Schemas] = None
22-
self._errors: Optional[Errors] = None
22+
self._schema: Schemas | None = None
23+
self._errors: Errors | None = None
2324

2425
@staticmethod
2526
def check_schema_type(schema: Schemas): # type: ignore
@@ -33,17 +34,17 @@ def add_schema(self, schema):
3334
return self._pyspark_obj
3435

3536
@property
36-
def schema(self) -> Optional[Schemas]: # type: ignore
37+
def schema(self) -> Schemas | None: # type: ignore
3738
"""Access schema metadata."""
3839
return self._schema
3940

4041
@property
41-
def errors(self) -> Optional[Errors]: # type: ignore
42+
def errors(self) -> Errors | None: # type: ignore
4243
"""Access errors details."""
4344
return self._errors
4445

4546
@errors.setter
46-
def errors(self, value: Optional[Errors]): # type: ignore
47+
def errors(self, value: Errors | None): # type: ignore
4748
"""Set errors details."""
4849
self._errors = value
4950

pandera/api/base/checks.py

Lines changed: 7 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,18 @@
11
"""Data validation base check."""
22

33
import inspect
4+
from collections.abc import Callable, Iterable
45
from itertools import chain
56
from typing import (
67
Any,
7-
Callable,
88
NamedTuple,
99
Optional,
1010
TypeVar,
1111
Union,
1212
no_type_check,
1313
)
14-
from collections.abc import Iterable
1514

1615
from pandera.api.function_dispatch import Dispatcher
17-
1816
from pandera.backends.base import BaseCheckBackend
1917

2018

@@ -33,9 +31,7 @@ class CheckResult(NamedTuple):
3331
class MetaCheck(type): # pragma: no cover
3432
"""Check metaclass."""
3533

36-
BACKEND_REGISTRY: dict[tuple[type, type], type[BaseCheckBackend]] = (
37-
{}
38-
) # noqa
34+
BACKEND_REGISTRY: dict[tuple[type, type], type[BaseCheckBackend]] = {} # noqa
3935
"""Registry of check backends implemented for specific data objects."""
4036

4137
CHECK_FUNCTION_REGISTRY: dict[str, Dispatcher] = {} # noqa
@@ -86,9 +82,9 @@ class BaseCheck(metaclass=MetaCheck):
8682

8783
def __init__(
8884
self,
89-
name: Optional[str] = None,
90-
error: Optional[str] = None,
91-
statistics: Optional[dict[str, Any]] = None,
85+
name: str | None = None,
86+
error: str | None = None,
87+
statistics: dict[str, Any] | None = None,
9288
):
9389
self.name = name
9490
self.error = error
@@ -122,8 +118,8 @@ def from_builtin_check_name(
122118
name: str,
123119
init_kwargs,
124120
error: Union[str, Callable],
125-
statistics: Optional[dict[str, Any]] = None,
126-
defaults: Optional[dict[str, Any]] = None,
121+
statistics: dict[str, Any] | None = None,
122+
defaults: dict[str, Any] | None = None,
127123
**check_kwargs,
128124
):
129125
"""Create a Check object from a built-in check's name."""

pandera/api/base/error_handler.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ def _count_failure_cases(failure_cases: Any) -> int:
5252
def collect_error(
5353
self,
5454
error_type: ErrorCategory,
55-
reason_code: Optional[SchemaErrorReason],
55+
reason_code: SchemaErrorReason | None,
5656
schema_error: SchemaError,
5757
original_exc: Union[BaseException, None] = None,
5858
):

0 commit comments

Comments
 (0)