
Commit 4526e01

Author: Nabil Fayak (committed)
Commit message: lint fix
1 parent 92f1154 commit 4526e01

File tree

13 files changed (+65, -44 lines)


checkmates/exceptions/exceptions.py

Lines changed: 3 additions & 0 deletions
@@ -13,16 +13,19 @@ class ObjectiveNotFoundError(Exception):
 
     pass
 
+
 class MethodPropertyNotFoundError(Exception):
     """Exception to raise when a class is does not have an expected method or property."""
 
     pass
 
+
 class ComponentNotYetFittedError(Exception):
     """An exception to be raised when predict/predict_proba/transform is called on a component without fitting first."""
 
     pass
 
+
 class ObjectiveCreationError(Exception):
     """Exception when get_objective tries to instantiate an objective and required args are not provided."""
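For orientation, a minimal sketch of how these exception classes are consumed downstream, assuming the usual get_objective lookup pattern; the objective name and the try/except wrapper are illustrative, not part of this commit.

# Illustrative usage sketch (assumed calling pattern, not from this diff).
from checkmates.exceptions import ObjectiveNotFoundError
from checkmates.objectives.utils import get_objective

try:
    # "log loss binary" is a hypothetical objective name used for illustration.
    objective = get_objective("log loss binary", return_instance=True)
except ObjectiveNotFoundError as err:
    # Raised when the requested objective name is not registered.
    print(f"Unknown objective: {err}")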

checkmates/objectives/utils.py

Lines changed: 5 additions & 5 deletions
@@ -1,14 +1,13 @@
 """Utility methods for CheckMates objectives."""
-import pandas as pd
 from typing import Optional
 
+import pandas as pd
+
 from checkmates import objectives
 from checkmates.exceptions import ObjectiveCreationError, ObjectiveNotFoundError
 from checkmates.objectives.objective_base import ObjectiveBase
-from checkmates.problem_types import handle_problem_types
+from checkmates.problem_types import ProblemTypes, handle_problem_types
 from checkmates.utils.gen_utils import _get_subclasses
-from checkmates.problem_types import ProblemTypes
-
 from checkmates.utils.logger import get_logger
 
 logger = get_logger(__file__)
@@ -97,11 +96,12 @@ def get_objective(objective, return_instance=False, **kwargs):
 
     return objective_class
 
+
 def get_problem_type(
     input_problem_type: Optional[str],
     target_data: pd.Series,
 ) -> ProblemTypes:
-    """helper function to determine if classification problem is binary or multiclass dependent on target variable values."""
+    """Helper function to determine if classification problem is binary or multiclass dependent on target variable values."""
     if not input_problem_type:
         raise ValueError("problem type is required")
     if input_problem_type.lower() == "classification":
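The get_problem_type hunk above only shows the guard clauses; a hedged sketch of the binary-vs-multiclass branch its docstring describes might look like the following (the helper name and the nunique() heuristic are assumptions, not code from this commit).

# Hypothetical helper mirroring the docstring's binary/multiclass logic.
import pandas as pd

from checkmates.problem_types import ProblemTypes


def _resolve_classification(target_data: pd.Series) -> ProblemTypes:
    # Two distinct target values -> binary, otherwise multiclass.
    if target_data.nunique() == 2:
        return ProblemTypes.BINARY
    return ProblemTypes.MULTICLASS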

checkmates/pipelines/__init__.py

Lines changed: 8 additions & 2 deletions
@@ -1,3 +1,5 @@
+"""General CheckMates pipelines."""
+
 from checkmates.pipelines.component_base_meta import ComponentBaseMeta
 from checkmates.pipelines.component_base import ComponentBase
 from checkmates.pipelines.transformers import Transformer
@@ -9,5 +11,9 @@
     TimeSeriesImputer,
     TimeSeriesRegularizer,
 )
-from checkmates.pipelines.utils import _make_component_list_from_actions, split_data, drop_infinity
-from checkmates.pipelines.training_validation_split import TrainingValidationSplit
+from checkmates.pipelines.utils import (
+    _make_component_list_from_actions,
+    split_data,
+    drop_infinity,
+)
+from checkmates.pipelines.training_validation_split import TrainingValidationSplit

checkmates/pipelines/component_base.py

Lines changed: 1 addition & 1 deletion
@@ -280,4 +280,4 @@ def _handle_nullable_types(self, X=None, y=None):
             handle_integer_nullable=y_int_incompatible,
         )
 
-        return X, y
+        return X, y

checkmates/pipelines/component_base_meta.py

Lines changed: 1 addition & 1 deletion
@@ -41,4 +41,4 @@ def _check_for_fit(self, X=None, y=None):
             else:
                 return method(self, X, y)
 
-        return _check_for_fit
+        return _check_for_fit
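For context, _check_for_fit follows a standard guard-decorator pattern: wrap a component method so it raises ComponentNotYetFittedError until fit() has been called. A simplified, stand-alone sketch (assumed shape, reduced from the real metaclass) is:

from functools import wraps

from checkmates.exceptions import ComponentNotYetFittedError


def check_for_fit(method):
    """Wrap a method so it refuses to run before fit() has been called."""

    @wraps(method)
    def _check_for_fit(self, X=None, y=None):
        if not self._is_fitted and self.needs_fitting:
            raise ComponentNotYetFittedError(
                f"This {type(self).__name__} is not fitted yet.",
            )
        else:
            return method(self, X, y)

    return _check_for_fit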

checkmates/pipelines/components.py

Lines changed: 20 additions & 14 deletions
@@ -1,27 +1,29 @@
 """Initalizes an transformer that selects specified columns in input data."""
+import warnings
 from abc import abstractmethod
 from functools import wraps
+
 import pandas as pd
 import woodwork as ww
-import warnings
 from sklearn.impute import SimpleImputer as SkImputer
-
-from woodwork.logical_types import Datetime
+from woodwork.logical_types import (
+    BooleanNullable,
+    Datetime,
+    Double,
+)
 from woodwork.statistics_utils import infer_frequency
 
-from checkmates.pipelines.transformers import Transformer
-from checkmates.pipelines.transformers import SimpleImputer
 from checkmates.exceptions import ComponentNotYetFittedError
 from checkmates.pipelines import ComponentBaseMeta
+from checkmates.pipelines.transformers import SimpleImputer, Transformer
 from checkmates.utils import infer_feature_types
 from checkmates.utils.nullable_type_utils import (
-    _get_new_logical_types_for_imputed_data,
     _determine_fractional_type,
     _determine_non_nullable_equivalent,
+    _get_new_logical_types_for_imputed_data,
 )
 
 
-
 class ColumnSelector(Transformer):
     """Initalizes an transformer that selects specified columns in input data.
@@ -211,8 +213,10 @@ def transform(self, X, y=None):
         modified_cols = self._modify_columns(cols, X, y)
         return infer_feature_types(modified_cols)
 
+
 """Transformer to drop rows specified by row indices."""
 
+
 class DropRowsTransformer(Transformer):
     """Transformer to drop rows specified by row indices.
@@ -300,8 +304,10 @@ def transform(self, X, y=None):
             y_t = y_t.ww.drop(self.indices_to_drop)
         return X_t, y_t
 
+
 """Component that imputes missing data according to a specified imputation strategy per column."""
 
+
 class PerColumnImputer(Transformer):
     """Imputes missing data according to a specified imputation strategy per column.
@@ -396,8 +402,10 @@ def transform(self, X, y=None):
         X_t.ww.init(schema=original_schema.get_subset_schema(X_t.columns))
         return X_t
 
+
 """Component that imputes missing target data according to a specified imputation strategy."""
 
+
 class TargetImputerMeta(ComponentBaseMeta):
     """A version of the ComponentBaseMeta class which handles when input features is None."""
 
@@ -531,13 +539,9 @@ def fit_transform(self, X, y):
         """
         return self.fit(X, y).transform(X, y)
 
+
 """Component that imputes missing data according to a specified timeseries-specific imputation strategy."""
-import pandas as pd
-import woodwork as ww
-from woodwork.logical_types import (
-    BooleanNullable,
-    Double,
-)
+
 
 class TimeSeriesImputer(Transformer):
     """Imputes missing data according to a specified timeseries-specific imputation strategy.
@@ -776,8 +780,10 @@ def _handle_nullable_types(self, X=None, y=None):
 
         return X, y
 
+
 """Transformer that regularizes a dataset with an uninferrable offset frequency for time series problems."""
 
+
 class TimeSeriesRegularizer(Transformer):
     """Transformer that regularizes an inconsistently spaced datetime column.
@@ -1086,4 +1092,4 @@ def transform(self, X, y=None):
 
         cleaned_x.ww.init()
 
-        return cleaned_x, cleaned_y
+        return cleaned_x, cleaned_y
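A hedged usage sketch of DropRowsTransformer from this file: the indices_to_drop parameter and the two-element return of transform are inferred from the hunks above; everything else (data, call sequence) is an illustrative assumption.

import pandas as pd

from checkmates.pipelines.components import DropRowsTransformer

X = pd.DataFrame({"a": [1, 2, 3, 4]})
y = pd.Series([0, 1, 0, 1])

dropper = DropRowsTransformer(indices_to_drop=[1, 3])
X_t, y_t = dropper.fit_transform(X, y)  # rows 1 and 3 dropped from both X and y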

checkmates/pipelines/training_validation_split.py

Lines changed: 1 addition & 1 deletion
@@ -99,4 +99,4 @@ def split(self, X, y=None):
            stratify=self.stratify,
            random_state=self.random_seed,
        )
-        return iter([(train, test)])
+        return iter([(train, test)])
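Because split() returns iter([(train, test)]), TrainingValidationSplit can be consumed like any scikit-learn-style splitter that yields exactly one train/validation pair of index arrays. A sketch under that assumption (constructor defaults assumed):

import numpy as np

from checkmates.pipelines import TrainingValidationSplit

X = np.arange(20).reshape(10, 2)
y = np.arange(10)

splitter = TrainingValidationSplit()
for train_idx, test_idx in splitter.split(X, y):  # yields exactly one pair
    X_train, X_val = X[train_idx], X[test_idx]
    y_train, y_val = y[train_idx], y[test_idx]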

checkmates/pipelines/transformers.py

Lines changed: 6 additions & 5 deletions
@@ -1,17 +1,16 @@
 """A component that may or may not need fitting that transforms data. These components are used before an estimator."""
 from abc import abstractmethod
 
-from checkmates.exceptions import MethodPropertyNotFoundError
-from checkmates.pipelines import ComponentBase
-from checkmates.utils import infer_feature_types
 import pandas as pd
 import woodwork
 from sklearn.impute import SimpleImputer as SkImputer
 
-from checkmates.pipelines.transformers import Transformer
+from checkmates.exceptions import MethodPropertyNotFoundError
+from checkmates.pipelines import ComponentBase
 from checkmates.utils import infer_feature_types
 from checkmates.utils.nullable_type_utils import _get_new_logical_types_for_imputed_data
 
+
 class Transformer(ComponentBase):
     """A component that may or may not need fitting that transforms data. These components are used before an estimator.
@@ -83,8 +82,10 @@ def fit_transform(self, X, y=None):
     def _get_feature_provenance(self):
         return {}
 
+
 """Component that imputes missing data according to a specified imputation strategy."""
 
+
 class SimpleImputer(Transformer):
     """Imputes missing data according to a specified imputation strategy. Natural language columns are ignored.
@@ -235,4 +236,4 @@ def fit_transform(self, X, y=None):
         Returns:
             pd.DataFrame: Transformed X
         """
-        return self.fit(X, y).transform(X, y)
+        return self.fit(X, y).transform(X, y)
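The fit_transform shown above simply chains self.fit(X, y).transform(X, y), so any Transformer subclass must return self from fit for the chained call to work. A minimal hypothetical subclass illustrating that contract (DoubleColumns, its name attribute, and the constructor arguments are illustrative assumptions, not part of checkmates):

from checkmates.pipelines.transformers import Transformer
from checkmates.utils import infer_feature_types


class DoubleColumns(Transformer):
    """Toy transformer that doubles every numeric value."""

    name = "Double Columns"

    def __init__(self, random_seed=0):
        # Assumed base-class signature; adjust to the real ComponentBase API.
        super().__init__(parameters={}, component_obj=None, random_seed=random_seed)

    def fit(self, X, y=None):
        return self  # returning self makes fit(X, y).transform(X, y) work

    def transform(self, X, y=None):
        return infer_feature_types(X) * 2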

checkmates/pipelines/utils.py

Lines changed: 6 additions & 6 deletions
@@ -1,9 +1,9 @@
 """Utility methods for EvalML pipelines."""
-from sklearn.model_selection import ShuffleSplit, StratifiedShuffleSplit
 from typing import Union
+
 import numpy as np
 import pandas as pd
-
+from sklearn.model_selection import ShuffleSplit, StratifiedShuffleSplit
 
 from checkmates.data_checks import DataCheckActionCode
 from checkmates.pipelines.components import ( # noqa: F401
@@ -14,11 +14,9 @@
     TimeSeriesImputer,
     TimeSeriesRegularizer,
 )
-from checkmates.utils import infer_feature_types
 from checkmates.pipelines.training_validation_split import TrainingValidationSplit
 from checkmates.problem_types import is_classification, is_regression, is_time_series
-
-
+from checkmates.utils import infer_feature_types
 
 
 def _make_component_list_from_actions(actions):
@@ -70,6 +68,7 @@ def _make_component_list_from_actions(actions):
 
     return components
 
+
 def split_data(
     X,
     y,
@@ -163,10 +162,11 @@ def split_data(
 
     return X_train, X_test, y_train, y_test
 
+
 def drop_infinity(
     data: Union[pd.DataFrame, pd.Series],
 ) -> Union[pd.DataFrame, pd.Series]:
-    """Removes infinity values"""
+    """Removes infinity values."""
     ww = data.ww._schema is not None
     replace = data.ww.replace if ww else data.replace
     return replace([np.inf, -np.inf], np.nan)
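A quick illustration of drop_infinity as defined in the hunk above: positive and negative infinity are replaced with NaN, whether the input is a plain pandas object or one with a Woodwork schema.

import numpy as np
import pandas as pd

from checkmates.pipelines.utils import drop_infinity

s = pd.Series([1.0, np.inf, -np.inf, 4.0])
print(drop_infinity(s).tolist())  # [1.0, nan, nan, 4.0]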

checkmates/utils/base_meta.py

Lines changed: 1 addition & 1 deletion
@@ -43,4 +43,4 @@ def __new__(cls, name, bases, dct):
                     property_orig.__delattr__,
                     property_orig.__doc__,
                 )
-        return super().__new__(cls, name, bases, dct)
+        return super().__new__(cls, name, bases, dct)
