
Commit 4e635ac

fix lint
1 parent 651619b · commit 4e635ac

16 files changed: +207 additions, −249 deletions


setup.py

Lines changed: 1 addition & 1 deletion
@@ -54,7 +54,7 @@
         'Sphinx>=3,<3.3',
         'sphinx_rtd_theme>=0.2.4,<0.5',
         'autodocsumm>=0.1.10',
-        'mistune>=0.7,<3.1',
+        'mistune>=0.7,<2.0',
         'Jinja2>=2,<3.1',

         # fails on Sphinx < v3.4

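The only substantive change in this file is the tightened upper bound on the mistune docs dependency. As a hedged, standalone check (the packaging library and this snippet are illustrative, not part of the repo), the new specifier behaves like this:

# Hedged sketch: confirm whether an installed mistune version satisfies the new pin.
# `packaging` is assumed to be available; it is not a dependency added by this commit.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

pin = SpecifierSet(">=0.7,<2.0")
print(Version("0.8.4") in pin)   # True
print(Version("2.0.5") in pin)   # False -- excluded by the tightened upper bound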
tests/labeling/test_helpers.py

Lines changed: 8 additions & 4 deletions
@@ -19,31 +19,35 @@ def test_merge_labeling_and_true():
         lambda df: True,
         lambda df: True
     ]
-    assert 1 == merge_binary_labeling_functions(functions, and_connected=True)(pd.DataFrame())
+    assert 1 == merge_binary_labeling_functions(
+        functions, and_connected=True)(pd.DataFrame())


 def test_merge_labeling_and_false():
     functions = [
         lambda df: True,
         lambda df: False
     ]
-    assert 0 == merge_binary_labeling_functions(functions, and_connected=True)(pd.DataFrame())
+    assert 0 == merge_binary_labeling_functions(
+        functions, and_connected=True)(pd.DataFrame())


 def test_merge_labeling_or_true():
     functions = [
         lambda df: False,
         lambda df: True
     ]
-    assert 1 == merge_binary_labeling_functions(functions, and_connected=False)(pd.DataFrame())
+    assert 1 == merge_binary_labeling_functions(
+        functions, and_connected=False)(pd.DataFrame())


 def test_merge_labeling_or_false():
     functions = [
         lambda df: False,
         lambda df: False
     ]
-    assert 0 == merge_binary_labeling_functions(functions, and_connected=False)(pd.DataFrame())
+    assert 0 == merge_binary_labeling_functions(
+        functions, and_connected=False)(pd.DataFrame())


 def test_categorical_presence_true():

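For readers skimming the wrapped asserts: a minimal sketch of the contract these four tests pin down (not the library's actual implementation, which lives in zephyr_ml.labeling) could look like this:

# Minimal sketch, assuming merge_binary_labeling_functions AND/OR-combines binary
# labeling functions over the same DataFrame and returns 1 or 0; this illustrates
# the behaviour the tests assert, not the package's own code.
import pandas as pd


def merge_binary_labeling_functions_sketch(functions, and_connected=True):
    def labeler(df):
        results = [bool(func(df)) for func in functions]
        return int(all(results) if and_connected else any(results))
    return labeler


assert merge_binary_labeling_functions_sketch(
    [lambda df: True, lambda df: False], and_connected=False)(pd.DataFrame()) == 1
assert merge_binary_labeling_functions_sketch(
    [lambda df: True, lambda df: False], and_connected=True)(pd.DataFrame()) == 0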
tests/primitives/test_postprocessing.py

Lines changed: 2 additions & 1 deletion
@@ -23,7 +23,8 @@ def _run(self, y, y_hat, value):
         threshold.fit(y, y_hat)

         assert threshold._threshold == value
-        binary_y_hat, detected_threshold, scores = threshold.apply_threshold(y_hat)
+        binary_y_hat, detected_threshold, scores = threshold.apply_threshold(
+            y_hat)
         np.testing.assert_allclose(binary_y_hat, y)

     def test_1d(self):

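The wrapped call only changes layout. For context, here is a hedged illustration of the three-value return shape (binary_y_hat, detected_threshold, scores) that apply_threshold is asserted to produce; the function below is a hypothetical stand-in, not the zephyr_ml primitive:

# Hypothetical stand-in showing the return shape exercised by the test;
# the real thresholding logic belongs to the fitted postprocessing primitive.
import numpy as np


def apply_threshold_sketch(y_hat, threshold=0.5):
    scores = np.asarray(y_hat, dtype=float)
    binary_y_hat = (scores >= threshold).astype(int)
    return binary_y_hat, threshold, scores


binary_y_hat, detected_threshold, scores = apply_threshold_sketch([0.1, 0.9, 0.6])
np.testing.assert_allclose(binary_y_hat, [0, 1, 1])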
tests/test___init__.py

Lines changed: 2 additions & 1 deletion
@@ -96,7 +96,8 @@ def merge_work_orders_notifications_data():
     changed_wo_data['WTG'] = ['A001', 'A001']
     changed_notif_data = NOTIFICATIONS_DATA.copy()
     # matching the output of the merge
-    changed_notif_data['Functional location_y'] = changed_notif_data.pop('Functional location')
+    changed_notif_data['Functional location_y'] = changed_notif_data.pop(
+        'Functional location')
     changed_notif_data['Functional location description_y'] = (
         changed_notif_data.pop('Functional location description'))
     # matching the notifications update

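The reflowed line uses a pop-and-reassign rename, mirroring the `_y` suffix pandas adds to overlapping columns on merge. A tiny standalone example of the same idiom (the DataFrame below is illustrative only):

# Illustrative only: rename a column by popping it and assigning it back
# under the suffixed name that a pandas merge would have produced.
import pandas as pd

notif = pd.DataFrame({'Functional location': ['LOC0', 'LOC1']})
notif['Functional location_y'] = notif.pop('Functional location')
print(list(notif.columns))   # ['Functional location_y']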
tests/test_core.py

Lines changed: 44 additions & 23 deletions
@@ -1,6 +1,7 @@
 import numpy as np
 import pandas as pd
 from mlblocks import MLBlock
+
 from zephyr_ml.core import DEFAULT_METRICS, Zephyr


@@ -10,8 +11,10 @@ class TestZephyr:
     def base_dfs():
         alarms_df = pd.DataFrame({
             'COD_ELEMENT': [0, 0],
-            'DAT_START': [pd.Timestamp('2022-01-01 00:00:00'), pd.Timestamp('2022-03-01 11:12:13')],
-            'DAT_END': [pd.Timestamp('2022-01-01 13:00:00'), pd.Timestamp('2022-03-02 11:12:13')],
+            'DAT_START': [pd.Timestamp('2022-01-01 00:00:00'),
+                          pd.Timestamp('2022-03-01 11:12:13')],
+            'DAT_END': [pd.Timestamp('2022-01-01 13:00:00'),
+                        pd.Timestamp('2022-03-02 11:12:13')],
             'IND_DURATION': [0.5417, 1.0],
             'COD_ALARM': [12345, 98754],
             'COD_ALARM_INT': [12345, 98754],
@@ -20,8 +23,10 @@ def base_dfs():
         })
         stoppages_df = pd.DataFrame({
             'COD_ELEMENT': [0, 0],
-            'DAT_START': [pd.Timestamp('2022-01-01 00:00:00'), pd.Timestamp('2022-03-01 11:12:13')],
-            'DAT_END': [pd.Timestamp('2022-01-08 11:07:17'), pd.Timestamp('2022-03-01 17:00:13')],
+            'DAT_START': [pd.Timestamp('2022-01-01 00:00:00'),
+                          pd.Timestamp('2022-03-01 11:12:13')],
+            'DAT_END': [pd.Timestamp('2022-01-08 11:07:17'),
+                        pd.Timestamp('2022-03-01 17:00:13')],
             'DES_WO_NAME': ['stoppage name 1', 'stoppage name 2'],
             'DES_COMMENTS': ['description of stoppage 1', 'description of stoppage 2'],
             'COD_WO': [12345, 67890],
@@ -40,12 +45,15 @@ def base_dfs():
             'COD_ORDER': [12345, 67890],
             'IND_QUANTITY': [1, -20],
             'COD_MATERIAL_SAP': [36052411, 67890],
-            'DAT_POSTING': [pd.Timestamp('2022-01-01 00:00:00'), pd.Timestamp('2022-03-01 00:00:00')],
+            'DAT_POSTING': [pd.Timestamp('2022-01-01 00:00:00'),
+                            pd.Timestamp('2022-03-01 00:00:00')],
             'COD_MAT_DOC': [77889900, 12345690],
             'DES_MEDIUM': ['Description of notification 1', 'Description of notification 2'],
             'COD_NOTIF': [567890123, 32109877],
-            'DAT_MALF_START': [pd.Timestamp('2021-12-25 18:07:10'), pd.Timestamp('2022-02-28 06:04:00')],
-            'DAT_MALF_END': [pd.Timestamp('2022-01-08 11:07:17'), pd.Timestamp('2022-03-01 17:00:13')],
+            'DAT_MALF_START': [pd.Timestamp('2021-12-25 18:07:10'),
+                               pd.Timestamp('2022-02-28 06:04:00')],
+            'DAT_MALF_END': [pd.Timestamp('2022-01-08 11:07:17'),
+                             pd.Timestamp('2022-03-01 17:00:13')],
             'IND_BREAKDOWN_DUR': [14.1378, 2.4792],
             'FUNCT_LOC_DES': ['location description 1', 'location description 2'],
             'COD_ALARM': [12345, 12345],
@@ -54,15 +62,19 @@ def base_dfs():
         work_orders_df = pd.DataFrame({
             'COD_ELEMENT': [0, 0],
             'COD_ORDER': [12345, 67890],
-            'DAT_BASIC_START': [pd.Timestamp('2022-01-01 00:00:00'), pd.Timestamp('2022-03-01 00:00:00')],
-            'DAT_BASIC_END': [pd.Timestamp('2022-01-09 00:00:00'), pd.Timestamp('2022-03-02 00:00:00')],
+            'DAT_BASIC_START': [pd.Timestamp('2022-01-01 00:00:00'),
+                                pd.Timestamp('2022-03-01 00:00:00')],
+            'DAT_BASIC_END': [pd.Timestamp('2022-01-09 00:00:00'),
+                              pd.Timestamp('2022-03-02 00:00:00')],
             'COD_EQUIPMENT': [98765, 98765],
             'COD_MAINT_PLANT': ['ABC', 'ABC'],
             'COD_MAINT_ACT_TYPE': ['XYZ', 'XYZ'],
             'COD_CREATED_BY': ['A1234', 'B6789'],
             'COD_ORDER_TYPE': ['A', 'B'],
-            'DAT_REFERENCE': [pd.Timestamp('2022-01-01 00:00:00'), pd.Timestamp('2022-03-01 00:00:00')],
-            'DAT_CREATED_ON': [pd.Timestamp('2022-03-01 00:00:00'), pd.Timestamp('2022-04-18 00:00:00')],
+            'DAT_REFERENCE': [pd.Timestamp('2022-01-01 00:00:00'),
+                              pd.Timestamp('2022-03-01 00:00:00')],
+            'DAT_CREATED_ON': [pd.Timestamp('2022-03-01 00:00:00'),
+                               pd.Timestamp('2022-04-18 00:00:00')],
             'DAT_VALID_END': [pd.NaT, pd.NaT],
             'DAT_VALID_START': [pd.NaT, pd.NaT],
             'COD_SYSTEM_STAT': ['ABC XYZ', 'LMN OPQ'],
@@ -86,7 +98,8 @@ def base_dfs():
             'PI_LOCAL_SITE_NAME': ['LOC0']
         })
         pidata_df = pd.DataFrame({
-            'time': [pd.Timestamp('2022-01-02 13:21:01'), pd.Timestamp('2022-03-08 13:21:01')],
+            'time': [pd.Timestamp('2022-01-02 13:21:01'),
+                     pd.Timestamp('2022-03-08 13:21:01')],
             'COD_ELEMENT': [0, 0],
             'val1': [9872.0, 559.0],
             'val2': [10.0, -7.0]
@@ -153,40 +166,47 @@ def setup_class(cls):

     def test_initialize_class(self):
         _ = Zephyr()
-

     def test_generate_entityset(self):
         zephyr = Zephyr()
-        zephyr.generate_entityset(**self.__class__.kwargs["generate_entityset"])
+        zephyr.generate_entityset(
+            **self.__class__.kwargs["generate_entityset"])
         es = zephyr.get_entityset()
         assert es is not None
         assert es.id == 'pidata'

     def test_generate_label_times(self):
         zephyr = Zephyr()
-        zephyr.generate_entityset(**self.__class__.kwargs["generate_entityset"])
-        zephyr.generate_label_times(**self.__class__.kwargs["generate_label_times"])
+        zephyr.generate_entityset(
+            **self.__class__.kwargs["generate_entityset"])
+        zephyr.generate_label_times(
+            **self.__class__.kwargs["generate_label_times"])
         label_times = zephyr.get_label_times(visualize=False)
         assert label_times is not None

     def test_generate_feature_matrix_and_labels(self):
         zephyr = Zephyr()
-        zephyr.generate_entityset(**self.__class__.kwargs["generate_entityset"])
-        zephyr.generate_label_times(**self.__class__.kwargs["generate_label_times"])
+        zephyr.generate_entityset(
+            **self.__class__.kwargs["generate_entityset"])
+        zephyr.generate_label_times(
+            **self.__class__.kwargs["generate_label_times"])
         zephyr.generate_feature_matrix(
             **self.__class__.kwargs["generate_feature_matrix"])
-        feature_matrix, label_col_name, features= zephyr.get_feature_matrix()
+        feature_matrix, label_col_name, features = zephyr.get_feature_matrix()
         assert feature_matrix is not None
         assert label_col_name in feature_matrix.columns
         assert features is not None

     def test_generate_train_test_split(self):
         zephyr = Zephyr()
-        zephyr.generate_entityset(**self.__class__.kwargs["generate_entityset"])
-        zephyr.generate_label_times(**self.__class__.kwargs["generate_label_times"])
+        zephyr.generate_entityset(
+            **self.__class__.kwargs["generate_entityset"])
+        zephyr.generate_label_times(
+            **self.__class__.kwargs["generate_label_times"])
         zephyr.generate_feature_matrix(
             **self.__class__.kwargs["generate_feature_matrix"])
-        zephyr.generate_train_test_split(**self.__class__.kwargs["generate_train_test_split"])
+        zephyr.generate_train_test_split(
+            **self.__class__.kwargs["generate_train_test_split"])
         train_test_split = zephyr.get_train_test_split()
         assert train_test_split is not None
         X_train, X_test, y_train, y_test = train_test_split
@@ -217,7 +237,8 @@ def test_fit_pipeline_no_visual(self):
     def test_fit_pipeline_visual(self):
         zephyr = Zephyr()
         zephyr.set_train_test_split(*self.base_train_test_split())
-        output = zephyr.fit_pipeline(visual=True, **self.__class__.kwargs["fit_pipeline"])
+        output = zephyr.fit_pipeline(
+            visual=True, **self.__class__.kwargs["fit_pipeline"])
         assert isinstance(output, dict)
         assert list(output.keys()) == ['threshold', 'scores']
         pipeline = zephyr.get_fitted_pipeline()

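Beyond the line wrapping, these tests trace the full Zephyr workflow end to end. A hedged sketch of that flow, using only method names exercised above; every kwarg dict is a placeholder that would be built from real turbine dataframes, as in the base_dfs() fixture:

# Sketch of the workflow the tests walk through; the kwarg dicts are placeholders
# and must come from actual data, mirroring the test fixtures above.
from zephyr_ml.core import Zephyr


def run_zephyr_flow(entityset_kwargs, label_times_kwargs, feature_matrix_kwargs,
                    train_test_split_kwargs, fit_pipeline_kwargs):
    zephyr = Zephyr()
    zephyr.generate_entityset(**entityset_kwargs)
    zephyr.generate_label_times(**label_times_kwargs)
    zephyr.generate_feature_matrix(**feature_matrix_kwargs)
    zephyr.generate_train_test_split(**train_test_split_kwargs)
    # With visual=True, fit_pipeline returns a dict with 'threshold' and 'scores'.
    return zephyr.fit_pipeline(visual=True, **fit_pipeline_kwargs)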
tests/test_feature_engineering.py

Lines changed: 12 additions & 6 deletions
@@ -170,8 +170,10 @@ def test_process_signals_pidata(pidata_es, transformations, aggregations):
         "fft.mean.mean_value": [9872, None, 559]
     })
     expected['COD_ELEMENT'] = expected['COD_ELEMENT'].astype('category')
-    expected['fft.mean.mean_value'] = expected['fft.mean.mean_value'].astype('float64')
-    processed['fft.mean.mean_value'] = processed['fft.mean.mean_value'].astype('float64')
+    expected['fft.mean.mean_value'] = expected['fft.mean.mean_value'].astype(
+        'float64')
+    processed['fft.mean.mean_value'] = processed['fft.mean.mean_value'].astype(
+        'float64')

     assert pidata_es['pidata_processed'].shape[0] == 3
     assert pidata_es['pidata_processed'].shape[1] == 4
@@ -202,8 +204,10 @@ def test_process_signals_pidata_replace(pidata_es, transformations, aggregations
         "fft.mean.mean_value": [9872, None, 559]
     })
     expected['COD_ELEMENT'] = expected['COD_ELEMENT'].astype('category')
-    expected['fft.mean.mean_value'] = expected['fft.mean.mean_value'].astype('float64')
-    processed['fft.mean.mean_value'] = processed['fft.mean.mean_value'].astype('float64')
+    expected['fft.mean.mean_value'] = expected['fft.mean.mean_value'].astype(
+        'float64')
+    processed['fft.mean.mean_value'] = processed['fft.mean.mean_value'].astype(
+        'float64')

     assert pidata_es['pidata'].shape[0] == 3
     assert pidata_es['pidata'].shape[1] == 4
@@ -233,7 +237,8 @@ def test_process_signals_scada(scada_es, transformations, aggregations):
         "fft.mean.mean_value": [1002, None, 56.8]
     })
     expected['COD_ELEMENT'] = expected['COD_ELEMENT'].astype('category')
-    expected['fft.mean.mean_value'] = expected['fft.mean.mean_value'].astype('float64')
+    expected['fft.mean.mean_value'] = expected['fft.mean.mean_value'].astype(
+        'float64')
     after = scada_es['scada'].copy()

     assert scada_es['scada_processed'].shape[0] == 3
@@ -263,7 +268,8 @@ def test_process_signals_scada_replace(scada_es, transformations, aggregations):
         "fft.mean.mean_value": [1002, None, 56.8]
     })
     expected['COD_ELEMENT'] = expected['COD_ELEMENT'].astype('category')
-    expected['fft.mean.mean_value'] = expected['fft.mean.mean_value'].astype('float64')
+    expected['fft.mean.mean_value'] = expected['fft.mean.mean_value'].astype(
+        'float64')

     assert scada_es['scada'].shape[0] == 3
     assert scada_es['scada'].shape[1] == 4

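The repeated astype('float64') casts are what got wrapped here; they keep the expected and processed columns dtype-compatible before comparison. A small self-contained illustration (the Series are stand-ins for the test frames, with values chosen to be exactly representable):

# Stand-in example: casting both sides to float64 avoids dtype mismatches
# (e.g. a float32 result vs. a float64 expectation) when frames are compared.
import pandas as pd

expected = pd.Series([9872, None, 559], dtype='float64')
processed = pd.Series([9872, None, 559], dtype='float32').astype('float64')
pd.testing.assert_series_equal(expected, processed)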
tests/test_metadata.py

Lines changed: 2 additions & 1 deletion
@@ -10,7 +10,8 @@ def test_default_scada_mapped_kwargs():


 def test_default_pidata_mapped_kwargs():
-    expected = {**DEFAULT_ES_KWARGS, 'pidata': DEFAULT_ES_TYPE_KWARGS['pidata']}
+    expected = {**DEFAULT_ES_KWARGS,
+                'pidata': DEFAULT_ES_TYPE_KWARGS['pidata']}
     actual = get_mapped_kwargs('pidata')
     assert expected == actual


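The wrapped expression is plain dict unpacking: the type-specific 'pidata' entry layered over the entity-set defaults. A hedged standalone version with hypothetical stand-in values (the real DEFAULT_ES_KWARGS and DEFAULT_ES_TYPE_KWARGS live in zephyr_ml):

# Hypothetical stand-in dictionaries; only the merge idiom itself is the point.
DEFAULT_ES_KWARGS = {'index': 'COD_ELEMENT'}                  # illustrative values
DEFAULT_ES_TYPE_KWARGS = {'pidata': {'time_index': 'time'}}   # illustrative values

expected = {**DEFAULT_ES_KWARGS,
            'pidata': DEFAULT_ES_TYPE_KWARGS['pidata']}
print(expected)   # defaults plus the 'pidata'-specific entry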
zephyr_ml/__init__.py

Lines changed: 3 additions & 10 deletions
@@ -9,16 +9,9 @@
 import os

 from zephyr_ml.core import Zephyr
-from zephyr_ml.entityset import (
-    # create_pidata_entityset,
-    # create_scada_entityset,
-    _create_entityset,
-    VALIDATE_DATA_FUNCTIONS,
-)
+from zephyr_ml.entityset import VALIDATE_DATA_FUNCTIONS, _create_entityset
 from zephyr_ml.labeling import DataLabeler

-MLBLOCKS_PRIMITIVES = os.path.join(os.path.dirname(__file__), "primitives", "jsons")
+MLBLOCKS_PRIMITIVES = os.path.join(
+    os.path.dirname(__file__), "primitives", "jsons")
 MLBLOCKS_PIPELINES = os.path.join(os.path.dirname(__file__), "pipelines")
-# import os, sys
-
-# sys.path.append(os.path.dirname(os.path.realpath(__file__)))

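After the cleanup, the module still exposes the two mlblocks discovery paths; a quick, hedged check of where they point once the package is importable (printed paths are illustrative):

# Assumes zephyr_ml is installed; only reads the module-level constants kept above.
import zephyr_ml

print(zephyr_ml.MLBLOCKS_PRIMITIVES)   # .../zephyr_ml/primitives/jsons
print(zephyr_ml.MLBLOCKS_PIPELINES)    # .../zephyr_ml/pipelines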