 rng = np.random.RandomState(1994)

-shap_parameter_strategy = strategies.fixed_dictionaries({
-    'max_depth': strategies.integers(1, 11),
-    'max_leaves': strategies.integers(0, 256),
-    'num_parallel_tree': strategies.sampled_from([1, 10]),
-}).filter(lambda x: x['max_depth'] > 0 or x['max_leaves'] > 0)
+shap_parameter_strategy = strategies.fixed_dictionaries(
+    {
+        "max_depth": strategies.integers(1, 11),
+        "max_leaves": strategies.integers(0, 256),
+        "num_parallel_tree": strategies.sampled_from([1, 10]),
+    }
+).filter(lambda x: x["max_depth"] > 0 or x["max_leaves"] > 0)


 class TestSYCLPredict(unittest.TestCase):
@@ -25,25 +27,32 @@ def test_predict(self):
         test_num_cols = [10, 50, 500]
         for num_rows in test_num_rows:
             for num_cols in test_num_cols:
-                dtrain = xgb.DMatrix(np.random.randn(num_rows, num_cols),
-                                     label=[0, 1] * int(num_rows / 2))
-                dval = xgb.DMatrix(np.random.randn(num_rows, num_cols),
-                                   label=[0, 1] * int(num_rows / 2))
-                dtest = xgb.DMatrix(np.random.randn(num_rows, num_cols),
-                                    label=[0, 1] * int(num_rows / 2))
-                watchlist = [(dtrain, 'train'), (dval, 'validation')]
+                dtrain = xgb.DMatrix(
+                    np.random.randn(num_rows, num_cols),
+                    label=[0, 1] * int(num_rows / 2),
+                )
+                dval = xgb.DMatrix(
+                    np.random.randn(num_rows, num_cols),
+                    label=[0, 1] * int(num_rows / 2),
+                )
+                dtest = xgb.DMatrix(
+                    np.random.randn(num_rows, num_cols),
+                    label=[0, 1] * int(num_rows / 2),
+                )
+                watchlist = [(dtrain, "train"), (dval, "validation")]
                 res = {}
                 param = {
                     "objective": "binary:logistic",
-                    'eval_metric': 'logloss',
-                    'tree_method': 'hist',
-                    'device': 'cpu',
-                    'max_depth': 1,
-                    'verbosity': 0
+                    "eval_metric": "logloss",
+                    "tree_method": "hist",
+                    "device": "cpu",
+                    "max_depth": 1,
+                    "verbosity": 0,
                 }
-                bst = xgb.train(param, dtrain, iterations, evals=watchlist,
-                                evals_result=res)
-                assert self.non_increasing(res["train"]["logloss"])
+                bst = xgb.train(
+                    param, dtrain, iterations, evals=watchlist, evals_result=res
+                )
+                assert tm.non_increasing(res["train"]["logloss"])
                 cpu_pred_train = bst.predict(dtrain, output_margin=True)
                 cpu_pred_test = bst.predict(dtest, output_margin=True)
                 cpu_pred_val = bst.predict(dval, output_margin=True)
@@ -53,15 +62,9 @@ def test_predict(self):
                 sycl_pred_test = bst.predict(dtest, output_margin=True)
                 sycl_pred_val = bst.predict(dval, output_margin=True)

-                np.testing.assert_allclose(cpu_pred_train, sycl_pred_train,
-                                           rtol=1e-6)
-                np.testing.assert_allclose(cpu_pred_val, sycl_pred_val,
-                                           rtol=1e-6)
-                np.testing.assert_allclose(cpu_pred_test, sycl_pred_test,
-                                           rtol=1e-6)
-
-    def non_increasing(self, L):
-        return all((y - x) < 0.001 for x, y in zip(L, L[1:]))
+                np.testing.assert_allclose(cpu_pred_train, sycl_pred_train, rtol=1e-6)
+                np.testing.assert_allclose(cpu_pred_val, sycl_pred_val, rtol=1e-6)
+                np.testing.assert_allclose(cpu_pred_test, sycl_pred_test, rtol=1e-6)

     @pytest.mark.skipif(**tm.no_sklearn())
     def test_multi_predict(self):
@@ -70,8 +73,7 @@ def test_multi_predict(self):

         n = 1000
         X, y = make_regression(n, random_state=rng)
-        X_train, X_test, y_train, y_test = train_test_split(X, y,
-                                                            random_state=123)
+        X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=123)
         dtrain = xgb.DMatrix(X_train, label=y_train)
         dtest = xgb.DMatrix(X_test)

@@ -100,17 +102,19 @@ def test_sklearn(self):
         X_test, y_test = X[tr_size:, :], y[tr_size:]

         # First with cpu_predictor
-        params = {'tree_method': 'hist',
-                  'device': 'cpu',
-                  'n_jobs': -1,
-                  'verbosity': 0,
-                  'seed': 123}
+        params = {
+            "tree_method": "hist",
+            "device": "cpu",
+            "n_jobs": -1,
+            "verbosity": 0,
+            "seed": 123,
+        }
         m = xgb.XGBRegressor(**params).fit(X_train, y_train)
         cpu_train_score = m.score(X_train, y_train)
         cpu_test_score = m.score(X_test, y_test)

         # Now with sycl_predictor
-        params['device'] = 'sycl'
+        params["device"] = "sycl"
         m.set_params(**params)

         # m = xgb.XGBRegressor(**params).fit(X_train, y_train)
@@ -121,8 +125,9 @@ def test_sklearn(self):
         assert np.allclose(cpu_train_score, sycl_train_score)
         assert np.allclose(cpu_test_score, sycl_test_score)

-    @given(strategies.integers(1, 10),
-           tm.make_dataset_strategy(), shap_parameter_strategy)
+    @given(
+        strategies.integers(1, 10), tm.make_dataset_strategy(), shap_parameter_strategy
+    )
     @settings(deadline=None)
     def test_shap(self, num_rounds, dataset, param):
         if dataset.name.endswith("-l1"):  # not supported by the exact tree method
@@ -138,8 +143,9 @@ def test_shap(self, num_rounds, dataset, param):
         assume(len(dataset.y) > 0)
         assert np.allclose(np.sum(shap, axis=len(shap.shape) - 1), margin, 1e-3, 1e-3)

-    @given(strategies.integers(1, 10),
-           tm.make_dataset_strategy(), shap_parameter_strategy)
+    @given(
+        strategies.integers(1, 10), tm.make_dataset_strategy(), shap_parameter_strategy
+    )
     @settings(deadline=None, max_examples=20)
     def test_shap_interactions(self, num_rounds, dataset, param):
         if dataset.name.endswith("-l1"):  # not supported by the exact tree method
@@ -153,5 +159,9 @@ def test_shap_interactions(self, num_rounds, dataset, param):
         shap = bst.predict(test_dmat, pred_interactions=True)
         margin = bst.predict(test_dmat, output_margin=True)
         assume(len(dataset.y) > 0)
-        assert np.allclose(np.sum(shap, axis=(len(shap.shape) - 1, len(shap.shape) - 2)), margin,
-                           1e-3, 1e-3)
+        assert np.allclose(
+            np.sum(shap, axis=(len(shap.shape) - 1, len(shap.shape) - 2)),
+            margin,
+            1e-3,
+            1e-3,
+        )
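
Aside on the helper this diff swaps in: `assert self.non_increasing(...)` becomes `assert tm.non_increasing(...)`, and the local helper method is deleted. A minimal sketch of what the shared helper is assumed to do, based on the removed local version; the `tolerance` parameter name and its status as a keyword argument are assumptions, only the 0.001 threshold comes from the deleted code:

    # Sketch only: assumed behavior of tm.non_increasing, mirroring the
    # deleted local helper. Each consecutive step in the eval-metric
    # history may rise by less than `tolerance` before the sequence
    # stops counting as (approximately) non-increasing.
    def non_increasing(L, tolerance=0.001):
        return all((y - x) < tolerance for x, y in zip(L, L[1:]))

    # e.g. non_increasing([0.69, 0.52, 0.40]) -> True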