@@ -67,6 +67,8 @@ def convert_xgb_predictions(y_pred, objective):
67
67
help = 'Control a balance of positive and negative weights' )
68
68
parser .add_argument ('--count-dmatrix' , default = False , action = 'store_true' ,
69
69
help = 'Count DMatrix creation in time measurements' )
70
+ parser .add_argument ('--inplace-predict' , default = False , action = 'store_true' ,
71
+ help = 'Use inplace_predict() instead of DMatrix-based prediction' )
70
72
parser .add_argument ('--single-precision-histogram' , default = False , action = 'store_true' ,
71
73
help = 'Build histograms instead of double precision' )
72
74
parser .add_argument ('--enable-experimental-json-serialization' , default = True ,
@@ -135,9 +137,13 @@ def fit():
135
137
dtrain = xgb .DMatrix (X_train , y_train )
136
138
return xgb .train (xgb_params , dtrain , params .n_estimators )
137
139
138
- def predict ():
139
- dtest = xgb .DMatrix (X_test , y_test )
140
- return booster .predict (dtest )
140
+ if params .inplace_predict == False :
141
+ def predict ():
142
+ dtest = xgb .DMatrix (X_test , y_test )
143
+ return booster .predict (dtest )
144
+ else :
145
+ def predict ():
146
+ return booster .inplace_predict (np .ascontiguousarray (X_test .values , dtype = np .float32 ))
141
147
else :
142
148
def fit ():
143
149
return xgb .train (xgb_params , dtrain , params .n_estimators )
@@ -150,8 +156,7 @@ def predict():
150
156
train_metric = metric_func (y_pred , y_train )
151
157
152
158
predict_time , y_pred = measure_function_time (predict , params = params )
153
- test_metric = metric_func (
154
- convert_xgb_predictions (y_pred , params .objective ), y_test )
159
+ test_metric = metric_func (convert_xgb_predictions (y_pred , params .objective ), y_test )
155
160
156
161
print_output (library = 'xgboost' , algorithm = f'gradient_boosted_trees_{ task } ' ,
157
162
stages = ['training' , 'prediction' ], columns = columns ,
0 commit comments