@@ -112,11 +112,11 @@ in order to obtain a 95% confidence for our prediction intervals.
112112 " cv_plus" : dict (method = " plus" , cv = 10 ),
113113 " cv_minmax" : dict (method = " minmax" , cv = 10 ),
114114 }
115- y_preds = {}
115+ y_pred, y_pis = {}, {}
116116 for strategy, params in STRATEGIES .items():
117- mapie = MapieRegressor(polyn_model, alpha = 0.05 , ensemble = False , ** params)
117+ mapie = MapieRegressor(polyn_model, ensemble = False , ** params)
118118 mapie.fit(X_train, y_train)
119- y_preds [strategy] = mapie.predict(X_test)[:, :, 0 ]
119+ y_pred [strategy], y_pis[strategy] = mapie.predict(X_test, alpha = 0.05 )
120120
 Let’s now compare the confidence intervals with the prediction intervals obtained
 by the Jackknife+, Jackknife-minmax, CV+, and CV-minmax strategies.
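
With this change, ``alpha`` is no longer passed to the ``MapieRegressor`` constructor but to ``predict``, which returns the point predictions and the prediction intervals separately. A minimal sketch of the new call, assuming the ``mapie.regression`` import path (it may differ across versions) and purely illustrative data that is not taken from the tutorial:

.. code:: python

    import numpy as np
    from sklearn.linear_model import LinearRegression
    from mapie.regression import MapieRegressor  # assumed import path

    # Illustrative data; the tutorial builds its own X_train, y_train, X_test.
    rng = np.random.default_rng(42)
    X_train = rng.uniform(-1, 1, size=(200, 1))
    y_train = X_train.ravel() ** 2 + rng.normal(0, 0.1, size=200)
    X_test = np.linspace(-1, 1, 50).reshape(-1, 1)

    mapie = MapieRegressor(LinearRegression(), method="plus", cv=10)
    mapie.fit(X_train, y_train)

    # alpha is now given at prediction time.
    y_pred, y_pis = mapie.predict(X_test, alpha=0.05)
    print(y_pred.shape)  # (50,): point predictions
    print(y_pis.shape)   # (50, 2, 1): lower/upper bounds for each alpha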
@@ -159,9 +159,9 @@ by the Jackknife+, Jackknife-minmax, CV+, and CV-minmax strategies.
     X_test.ravel(),
     y_mesh.ravel(),
     1.96 * noise,
-    y_preds[strategy][:, 0].ravel(),
-    y_preds[strategy][:, 1].ravel(),
-    y_preds[strategy][:, 2].ravel(),
+    y_pred[strategy].ravel(),
+    y_pis[strategy][:, 0, 0].ravel(),
+    y_pis[strategy][:, 1, 0].ravel(),
     ax=coord,
     title=strategy
 )
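
In the plotting calls above, ``y_pis[strategy][:, 0, 0]`` and ``y_pis[strategy][:, 1, 0]`` are the lower and upper bounds for the single ``alpha`` requested. A small helper for checking how often the true values fall inside these bounds, sketched under the ``(n_samples, 2, n_alpha)`` layout shown in this diff (the dummy arrays are only for illustration):

.. code:: python

    import numpy as np

    def empirical_coverage(y_true, y_pis, alpha_index=0):
        """Fraction of true values inside [lower, upper] for one alpha."""
        lower = y_pis[:, 0, alpha_index]
        upper = y_pis[:, 1, alpha_index]
        return float(np.mean((y_true >= lower) & (y_true <= upper)))

    # Dummy values; in the tutorial, y_test and y_pis[strategy] would be used.
    y_true = np.array([0.0, 1.0, 2.0])
    y_pis = np.stack([y_true - 0.5, y_true + 0.5], axis=1)[:, :, np.newaxis]
    print(empirical_coverage(y_true, y_pis))  # 1.0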
@@ -178,7 +178,7 @@ Let’s confirm this by comparing the prediction interval widths over

 fig, ax = plt.subplots(1, 1, figsize=(7, 5))
 for strategy in STRATEGIES:
-    ax.plot(X_test, y_preds[strategy][:, 2] - y_preds[strategy][:, 1])
+    ax.plot(X_test, y_pis[strategy][:, 1, 0] - y_pis[strategy][:, 0, 0])
 ax.axhline(1.96 * 2 * noise, ls="--", color="k")
 ax.set_xlabel("x")
 ax.set_ylabel("Prediction Interval Width")
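
The plotted width is simply the upper bound minus the lower bound, and the dashed line marks the width 2 × 1.96 × noise of the theoretical interval for Gaussian noise. The same comparison can be done numerically; a sketch assuming a ``y_pis`` dict with the ``(n_samples, 2, n_alpha)`` layout built in the loop above (the dummy dict below is illustrative):

.. code:: python

    import numpy as np

    # Dummy stand-in for the y_pis dict built above; each value has
    # shape (n_samples, 2, n_alpha) with sorted lower/upper bounds.
    rng = np.random.default_rng(0)
    base = np.sort(rng.normal(size=(100, 2, 1)), axis=1)
    y_pis = {"jackknife_plus": base, "cv_minmax": base * 1.5}

    # Mean prediction interval width per strategy.
    for strategy, intervals in y_pis.items():
        width = float(np.mean(intervals[:, 1, 0] - intervals[:, 0, 0]))
        print(f"{strategy}: mean width = {width:.3f}")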
@@ -311,11 +311,11 @@ strategies.
311311 " cv_plus" : dict (method = " plus" , cv = 10 ),
312312 " cv_minmax" : dict (method = " minmax" , cv = 10 ),
313313 }
314- prediction_interval = {}
314+ y_pred, y_pis = {}, {}
315315 for strategy, params in STRATEGIES .items():
316- mapie = MapieRegressor(polyn_model, alpha = 0.05 , ensemble = False , ** params)
316+ mapie = MapieRegressor(polyn_model, ensemble = False , ** params)
317317 mapie.fit(X_train, y_train)
318- y_preds [strategy] = mapie.predict(X_test)[:, :, 0 ]
318+ y_pred [strategy], y_pis[strategy] = mapie.predict(X_test, alpha = 0.05 )
319319
320320
321321 .. code :: python
@@ -331,9 +331,9 @@ strategies.
     X_test.ravel(),
     y_mesh.ravel(),
     1.96 * noise,
-    y_preds[strategy][:, 0].ravel(),
-    y_preds[strategy][:, 1].ravel(),
-    y_preds[strategy][:, 2].ravel(),
+    y_pred[strategy].ravel(),
+    y_pis[strategy][:, 0, :].ravel(),
+    y_pis[strategy][:, 1, :].ravel(),
     ax=coord,
     title=strategy
 )
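
Here the intervals are indexed as ``[:, 0, :]`` and ``[:, 1, :]``: the trailing axis runs over the ``alpha`` values passed to ``predict``, so with a single ``alpha`` this is equivalent, after ``ravel()``, to the ``[:, 0, 0]`` indexing used earlier. Assuming ``predict`` also accepts a list of ``alpha`` values (this is not shown in the diff), several confidence levels can be obtained in one call; a sketch:

.. code:: python

    import numpy as np
    from sklearn.linear_model import LinearRegression
    from mapie.regression import MapieRegressor  # assumed import path

    rng = np.random.default_rng(1)
    X = rng.uniform(0, 10, size=(300, 1))
    y = X.ravel() + rng.normal(0, 1, size=300)

    mapie = MapieRegressor(LinearRegression(), method="plus", cv=10)
    mapie.fit(X, y)

    # The trailing axis of y_pis follows the order of the alpha list.
    y_pred, y_pis = mapie.predict(X, alpha=[0.05, 0.32])
    print(y_pis.shape)  # (300, 2, 2): bounds for alpha=0.05 and alpha=0.32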
@@ -354,7 +354,7 @@ Let's now compare the prediction interval widths between all strategies.
 fig, ax = plt.subplots(1, 1, figsize=(7, 5))
 ax.set_yscale("log")
 for strategy in STRATEGIES:
-    ax.plot(X_test, y_preds[strategy][:, 2] - y_preds[strategy][:, 1])
+    ax.plot(X_test, y_pis[strategy][:, 1, 0] - y_pis[strategy][:, 0, 0])
 ax.axhline(1.96 * 2 * noise, ls="--", color="k")
 ax.set_xlabel("x")
 ax.set_ylabel("Prediction Interval Width")
@@ -514,9 +514,9 @@ and compare their prediction interval.
 model_names = ["polyn", "xgb", "mlp"]
-prediction_interval = {}
+y_pred, y_pis = {}, {}
 for name, model in zip(model_names, models):
-    mapie = MapieRegressor(model, alpha=0.05, method="plus", cv=5, ensemble=True)
+    mapie = MapieRegressor(model, method="plus", cv=5, ensemble=True)
     mapie.fit(X_train, y_train)
-    y_preds[name] = mapie.predict(X_test)[:, :, 0]
+    y_pred[name], y_pis[name] = mapie.predict(X_test, alpha=0.05)

 .. code:: python

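The ``models`` list looped over here is defined outside this hunk. For reference, a hypothetical construction consistent with the names ``polyn``, ``xgb`` and ``mlp``, assuming scikit-learn and xgboost (the tutorial's actual hyperparameters are not shown in this diff):

.. code:: python

    from sklearn.pipeline import make_pipeline
    from sklearn.preprocessing import PolynomialFeatures
    from sklearn.linear_model import LinearRegression
    from sklearn.neural_network import MLPRegressor
    from xgboost import XGBRegressor  # assumes xgboost is installed

    # Hypothetical base models matching the names used above.
    polyn_model = make_pipeline(PolynomialFeatures(degree=4), LinearRegression())
    xgb_model = XGBRegressor(n_estimators=100, max_depth=3)
    mlp_model = MLPRegressor(hidden_layer_sizes=(50, 50), max_iter=500)

    models = [polyn_model, xgb_model, mlp_model]
    model_names = ["polyn", "xgb", "mlp"]
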
@@ -528,9 +528,9 @@ and compare their prediction interval.
     X_test.ravel(),
     y_mesh.ravel(),
     1.96 * noise,
-    y_preds[name][:, 0].ravel(),
-    y_preds[name][:, 1].ravel(),
-    y_preds[name][:, 2].ravel(),
+    y_pred[name].ravel(),
+    y_pis[name][:, 0, 0].ravel(),
+    y_pis[name][:, 1, 0].ravel(),
     ax=ax,
     title=name
 )