Skip to content

Commit d4ae198

Browse files
committed
working prefit example and added lightgbm to environment doc yaml file
1 parent 1191e20 commit d4ae198

File tree

2 files changed

+11
-14
lines changed

2 files changed

+11
-14
lines changed

environment.doc.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,3 +11,4 @@ dependencies:
1111
- sphinx-gallery=0.10.1
1212
- sphinx_rtd_theme=1.0.0
1313
- typing_extensions=4.0.1
14+
- lightgbm=3.1.1

examples/regression/1-quickstart/plot_prefit.py

Lines changed: 10 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -3,27 +3,23 @@
33
Example use of the prefit parameter with neural networks
44
========================================================
55
6-
:class:`mapie.regression.MapieRegressor` and
6+
:class:`mapie.regression.MapieRegressor` and
77
:class:`mapie.quantile_regression.MapieQuantileRegressor`
8-
are used to calibrate uncertainties for large models for
9-
which the cost of cross-validation is too high. Typically,
8+
are used to calibrate uncertainties for large models for
9+
which the cost of cross-validation is too high. Typically,
1010
neural networks rely on a single validation set.
1111
1212
In this example, we first fit a neural network on the training set. We
1313
then compute residuals on a validation set with the `cv="prefit"` parameter.
1414
Finally, we evaluate the model with prediction intervals on a testing set.
1515
"""
1616

17-
from cProfile import label
18-
import subprocess
1917

20-
subprocess.run("pip install lightgbm", shell=True)
21-
22-
import scipy
23-
from lightgbm import LGBMRegressor
2418
import numpy as np
25-
from sklearn.model_selection import train_test_split
19+
from lightgbm import LGBMRegressor
2620
from matplotlib import pyplot as plt
21+
import scipy
22+
from sklearn.model_selection import train_test_split
2723

2824
from mapie.regression import MapieRegressor
2925
from mapie.quantile_regression import MapieQuantileRegressor
@@ -87,15 +83,15 @@ def f(x: NDArray) -> NDArray:
8783
estimator = LGBMRegressor(
8884
objective='quantile',
8985
alpha=0.5,
90-
)
86+
) # Note that this is the same model as used for QR
9187
estimator.fit(X_train.reshape(-1, 1), y_train)
9288
list_estimators.append(estimator)
9389

9490
# Calibrate uncertainties on validation set
9591
mapie_cqr = MapieQuantileRegressor(list_estimators, cv="prefit")
9692
mapie_cqr.fit(X_val.reshape(-1, 1), y_val)
9793
y_pred_cqr, y_pis_cqr = mapie_cqr.predict(X_test.reshape(-1, 1))
98-
y_pred_low_cqr, y_pred_up_cqr = y_pis[:, 0, 0], y_pis[:, 1, 0]
94+
y_pred_low_cqr, y_pred_up_cqr = y_pis_cqr[:, 0, 0], y_pis_cqr[:, 1, 0]
9995
coverage_cqr = regression_coverage_score(y_test, y_pred_low_cqr, y_pred_up_cqr)
10096

10197
# Plot obtained prediction intervals on testing set
@@ -127,14 +123,14 @@ def f(x: NDArray) -> NDArray:
127123
X_test[order],
128124
y_pred_low[order],
129125
y_pred_up[order],
130-
alpha=0.2,
126+
alpha=0.4,
131127
label="prediction intervals QR"
132128
)
133129
plt.fill_between(
134130
X_test[order],
135131
y_pred_low_cqr[order],
136132
y_pred_up_cqr[order],
137-
alpha=0.2,
133+
alpha=0.4,
138134
label="prediction intervals CQR"
139135
)
140136
plt.title(

0 commit comments

Comments
 (0)