Skip to content

Commit 6fc8ebf

Browse files
FaustinPulveric and Valentin-Laurent
authored and committed
Update plot_sadinle2019_example.py to v1 API (#674)
1 parent cdb77bd commit 6fc8ebf

File tree

1 file changed

+18
-16
lines changed

1 file changed

+18
-16
lines changed

examples/classification/3-scientific-articles/plot_sadinle2019_example.py

Lines changed: 18 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
Reproducing Example 7 from Sadinle et al. (2019)
44
================================================
55
6-
We use :class:`~mapie.classification._MapieClassifier` to reproduce
6+
We use :class:`~mapie_v1.classification.SplitConformalClassifier` to reproduce
77
Example 7 from Sadinle et al. (2019).
88
99
We consider a two-dimensional dataset with three labels. The distribution
@@ -12,9 +12,10 @@
1212
We model the data with Gaussian Naive Bayes classifier
1313
:class:`~sklearn.naive_bayes.GaussianNB` as a base model.
1414
15-
Prediction sets are estimated by :class:`~mapie.classification._MapieClassifier`
16-
from the distribution of the softmax scores of the true labels for three
17-
alpha values (0.2, 0.1, and 0.05) giving different class coverage levels.
15+
Prediction sets are estimated by
16+
:class:`~mapie_v1.classification.SplitConformalClassifier` from the distribution of the
17+
softmax scores of the true labels for three confidence level values (0.8, 0.9, and 0.95)
18+
giving different class coverage levels.
1819
1920
When the class coverage level is not large enough, the prediction sets can be
2021
empty.
@@ -25,8 +26,7 @@
2526
import numpy as np
2627
from sklearn.naive_bayes import GaussianNB
2728

28-
from mapie.classification import _MapieClassifier
29-
from mapie.conformity_scores import LACConformityScore
29+
from mapie.classification import SplitConformalClassifier
3030

3131
# Create training set from multivariate normal distribution
3232
centers = [(0, 3.5), (-2, 0), (2, 0)]
@@ -35,7 +35,7 @@
3535
x_min, x_max, y_min, y_max, step = -6, 8, -6, 8, 0.1
3636
n_samples = 500
3737
n_classes = 3
38-
alpha = [0.2, 0.1, 0.05]
38+
confidence_level = [0.8, 0.9, 0.95]
3939
np.random.seed(42)
4040
X_train = np.vstack(
4141
[
@@ -52,18 +52,20 @@
5252
)
5353
X_test = np.stack([xx.ravel(), yy.ravel()], axis=1)
5454

55-
# Apply _MapieClassifier on the dataset to get prediction sets
56-
clf = GaussianNB().fit(X_train, y_train)
55+
# Apply SplitConformalClassifier on the dataset to get prediction sets
56+
clf = GaussianNB()
57+
clf.fit(X_train, y_train)
5758
y_pred = clf.predict(X_test)
5859
y_pred_proba = clf.predict_proba(X_test)
5960
y_pred_proba_max = np.max(y_pred_proba, axis=1)
60-
mapie = _MapieClassifier(
61+
mapie = SplitConformalClassifier(
6162
estimator=clf,
62-
cv="prefit",
63-
conformity_score=LACConformityScore()
63+
confidence_level=confidence_level,
64+
prefit=True,
65+
conformity_score="lac"
6466
)
65-
mapie.fit(X_train, y_train)
66-
y_pred_mapie, y_ps_mapie = mapie.predict(X_test, alpha=alpha)
67+
mapie.conformalize(X_train, y_train)
68+
y_pred_mapie, y_ps_mapie = mapie.predict_set(X_test)
6769

6870
# Plot the results
6971
tab10 = plt.cm.get_cmap("Purples", 4)
@@ -84,7 +86,7 @@
8486
edgecolor="k",
8587
)
8688
axs[0].set_title("Predicted labels")
87-
for i, alpha_ in enumerate(alpha):
89+
for i, confidence_level_ in enumerate(confidence_level):
8890
y_ps_sums = y_ps_mapie[:, :, i].sum(axis=1)
8991
num_labels = axs[i + 1].scatter(
9092
X_test[:, 0],
@@ -98,5 +100,5 @@
98100
vmax=3,
99101
)
100102
cbar = plt.colorbar(num_labels, ax=axs[i + 1])
101-
axs[i + 1].set_title(f"Number of labels for alpha={alpha_}")
103+
axs[i + 1].set_title(f"Number of labels for confidence_level={confidence_level_}")
102104
plt.show()

0 commit comments

Comments
 (0)