
Commit 4f359a7

correct documentation

1 parent 9f8a5ad commit 4f359a7

12 files changed (+134, -243 lines)

HISTORY.rst
Lines changed: 1 addition & 1 deletion

@@ -2,7 +2,7 @@
History
=======

-0.3.3 (2022-XX-XX)
+0.4.0 (2022-06-24)
------------------
* Relax and fix typing
* Add Split Conformal Quantile Regression

doc/images/quickstart_1.png
Binary file modified (0 Bytes)

doc/tutorial_regression.rst
Lines changed: 4 additions & 22 deletions
@@ -144,13 +144,7 @@ prediction intervals.
    for strategy, params in STRATEGIES.items():
        if strategy == "conformalized_quantile_regression":
            mapie = MapieQuantileRegressor(polyn_model_quant, **params)
-            X_train, X_calib, y_train, y_calib = train_test_split(
-                X_train,
-                y_train,
-                test_size=0.3,
-                random_state=1
-            )
-            mapie.fit(X_train, y_train, X_calib=X_calib, y_calib=y_calib)
+            mapie.fit(X_train, y_train, random_state=1)
            y_pred[strategy], y_pis[strategy] = mapie.predict(X_test)
        else:
            mapie = MapieRegressor(polyn_model, **params)
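The same change recurs in the two later hunks of this file: the tutorial no longer splits the data by hand before fitting the conformalized quantile regressor. A hedged sketch of the updated call, assuming the tutorial's earlier definitions of ``STRATEGIES``, ``polyn_model_quant``, ``X_train``, ``y_train`` and ``X_test``:

.. code-block:: python

    # Sketch only: fit now carves out the calibration subset itself;
    # random_state makes that internal split reproducible.
    params = STRATEGIES["conformalized_quantile_regression"]
    mapie = MapieQuantileRegressor(polyn_model_quant, **params)
    mapie.fit(X_train, y_train, random_state=1)
    y_pred_cqr, y_pis_cqr = mapie.predict(X_test)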
@@ -462,13 +456,7 @@ prediction intervals.
    for strategy, params in STRATEGIES.items():
        if strategy == "conformalized_quantile_regression":
            mapie = MapieQuantileRegressor(polyn_model_quant, **params)
-            X_train, X_calib, y_train, y_calib = train_test_split(
-                X_train,
-                y_train,
-                test_size=0.3,
-                random_state=1
-            )
-            mapie.fit(X_train, y_train, X_calib=X_calib, y_calib=y_calib)
+            mapie.fit(X_train, y_train, random_state=1)
            y_pred[strategy], y_pis[strategy] = mapie.predict(X_test)
        else:
            mapie = MapieRegressor(polyn_model, **params)
@@ -589,6 +577,7 @@ next figure.
    fig = plt.figure()
    heteroscedastic_coverage.T.plot.bar(figsize=(12, 4), alpha=0.7)
    plt.axhline(0.95, ls="--", color="k")
+    plt.ylim([0.8, 1])
    plt.ylabel("Conditional coverage")
    plt.xlabel("x bins")
    plt.xticks(rotation=0)
@@ -793,13 +782,7 @@ strategies.
    for strategy, params in STRATEGIES.items():
        if strategy == "conformalized_quantile_regression":
            mapie = MapieQuantileRegressor(polyn_model_quant, **params)
-            X_train, X_calib, y_train, y_calib = train_test_split(
-                X_train,
-                y_train,
-                test_size=0.3,
-                random_state=1
-            )
-            mapie.fit(X_train, y_train, X_calib=X_calib, y_calib=y_calib)
+            mapie.fit(X_train, y_train, random_state=1)
            y_pred[strategy], y_pis[strategy] = mapie.predict(X_test)
        else:
            mapie = MapieRegressor(polyn_model, **params)
@@ -1071,7 +1054,6 @@ method and compare their prediction interval.

.. parsed-literal::

-    Metal device set to: Apple M1


.. code-block:: python
Five additional binary files changed (12.2 KB, 17.1 KB, -34 Bytes, 1.43 KB, -655 Bytes).

mapie/quantile_regression.py
Lines changed: 9 additions & 41 deletions
@@ -6,6 +6,7 @@
from sklearn.linear_model import QuantileRegressor
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline
+from sklearn.utils import check_random_state
from sklearn.utils.validation import (
    indexable,
    check_is_fitted,
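For context on the newly imported helper: ``check_random_state`` normalizes ``None``, an integer seed, or an existing ``RandomState`` into a ``RandomState`` instance, which ``train_test_split`` accepts directly. A small standalone illustration, not taken from the diff:

.. code-block:: python

    from sklearn.utils import check_random_state

    # An int seed yields a new seeded RandomState; an existing RandomState
    # instance is returned unchanged; None falls back to NumPy's global
    # RandomState singleton.
    rng = check_random_state(1)
    print(rng.randint(0, 10, size=3))  # reproducible draws with seed 1
    assert check_random_state(rng) is rng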
@@ -330,53 +331,18 @@ def _check_calib_set(
        X_calib: Optional[ArrayLike] = None,
        y_calib: Optional[ArrayLike] = None,
        calib_size: Optional[float] = 0.3,
-        random_state: Optional[Union[int, None]] = None,
+        random_state: Optional[Union[int, np.random.RandomState, None]] = None,
        shuffle: Optional[bool] = True,
        stratify: Optional[ArrayLike] = None,
    ) -> Tuple[
        ArrayLike, ArrayLike, ArrayLike, ArrayLike, Optional[ArrayLike]
    ]:
-        """_summary_
+        """
+        HPUT SOMETHING

        Parameters
        ----------
-        X : ArrayLike of shape (n_samples, n_features)
-            Training data.
-        y : ArrayLike of shape (n_samples,)
-            Training labels.
-        sample_weight : Optional[ArrayLike] of shape (n_samples,)
-            Sample weights for fitting the out-of-fold models.
-            If None, then samples are equally weighted.
-            If some weights are null,
-            their corresponding observations are removed
-            before the fitting process and hence have no residuals.
-            If weights are non-uniform, residuals are still uniformly weighted.
-            Note that the sample weight defined are only for the training, not
-            for the calibration procedure.
-            By default ``None``.
-        X_calib : Optional[ArrayLike] of shape (n_calib_samples, n_features)
-            Calibration data.
-        y_calib : Optional[ArrayLike] of shape (n_calib_samples,)
-            Calibration labels.
-        calib_size : Optional[float]
-            If X_calib and y_calib are not defined, then the calibration
-            dataset is created with the split defined by calib_size.
-        random_state : int, RandomState instance or None, default=None
-            For the ``sklearn.model_selection.train_test_split`` documentation.
-            Controls the shuffling applied to the data before applying the
-            split.
-            Pass an int for reproducible output across multiple function calls.
-            See :term:`Glossary <random_state>`.
-        shuffle : bool, default=True
-            For the ``sklearn.model_selection.train_test_split`` documentation.
-            Whether or not to shuffle the data before splitting.
-            If shuffle=False
-            then stratify must be None.
-        stratify : array-like, default=None
-            For the ``sklearn.model_selection.train_test_split`` documentation.
-            If not None, data is split in a stratified fashion, using this as
-            the class labels.
-            Read more in the :ref:`User Guide <stratification>`.
+        Same definition of parameters as for the ``fit`` method.

        Returns
        -------
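The trimmed docstring now defers to ``fit``, which (per the unchanged signature in the next hunk) still exposes both calibration paths. A hedged sketch of the two options, with illustrative variable names that are not from the repository:

.. code-block:: python

    # Option 1: let fit split off the calibration set internally,
    # driven by calib_size and random_state.
    mapie = MapieQuantileRegressor(estimator)
    mapie.fit(X, y, calib_size=0.3, random_state=1)

    # Option 2: supply a pre-built calibration set explicitly.
    mapie = MapieQuantileRegressor(estimator)
    mapie.fit(X_train, y_train, X_calib=X_calib, y_calib=y_calib)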
@@ -430,7 +396,7 @@ def fit(
        X_calib: Optional[ArrayLike] = None,
        y_calib: Optional[ArrayLike] = None,
        calib_size: Optional[float] = 0.3,
-        random_state: Optional[Union[int, None]] = None,
+        random_state: Optional[Union[int, np.random.RandomState, None]] = None,
        shuffle: Optional[bool] = True,
        stratify: Optional[ArrayLike] = None,
    ) -> MapieQuantileRegressor:
@@ -493,7 +459,8 @@ def fit(
        alpha = self._check_alpha(self.alpha)
        self.cv = self._check_cv(cast(str, self.cv))
        X, y = indexable(X, y)
-        X_train, y_train, X_calib, y_calib, sample_weight_train = self._check_calib_set(
+        random_state = check_random_state(random_state)
+        results = self._check_calib_set(
            X,
            y,
            sample_weight,
@@ -504,6 +471,7 @@ def fit(
            shuffle,
            stratify,
        )
+        X_train, y_train, X_calib, y_calib, sample_weight_train = results
        X_train, y_train = indexable(X_train, y_train)
        X_calib, y_calib = indexable(X_calib, y_calib)
        y_train, y_calib = _check_y(y_train), _check_y(y_calib)

mapie/utils.py
Lines changed: 2 additions & 0 deletions
@@ -544,6 +544,8 @@ def check_defined_variables_predict_cqr(

    Examples
    --------
+    >>> import warnings
+    >>> warnings.filterwarnings("error")
    >>> from mapie.utils import check_defined_variables_predict_cqr
    >>> try:
    ...     check_defined_variables_predict_cqr(True, None)
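The two added doctest lines promote warnings to exceptions, presumably so that the warning raised inside the ``try`` block surfaces as something the example's ``except`` clause can catch. A minimal standalone illustration of that mechanism (the warning message here is made up):

.. code-block:: python

    import warnings

    warnings.filterwarnings("error")  # raise warnings as exceptions
    try:
        warnings.warn("illustrative warning message")
    except Warning as exc:
        print(f"caught as an exception: {exc}")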
