1 change: 0 additions & 1 deletion pyproject.toml
@@ -211,7 +211,6 @@ module = [
     "river.feature_selection.*",
     "river.misc.*",
     "river.active.*",
-    "river.conf.*",
     "river.neural_net.*",
     "river.test_estimators",
     "river.dummy",
6 changes: 3 additions & 3 deletions river/conf/interval.py
@@ -24,14 +24,14 @@ class Interval:
     upper: float
 
     @property
-    def center(self):
+    def center(self) -> float:
         """The center of the interval."""
         return (self.lower + self.upper) / 2
 
     @property
-    def width(self):
+    def width(self) -> float:
         """The width of the interval."""
         return self.upper - self.lower
 
-    def __contains__(self, x):
+    def __contains__(self, x: float) -> bool:
         return self.lower <= x <= self.upper
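
For reference, a minimal usage sketch of the Interval helper now that its accessors are typed. It assumes the dataclass-style constructor implied by the field annotations and imports straight from river/conf/interval.py; the bounds are made-up values.

from river.conf.interval import Interval

ci = Interval(lower=2.0, upper=4.0)  # hypothetical bounds

print(ci.center)   # (2.0 + 4.0) / 2 == 3.0, now typed as float
print(ci.width)    # 4.0 - 2.0 == 2.0, now typed as float
print(3.5 in ci)   # True, via __contains__(x: float) -> bool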
41 changes: 34 additions & 7 deletions river/conf/jackknife.py
@@ -1,11 +1,16 @@
 from __future__ import annotations
 
-from river import base, stats
+from collections.abc import Iterator
+from typing import Literal, TypeVar, overload
+
+from river import base, compose, stats
 
 from . import interval
 
+T = TypeVar("T", bound=base.Regressor)
+
+
-class RegressionJackknife(base.Wrapper, base.Regressor):
+class RegressionJackknife(base.Wrapper[T], base.Regressor):
     """Jackknife method for regression.
 
     This is a conformal prediction method for regression. It is based on the jackknife method. The
@@ -81,7 +86,7 @@ class RegressionJackknife(base.Wrapper, base.Regressor):
 
     def __init__(
         self,
-        regressor: base.Regressor,
+        regressor: T,
         confidence_level: float = 0.95,
         window_size: int | None = None,
     ):
@@ -100,24 +105,46 @@ def __init__(
         )
 
     @property
-    def _wrapped_model(self):
+    def _wrapped_model(self) -> T:
         return self.regressor
 
     @classmethod
-    def _unit_test_params(cls):
+    def _unit_test_params(cls) -> Iterator[dict[str, compose.Pipeline]]:
         from river import linear_model, preprocessing
 
         yield {"regressor": (preprocessing.StandardScaler() | linear_model.LinearRegression())}
 
-    def learn_one(self, x, y, **kwargs):
+    def learn_one(
+        self, x: dict[base.typing.FeatureName, object], y: base.typing.RegTarget, **kwargs: object
+    ) -> None:
         # Update the quantiles
         error = y - self.regressor.predict_one(x)
         self._lower.update(error)
         self._upper.update(error)
 
         self.regressor.learn_one(x, y, **kwargs)
 
-    def predict_one(self, x, with_interval=False, **kwargs):
+    @overload
+    def predict_one(
+        self,
+        x: dict[base.typing.FeatureName, object],
+        with_interval: Literal[False] = False,
+        **kwargs: object,
+    ) -> float: ...
+    @overload
+    def predict_one(
+        self,
+        x: dict[base.typing.FeatureName, object],
+        with_interval: Literal[True],
+        **kwargs: object,
+    ) -> interval.Interval: ...
+
+    def predict_one(
+        self,
+        x: dict[base.typing.FeatureName, object],
+        with_interval: bool = False,
+        **kwargs: object,
+    ) -> float | interval.Interval:
         """Predict the output of features `x`.
 
         Parameters
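
To show what the new predict_one overloads buy downstream code, here is a rough progressive-validation sketch in the spirit of the class docstring. The dataset, pipeline, and hyperparameters are illustrative choices rather than anything taken from this diff, and it assumes a river install that includes these changes; with the Literal[True] overload, a type checker knows the first call below returns an interval.Interval and the second a plain float.

from river import datasets, linear_model, preprocessing
from river.conf.jackknife import RegressionJackknife

model = RegressionJackknife(
    regressor=preprocessing.StandardScaler() | linear_model.LinearRegression(),
    confidence_level=0.9,
    window_size=200,
)

inside = 0
n = 0
for x, y in datasets.TrumpApproval():
    ci = model.predict_one(x, with_interval=True)  # Literal[True] overload -> interval.Interval
    point = model.predict_one(x)                   # default overload -> float
    inside += y in ci                              # Interval.__contains__ does the coverage check
    n += 1
    model.learn_one(x, y)

print(f"empirical coverage: {inside / n:.3f}")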
4 changes: 2 additions & 2 deletions river/stats/quantile.py
@@ -136,12 +136,12 @@ def __init__(self, q: float, window_size: int):
         self.window_size_value = window_size
         self._is_updated = False
 
-    def update(self, x):
+    def update(self, x) -> None:
         self._rolling_quantile.update(x)
         if not self._is_updated:
             self._is_updated = True
 
-    def get(self):
+    def get(self) -> float | None:
         if not self._is_updated:
             return None
         return self._rolling_quantile.get()
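
One practical effect of annotating get() as float | None in the warm-up wrapper above (its class name sits outside the visible hunk): callers must narrow the Optional before doing arithmetic. A small hypothetical caller, with stat standing in for any object exposing this get() contract:

def doubled_quantile(stat) -> float | None:
    # Hypothetical helper, not part of river: returns twice the tracked
    # quantile once at least one value has been observed.
    value = stat.get()
    if value is None:  # nothing observed yet, per the _is_updated guard
        return None
    return 2 * value   # a type checker narrows `value` to float on this branch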