|
| 1 | +# pylint: disable=unused-argument |
| 2 | +from sklearn.covariance import EllipticEnvelope |
| 3 | +from sklearn.ensemble import IsolationForest |
| 4 | +from sklearn.neighbors import LocalOutlierFactor |
| 5 | +from Orange.base import SklLearner, SklModel |
| 6 | +from Orange.data import Table, Domain |
| 7 | + |
| 8 | +__all__ = ["LocalOutlierFactorLearner", "IsolationForestLearner", |
| 9 | + "EllipticEnvelopeLearner"] |
| 10 | + |
| 11 | + |
class _OutlierDetector(SklLearner):
    """Base learner for unsupervised outlier detectors.

    Before delegating to ``SklLearner.__call__``, the input table is
    projected onto its attribute columns only — class and meta variables
    are dropped, since outlier detection is unsupervised.
    """

    def __call__(self, data: Table):
        attributes_only = Domain(data.domain.attributes)
        return super().__call__(data.transform(attributes_only))
| 16 | + |
| 17 | + |
class LocalOutlierFactorLearner(_OutlierDetector):
    """Learner wrapping ``sklearn.neighbors.LocalOutlierFactor``.

    The keyword arguments mirror the wrapped sklearn estimator's
    signature and are forwarded to it verbatim via ``self.params``.
    """
    __wraps__ = LocalOutlierFactor
    name = "Local Outlier Factor"

    def __init__(self, n_neighbors=20, algorithm="auto", leaf_size=30,
                 metric="minkowski", p=2, metric_params=None,
                 contamination="auto", novelty=True, n_jobs=None,
                 preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        # vars() snapshots all __init__ locals as the estimator's
        # parameter dict — this is why the arguments look "unused"
        # (see the file-level pylint disable). Presumably SklLearner
        # filters/consumes these when instantiating __wraps__;
        # verify against Orange.base.
        self.params = vars()
| 28 | + |
| 29 | + |
class IsolationForestLearner(_OutlierDetector):
    """Learner wrapping ``sklearn.ensemble.IsolationForest``.

    The keyword arguments mirror the wrapped sklearn estimator's
    signature and are forwarded to it verbatim via ``self.params``.
    """
    __wraps__ = IsolationForest
    name = "Isolation Forest"

    def __init__(self, n_estimators=100, max_samples='auto',
                 contamination='auto', max_features=1.0, bootstrap=False,
                 n_jobs=None, behaviour='deprecated', random_state=None,
                 verbose=0, warm_start=False, preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        # vars() snapshots all __init__ locals as the estimator's
        # parameter dict — arguments are intentionally "unused" here
        # (see the file-level pylint disable). NOTE(review):
        # behaviour='deprecated' matches sklearn versions that still
        # accepted the parameter; confirm against the pinned sklearn.
        self.params = vars()
| 40 | + |
| 41 | + |
class EllipticEnvelopeClassifier(SklModel):
    """Model wrapper exposing Mahalanobis distances of the fitted envelope."""

    def mahalanobis(self, observations):
        """Computes squared Mahalanobis distances of given observations.

        Parameters
        ----------
        observations : ndarray (n_samples, n_features) or Orange Table

        Returns
        -------
        distances : ndarray (n_samples,)
            Squared Mahalanobis distances given observations.
        """
        # Accept either a raw feature matrix or an Orange Table.
        x = observations.X if isinstance(observations, Table) else observations
        return self.skl_model.mahalanobis(x)
| 58 | + |
| 59 | + |
class EllipticEnvelopeLearner(_OutlierDetector):
    """Learner wrapping ``sklearn.covariance.EllipticEnvelope``.

    Fitting returns an ``EllipticEnvelopeClassifier`` so callers can also
    query squared Mahalanobis distances of observations.
    """
    __wraps__ = EllipticEnvelope
    __returns__ = EllipticEnvelopeClassifier
    name = "Covariance Estimator"

    def __init__(self, store_precision=True, assume_centered=False,
                 support_fraction=None, contamination=0.1,
                 random_state=None, preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        # vars() snapshots all __init__ locals as the estimator's
        # parameter dict (hence the file-level unused-argument disable).
        self.params = vars()

    # FIX: the previous revision re-defined __call__ with a body identical
    # to _OutlierDetector.__call__; its super().__call__ then re-entered
    # the base override, so the attribute-only domain transform ran twice
    # per call. The inherited __call__ already performs it exactly once.
0 commit comments