Skip to content

Commit 2a10da0

Browse files
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent daa37e9 commit 2a10da0

File tree

1 file changed

+39
-45
lines changed

1 file changed

+39
-45
lines changed

src/coniferest/aadforest.py

Lines changed: 39 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
import numpy as np
66
from scipy.optimize import minimize
7-
from scipy.special import log_expit, expit
7+
from scipy.special import expit, log_expit
88

99
from .calc_trees import calc_paths_sum, calc_paths_sum_transpose # noqa
1010
from .coniferest import Coniferest, ConiferestEvaluator
@@ -36,7 +36,7 @@ class AADCrossEntropyEvaluator(AADEvaluator):
3636
def __init__(self, aad):
    """Initialize cross-entropy AAD evaluation state.

    Starts from uniform per-leaf weights and a zero bias term.
    """
    super().__init__(aad)
    # One weight per leaf across all trees, initially uniform.
    self.weights = np.ones(shape=(self.n_leaves,))
    self.bias = 0.0  # Not sure about 0.0
4040

4141
def score_samples(self, x, weights=None):
4242
# Anomaly score is a probability of being REGULAR data.
@@ -47,46 +47,45 @@ def score_samples(self, x, weights=None):
4747
if weights is None:
4848
weights = self.weights
4949

50-
return expit(calc_paths_sum(
51-
self.selectors,
52-
self.node_offsets,
53-
x,
54-
weights,
55-
num_threads=self.num_threads,
56-
batch_size=self.get_batch_size(self.n_trees),
57-
) + self.bias)
58-
59-
def loss(
60-
self,
61-
weights,
62-
known_data,
63-
known_labels):
50+
return expit(
51+
calc_paths_sum(
52+
self.selectors,
53+
self.node_offsets,
54+
x,
55+
weights,
56+
num_threads=self.num_threads,
57+
batch_size=self.get_batch_size(self.n_trees),
58+
)
59+
+ self.bias
60+
)
6461

65-
v = calc_paths_sum(
66-
self.selectors,
67-
self.node_offsets,
68-
known_data,
69-
weights[1:],
70-
num_threads=self.num_threads,
71-
batch_size=self.get_batch_size(self.n_trees),
72-
) + weights[0]
62+
def loss(self, weights, known_data, known_labels):
63+
v = (
64+
calc_paths_sum(
65+
self.selectors,
66+
self.node_offsets,
67+
known_data,
68+
weights[1:],
69+
num_threads=self.num_threads,
70+
batch_size=self.get_batch_size(self.n_trees),
71+
)
72+
+ weights[0]
73+
)
7374

7475
return -np.sum(log_expit(known_labels * v))
7576

76-
def loss_gradient(
77-
self,
78-
weights,
79-
known_data,
80-
known_labels):
81-
82-
v = calc_paths_sum(
83-
self.selectors,
84-
self.node_offsets,
85-
known_data,
86-
weights[1:],
87-
num_threads=self.num_threads,
88-
batch_size=self.get_batch_size(self.n_trees),
89-
) + weights[0]
77+
def loss_gradient(self, weights, known_data, known_labels):
78+
v = (
79+
calc_paths_sum(
80+
self.selectors,
81+
self.node_offsets,
82+
known_data,
83+
weights[1:],
84+
num_threads=self.num_threads,
85+
batch_size=self.get_batch_size(self.n_trees),
86+
)
87+
+ weights[0]
88+
)
9089

9190
dloss_dv = -known_labels * expit(-known_labels * v)
9291
dloss_dbias = np.sum(dloss_dv)
@@ -102,14 +101,9 @@ def loss_gradient(
102101

103102
return np.concatenate([[dloss_dbias], dloss_dweights])
104103

105-
def loss_hessian(
106-
self,
107-
weights,
108-
vector,
109-
known_data,
110-
known_labels):
104+
def loss_hessian(self, weights, vector, known_data, known_labels):
111105
pass
112-
106+
113107

114108
class AADHingeEvaluator(AADEvaluator):
115109
def __init__(self, aad):

0 commit comments

Comments
 (0)