Skip to content

Commit 0d3ffae

Browse files
committed
fix: set a default query strategy (max_EI) for BayesianOptimizer
1 parent 401944a commit 0d3ffae

File tree

2 files changed

+16
-9
lines changed

2 files changed

+16
-9
lines changed

modAL/acquisition.py

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@
88
from scipy.special import ndtr
99
from sklearn.exceptions import NotFittedError
1010

11-
from modAL.models import BayesianOptimizer
1211
from modAL.utils.selection import multi_argmax
1312
from modAL.utils.data import modALinput
1413

@@ -33,7 +32,7 @@ def UCB(mean, std, beta):
3332
"""
3433

3534

36-
def optimizer_PI(optimizer: BayesianOptimizer, X: modALinput, tradeoff: float = 0) -> np.ndarray:
35+
def optimizer_PI(optimizer, X: modALinput, tradeoff: float = 0) -> np.ndarray:
3736
"""
3837
Probability of improvement acquisition function for Bayesian optimization.
3938
@@ -54,7 +53,7 @@ def optimizer_PI(optimizer: BayesianOptimizer, X: modALinput, tradeoff: float =
5453
return PI(mean, std, optimizer.y_max, tradeoff)
5554

5655

57-
def optimizer_EI(optimizer: BayesianOptimizer, X: modALinput, tradeoff: float = 0) -> np.ndarray:
56+
def optimizer_EI(optimizer, X: modALinput, tradeoff: float = 0) -> np.ndarray:
5857
"""
5958
Expected improvement acquisition function for Bayesian optimization.
6059
@@ -75,7 +74,7 @@ def optimizer_EI(optimizer: BayesianOptimizer, X: modALinput, tradeoff: float =
7574
return EI(mean, std, optimizer.y_max, tradeoff)
7675

7776

78-
def optimizer_UCB(optimizer: BayesianOptimizer, X: modALinput, beta: float = 1) -> np.ndarray:
77+
def optimizer_UCB(optimizer, X: modALinput, beta: float = 1) -> np.ndarray:
7978
"""
8079
Upper confidence bound acquisition function for Bayesian optimization.
8180
@@ -103,7 +102,7 @@ def optimizer_UCB(optimizer: BayesianOptimizer, X: modALinput, beta: float = 1)
103102
"""
104103

105104

106-
def max_PI(optimizer: BayesianOptimizer, X: modALinput, tradeoff: float = 0,
105+
def max_PI(optimizer, X: modALinput, tradeoff: float = 0,
107106
n_instances: int = 1) -> Tuple[np.ndarray, modALinput]:
108107
"""
109108
Maximum PI query strategy. Selects the instance with highest probability of improvement.
@@ -123,7 +122,7 @@ def max_PI(optimizer: BayesianOptimizer, X: modALinput, tradeoff: float = 0,
123122
return query_idx, X[query_idx]
124123

125124

126-
def max_EI(optimizer: BayesianOptimizer, X: modALinput, tradeoff: float = 0,
125+
def max_EI(optimizer, X: modALinput, tradeoff: float = 0,
127126
n_instances: int = 1) -> Tuple[np.ndarray, modALinput]:
128127
"""
129128
Maximum EI query strategy. Selects the instance with highest expected improvement.
@@ -143,7 +142,7 @@ def max_EI(optimizer: BayesianOptimizer, X: modALinput, tradeoff: float = 0,
143142
return query_idx, X[query_idx]
144143

145144

146-
def max_UCB(optimizer: BayesianOptimizer, X: modALinput, beta: float = 1,
145+
def max_UCB(optimizer, X: modALinput, beta: float = 1,
147146
n_instances: int = 1) -> Tuple[np.ndarray, modALinput]:
148147
"""
149148
Maximum UCB query strategy. Selects the instance with highest upper confidence bound.

modAL/models.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
from modAL.utils.data import data_vstack, modALinput
1818
from modAL.uncertainty import uncertainty_sampling
1919
from modAL.disagreement import vote_entropy_sampling, max_std_sampling
20+
from modAL.acquisition import max_EI
2021

2122

2223
if sys.version_info >= (3, 4):
@@ -323,8 +324,15 @@ class BayesianOptimizer(BaseLearner):
323324
... optimizer.teach(X[query_idx].reshape(1, -1), y[query_idx].reshape(1, -1))
324325
325326
"""
326-
def __init__(self, *args, **kwargs) -> None:
327-
super(BayesianOptimizer, self).__init__(*args, **kwargs)
327+
def __init__(self,
328+
estimator: BaseEstimator,
329+
query_strategy: Callable = max_EI,
330+
X_training: Optional[modALinput] = None,
331+
y_training: Optional[modALinput] = None,
332+
bootstrap_init: bool = False,
333+
**fit_kwargs) -> None:
334+
super(BayesianOptimizer, self).__init__(estimator, query_strategy,
335+
X_training, y_training, bootstrap_init, **fit_kwargs)
328336
# setting the maximum value
329337
if self.y_training is not None:
330338
max_idx = np.argmax(self.y_training)

0 commit comments

Comments (0)