Skip to content

Commit c4d62d6

Browse files
committed
Increased test case coverage for multi_argmax and max_std_sampling
1 parent 4029dfd commit c4d62d6

File tree

2 files changed

+4
-9
lines changed

2 files changed

+4
-9
lines changed

tests/core_tests.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -164,7 +164,7 @@ def test_data_vstack(self):
164164

165165
def test_multi_argmax(self):
166166
for n_pool in range(2, 100):
167-
for n_instances in range(1, n_pool):
167+
for n_instances in range(1, n_pool+1):
168168
utility = np.zeros(n_pool)
169169
max_idx = np.random.choice(range(n_pool), size=n_instances, replace=False)
170170
utility[max_idx] = 1e-10 + np.random.rand(n_instances, )
@@ -1087,3 +1087,4 @@ def test_examples(self):
10871087

10881088
if __name__ == '__main__':
10891089
unittest.main(verbosity=2)
1090+
0

tests/example_tests/active_regression.py

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -6,16 +6,10 @@
66
from sklearn.gaussian_process import GaussianProcessRegressor
77
from sklearn.gaussian_process.kernels import WhiteKernel, RBF
88
from modAL.models import ActiveLearner
9+
from modAL.disagreement import max_std_sampling
910

1011
np.random.seed(0)
1112

12-
13-
# query strategy for regression
14-
def GP_regression_std(regressor, X):
15-
_, std = regressor.predict(X, return_std=True)
16-
query_idx = np.argmax(std)
17-
return query_idx, X[query_idx]
18-
1913
# generating the data
2014
X = np.random.choice(np.linspace(0, 20, 10000), size=200, replace=False).reshape(-1, 1)
2115
y = np.sin(X) + np.random.normal(scale=0.3, size=X.shape)
@@ -32,7 +26,7 @@ def GP_regression_std(regressor, X):
3226
# initializing the active learner
3327
regressor = ActiveLearner(
3428
estimator=GaussianProcessRegressor(kernel=kernel),
35-
query_strategy=GP_regression_std,
29+
query_strategy=max_std_sampling,
3630
X_training=X_initial.reshape(-1, 1), y_training=y_initial.reshape(-1, 1)
3731
)
3832

0 commit comments

Comments (0)