Commit ca9d665

add 'maybe_skip' because the 'SKOptLearner' might not be available
1 parent a17c921

1 file changed (+23, -12)

adaptive/tests/test_learners.py

Lines changed: 23 additions & 12 deletions
@@ -16,10 +16,17 @@
 import scipy.spatial
 
 from ..learner import (AverageLearner, BalancingLearner, DataSaver,
-                       IntegratorLearner, Learner1D, Learner2D, LearnerND, SKOptLearner)
+                       IntegratorLearner, Learner1D, Learner2D, LearnerND)
 from ..runner import simple
 
 
+try:
+    import skopt
+    from ..learner import SKOptLearner
+except ModuleNotFoundError:
+    SKOptLearner = None
+
+
 def generate_random_parametrization(f):
     """Return a realization of 'f' with parameters bound to random values.
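
The guarded import above is the core of the fix: scikit-optimize (imported as skopt) is an optional dependency, so importing SKOptLearner unconditionally would make the whole test module fail to import whenever skopt is absent. A minimal, self-contained sketch of the idiom, with illustrative module names that are not part of this commit:

# Guarded optional import: bind the missing name to a sentinel (None)
# instead of letting the ImportError abort module import. Code that
# uses the name must then check for None.
try:
    from mypackage.extras import OptionalFeature
except ModuleNotFoundError:
    OptionalFeature = None

(ModuleNotFoundError is the ImportError subclass raised for a missing module since Python 3.6; on older interpreters one would catch ImportError instead.)
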
@@ -60,6 +67,10 @@ def xfail(learner):
     return pytest.mark.xfail, learner
 
 
+def maybe_skip(learner):
+    return (pytest.mark.skip, learner) if learner is None else learner
+
+
 # All parameters except the first must be annotated with a callable that
 # returns a random value for that parameter.
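
maybe_skip mirrors the existing xfail helper: both hand run_with (below) a (marker, learner) tuple to unpack. The difference is that maybe_skip is conditional, attaching pytest.mark.skip only when the learner is None, i.e. when the optional import above failed. Roughly:

maybe_skip(None)       # -> (pytest.mark.skip, None); the test is skipped
maybe_skip(Learner1D)  # -> Learner1D, passed through unchanged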

@@ -100,15 +111,15 @@ def gaussian(n):
 def run_with(*learner_types):
     pars = []
     for l in learner_types:
-        is_xfail = isinstance(l, tuple)
-        if is_xfail:
-            xfail, l = l
+        has_marker = isinstance(l, tuple)
+        if has_marker:
+            marker, l = l
         for f, k in learner_function_combos[l]:
             # Check if learner was marked with our `xfail` decorator
             # XXX: doesn't work when feeding kwargs to xfail.
-            if is_xfail:
+            if has_marker:
                 pars.append(pytest.param(l, f, dict(k),
-                                          marks=[pytest.mark.xfail]))
+                                          marks=[marker]))
             else:
                 pars.append((l, f, dict(k)))
     return pytest.mark.parametrize('learner_type, f, learner_kwargs', pars)
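
Renaming is_xfail to has_marker generalizes the helper from "this learner is expected to fail" to "this learner carries some pytest mark", so the same unpacking now serves both xfail and maybe_skip. A simplified, self-contained sketch of the resulting behavior (one parameter instead of three; not the actual test file):

import pytest

def run_with_sketch(*learner_types):
    # Accept bare learner types or (marker, learner) tuples produced by
    # helpers such as xfail/maybe_skip; wrap marked entries in
    # pytest.param so pytest applies the mark to those cases only.
    pars = []
    for l in learner_types:
        if isinstance(l, tuple):
            marker, l = l
            pars.append(pytest.param(l, marks=[marker]))
        else:
            pars.append(l)
    return pytest.mark.parametrize('learner_type', pars)

Because the mark travels inside pytest.param, a skipped combination in this sketch is still collected and reported as skipped rather than silently dropped.
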
@@ -391,8 +402,8 @@ def test_balancing_learner(learner_type, f, learner_kwargs):
     assert all(l.npoints > 10 for l in learner.learners), [l.npoints for l in learner.learners]
 
 
-@run_with(Learner1D, Learner2D, LearnerND, AverageLearner, SKOptLearner,
-          IntegratorLearner)
+@run_with(Learner1D, Learner2D, LearnerND, AverageLearner,
+          maybe_skip(SKOptLearner), IntegratorLearner)
 def test_saving(learner_type, f, learner_kwargs):
     f = generate_random_parametrization(f)
     learner = learner_type(f, **learner_kwargs)
@@ -412,8 +423,8 @@ def test_saving(learner_type, f, learner_kwargs):
     os.remove(path)
 
 
-@run_with(Learner1D, Learner2D, LearnerND, AverageLearner, SKOptLearner,
-          IntegratorLearner)
+@run_with(Learner1D, Learner2D, LearnerND, AverageLearner,
+          maybe_skip(SKOptLearner), IntegratorLearner)
 def test_saving_of_balancing_learner(learner_type, f, learner_kwargs):
     f = generate_random_parametrization(f)
     learner = BalancingLearner([learner_type(f, **learner_kwargs)])
@@ -438,8 +449,8 @@ def test_saving_of_balancing_learner(learner_type, f, learner_kwargs):
     shutil.rmtree(folder)
 
 
-@run_with(Learner1D, Learner2D, LearnerND, AverageLearner, SKOptLearner,
-          IntegratorLearner)
+@run_with(Learner1D, Learner2D, LearnerND, AverageLearner,
+          maybe_skip(SKOptLearner), IntegratorLearner)
 def test_saving_with_datasaver(learner_type, f, learner_kwargs):
     f = generate_random_parametrization(f)
     g = lambda x: {'y': f(x), 't': random.random()}
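
At all three call sites the effect is the same: with scikit-optimize installed, maybe_skip(SKOptLearner) evaluates to SKOptLearner itself and the saving tests run exactly as before; without it, the tuple form carries pytest.mark.skip into run_with, so the suite degrades gracefully instead of erroring at import time. Hypothetical usage mirroring the decorated tests above:

@run_with(Learner1D, maybe_skip(SKOptLearner))
def test_example(learner_type, f, learner_kwargs):
    learner = learner_type(generate_random_parametrization(f), **learner_kwargs)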
