Skip to content

Commit d4359a4

Browse files
committed
Replace point goals
1 parent 30bccb7 commit d4359a4

File tree

7 files changed

+15
-15
lines changed

7 files changed

+15
-15
lines changed

adaptive/tests/test_learner1d.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -298,7 +298,7 @@ def test_equal(l1, l2):
298298
for function in [f, f_vec]:
299299
learner = Learner1D(function, bounds=(-1, 1))
300300
learner2 = Learner1D(function, bounds=(-1, 1))
301-
simple(learner, goal=lambda l: l.npoints > 200)
301+
simple(learner, goal=200)
302302
xs, ys = zip(*learner.data.items())
303303

304304
# Make the scale huge to not get a scale doubling
@@ -374,7 +374,7 @@ def f(x):
374374
loss = curvature_loss_function()
375375
assert loss.nth_neighbors == 1
376376
learner = Learner1D(f, (-1, 1), loss_per_interval=loss)
377-
simple(learner, goal=lambda l: l.npoints > 100)
377+
simple(learner, goal=100)
378378
assert learner.npoints > 100
379379

380380

@@ -385,7 +385,7 @@ def f(x):
385385
loss = curvature_loss_function()
386386
assert loss.nth_neighbors == 1
387387
learner = Learner1D(f, (-1, 1), loss_per_interval=loss)
388-
simple(learner, goal=lambda l: l.npoints > 100)
388+
simple(learner, goal=100)
389389
assert learner.npoints > 100
390390

391391

@@ -398,7 +398,7 @@ def f(x):
398398
return x + a**2 / (a**2 + x**2)
399399

400400
learner = Learner1D(f, bounds=(-1, 1))
401-
simple(learner, lambda l: l.npoints > 100)
401+
simple(learner, 100)
402402

403403

404404
def test_inf_loss_with_missing_bounds():
@@ -408,6 +408,6 @@ def test_inf_loss_with_missing_bounds():
408408
loss_per_interval=curvature_loss_function(),
409409
)
410410
# must be done in parallel because otherwise the bounds will be evaluated first
411-
BlockingRunner(learner, goal=lambda learner: learner.loss() < 0.01)
411+
BlockingRunner(learner, goal=0.01)
412412

413413
learner.npoints > 20

adaptive/tests/test_runner.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ def f(x):
4343
return x
4444

4545
learner = Learner1D(f, (-1, 1))
46-
runner(learner, lambda l: l.npoints > 10)
46+
runner(learner, 10)
4747
assert len(learner.data) > 10
4848

4949

@@ -152,5 +152,5 @@ def test_loky_executor(loky_executor):
152152

153153
def test_default_executor():
154154
learner = Learner1D(linear, (-1, 1))
155-
runner = AsyncRunner(learner, goal=lambda l: l.npoints > 10)
155+
runner = AsyncRunner(learner, goal=10)
156156
asyncio.get_event_loop().run_until_complete(runner.task)

adaptive/tests/unit/test_learnernd_integration.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -16,29 +16,29 @@ def ring_of_fire(xy, d=0.75):
1616

1717
def test_learnerND_runs_to_10_points():
1818
learner = LearnerND(ring_of_fire, bounds=[(-1, 1), (-1, 1)])
19-
SimpleRunner(learner, goal=lambda l: l.npoints >= 10)
19+
SimpleRunner(learner, goal=10)
2020
assert learner.npoints == 10
2121

2222

2323
@pytest.mark.parametrize("execution_number", range(5))
2424
def test_learnerND_runs_to_10_points_Blocking(execution_number):
2525
learner = LearnerND(ring_of_fire, bounds=[(-1, 1), (-1, 1)])
26-
BlockingRunner(learner, goal=lambda l: l.npoints >= 10)
26+
BlockingRunner(learner, goal=10)
2727
assert learner.npoints >= 10
2828

2929

3030
def test_learnerND_curvature_runs_to_10_points():
3131
loss = curvature_loss_function()
3232
learner = LearnerND(ring_of_fire, bounds=[(-1, 1), (-1, 1)], loss_per_simplex=loss)
33-
SimpleRunner(learner, goal=lambda l: l.npoints >= 10)
33+
SimpleRunner(learner, goal=10)
3434
assert learner.npoints == 10
3535

3636

3737
@pytest.mark.parametrize("execution_number", range(5))
3838
def test_learnerND_curvature_runs_to_10_points_Blocking(execution_number):
3939
loss = curvature_loss_function()
4040
learner = LearnerND(ring_of_fire, bounds=[(-1, 1), (-1, 1)], loss_per_simplex=loss)
41-
BlockingRunner(learner, goal=lambda l: l.npoints >= 10)
41+
BlockingRunner(learner, goal=10)
4242
assert learner.npoints >= 10
4343

4444

docs/source/tutorial/tutorial.Learner1D.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,7 @@ learner_h = adaptive.Learner1D(sin_exp, (-1, 1), loss_per_interval=uniform_loss)
187187
learner_1 = adaptive.Learner1D(sin_exp, (-1, 1), loss_per_interval=default_loss)
188188
learner_2 = adaptive.Learner1D(sin_exp, (-1, 1), loss_per_interval=curvature_loss)
189189
190-
npoints_goal = lambda l: l.npoints >= 100
190+
npoints_goal = 100
191191
# adaptive.runner.simple is a non parallel blocking runner.
192192
adaptive.runner.simple(learner_h, goal=npoints_goal)
193193
adaptive.runner.simple(learner_1, goal=npoints_goal)

docs/source/tutorial/tutorial.LearnerND.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,7 @@ b = [(-1, -1, -1), (-1, 1, -1), (-1, -1, 1), (-1, 1, 1), (1, 1, -1), (1, -1, -1)
123123
hull = scipy.spatial.ConvexHull(b)
124124
125125
learner = adaptive.LearnerND(f, hull)
126-
adaptive.BlockingRunner(learner, goal=lambda l: l.npoints > 2000)
126+
adaptive.BlockingRunner(learner, goal=2000)
127127
128128
learner.plot_isosurface(-0.5)
129129
```

docs/source/tutorial/tutorial.SKOptLearner.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ learner = adaptive.SKOptLearner(
4747
acq_func="gp_hedge",
4848
acq_optimizer="lbfgs",
4949
)
50-
runner = adaptive.Runner(learner, ntasks=1, goal=lambda l: l.npoints > 40)
50+
runner = adaptive.Runner(learner, ntasks=1, goal=40)
5151
```
5252

5353
```{code-cell} ipython3

docs/source/tutorial/tutorial.advanced-topics.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,7 @@ def slow_f(x):
9090
9191
9292
learner = adaptive.Learner1D(slow_f, bounds=[0, 1])
93-
runner = adaptive.Runner(learner, goal=lambda l: l.npoints > 100)
93+
runner = adaptive.Runner(learner, goal=100)
9494
runner.start_periodic_saving(
9595
save_kwargs=dict(fname="data/periodic_example.p"), interval=6
9696
)

0 commit comments

Comments
 (0)