Skip to content

Commit e35e0db (parent 5bfa13f)

add type annotations for adaptive/tests/test_learner1d.py

File tree: 1 file changed (+21 additions, -18 deletions)

adaptive/tests/test_learner1d.py

Lines changed: 21 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import random
2+
from typing import Tuple, Union
23

34
import numpy as np
45

@@ -7,7 +8,7 @@
78
from adaptive.runner import simple
89

910

10-
def test_pending_loss_intervals():
11+
def test_pending_loss_intervals() -> None:
1112
# https://github.com/python-adaptive/adaptive/issues/40
1213
learner = Learner1D(lambda x: x, (0, 4))
1314

@@ -21,7 +22,7 @@ def test_pending_loss_intervals():
2122
assert set(learner.losses_combined.keys()) == {(0, 1), (1, 2), (2, 3.5), (3.5, 4.0)}
2223

2324

24-
def test_loss_interpolation_for_unasked_point():
25+
def test_loss_interpolation_for_unasked_point() -> None:
2526
# https://github.com/python-adaptive/adaptive/issues/40
2627
learner = Learner1D(lambda x: x, (0, 4))
2728

@@ -53,7 +54,7 @@ def test_loss_interpolation_for_unasked_point():
5354
}
5455

5556

56-
def test_first_iteration():
57+
def test_first_iteration() -> None:
5758
"""Edge cases where we ask for a few points at the start."""
5859
learner = Learner1D(lambda x: None, (-1, 1))
5960
points, loss_improvements = learner.ask(2)
@@ -87,7 +88,7 @@ def test_first_iteration():
8788
assert points == [1]
8889

8990

90-
def test_loss_interpolation():
91+
def test_loss_interpolation() -> None:
9192
learner = Learner1D(lambda _: 0, bounds=(-1, 1))
9293

9394
learner.tell(-1, 0)
@@ -104,7 +105,9 @@ def test_loss_interpolation():
104105
assert abs(expected_loss - loss) < 1e-15, (expected_loss, loss)
105106

106107

107-
def _run_on_discontinuity(x_0, bounds):
108+
def _run_on_discontinuity(
109+
x_0: Union[int, float], bounds: Union[Tuple[int, int], Tuple[float, float]]
110+
) -> Learner1D:
108111
def f(x):
109112
return -1 if x < x_0 else +1
110113

@@ -116,7 +119,7 @@ def f(x):
116119
return learner
117120

118121

119-
def test_termination_on_discontinuities():
122+
def test_termination_on_discontinuities() -> None:
120123

121124
learner = _run_on_discontinuity(0, (-1, 1))
122125
smallest_interval = min(abs(a - b) for a, b in learner.losses.keys())
@@ -131,7 +134,7 @@ def test_termination_on_discontinuities():
131134
assert smallest_interval >= 0.5e3 * np.finfo(float).eps
132135

133136

134-
def test_order_adding_points():
137+
def test_order_adding_points() -> None:
135138
# and https://github.com/python-adaptive/adaptive/issues/41
136139
learner = Learner1D(lambda x: x, (0, 1))
137140
learner.tell_many([1, 0, 0.5], [0, 0, 0])
@@ -140,7 +143,7 @@ def test_order_adding_points():
140143
learner.ask(1)
141144

142145

143-
def test_adding_existing_point_passes_silently():
146+
def test_adding_existing_point_passes_silently() -> None:
144147
# See https://github.com/python-adaptive/adaptive/issues/42
145148
learner = Learner1D(lambda x: x, (0, 4))
146149
learner.tell(0, 0)
@@ -149,7 +152,7 @@ def test_adding_existing_point_passes_silently():
149152
learner.tell(1, 100)
150153

151154

152-
def test_loss_at_machine_precision_interval_is_zero():
155+
def test_loss_at_machine_precision_interval_is_zero() -> None:
153156
"""The loss of an interval smaller than _dx_eps
154157
should be set to zero."""
155158

@@ -166,11 +169,11 @@ def goal(l):
166169
assert learner.npoints != 1000
167170

168171

169-
def small_deviations(x):
172+
def small_deviations(x: float) -> Union[int, float]:
170173
return 0 if x <= 1 else 1 + 10 ** (-random.randint(12, 14))
171174

172175

173-
def test_small_deviations():
176+
def test_small_deviations() -> None:
174177
"""This tests whether the Learner1D can handle small deviations.
175178
See https://gitlab.kwant-project.org/qt/adaptive/merge_requests/73 and
176179
https://github.com/python-adaptive/adaptive/issues/78."""
@@ -205,7 +208,7 @@ def test_small_deviations():
205208
break
206209

207210

208-
def test_uniform_sampling1D_v2():
211+
def test_uniform_sampling1D_v2() -> None:
209212
def check(known, expect):
210213
def f(x):
211214
return x
@@ -227,7 +230,7 @@ def f(x):
227230
check([-1, -0.5], {-0.75, 0.25, 1})
228231

229232

230-
def test_add_data_unordered():
233+
def test_add_data_unordered() -> None:
231234
# see https://github.com/python-adaptive/adaptive/issues/44
232235
learner = Learner1D(lambda x: x, bounds=(-1, 1))
233236
xs = [-1, 1, 0]
@@ -239,14 +242,14 @@ def test_add_data_unordered():
239242
learner.ask(3)
240243

241244

242-
def test_ask_does_not_return_known_points_when_returning_bounds():
245+
def test_ask_does_not_return_known_points_when_returning_bounds() -> None:
243246
learner = Learner1D(lambda x: None, (-1, 1))
244247
learner.tell(0, 0)
245248
points, _ = learner.ask(3)
246249
assert 0 not in points
247250

248251

249-
def test_tell_many():
252+
def test_tell_many() -> None:
250253
def f(x, offset=0.123214):
251254
a = 0.01
252255
return (
@@ -355,7 +358,7 @@ def _random_run(learner, learner2, scale_doubling=True):
355358
test_equal(learner, learner2)
356359

357360

358-
def test_curvature_loss():
361+
def test_curvature_loss() -> None:
359362
def f(x):
360363
return np.tanh(20 * x)
361364

@@ -366,7 +369,7 @@ def f(x):
366369
assert learner.npoints > 100
367370

368371

369-
def test_curvature_loss_vectors():
372+
def test_curvature_loss_vectors() -> None:
370373
def f(x):
371374
return np.tanh(20 * x), np.tanh(20 * (x - 0.4))
372375

@@ -377,7 +380,7 @@ def f(x):
377380
assert learner.npoints > 100
378381

379382

380-
def test_NaN_loss():
383+
def test_NaN_loss() -> None:
381384
# see https://github.com/python-adaptive/adaptive/issues/145
382385
def f(x):
383386
a = 0.01

0 commit comments

Comments (0)