Skip to content

Commit 3e83b93

Browse files
committed
Merge remote-tracking branch 'origin/master' into average1d
2 parents bb7960d + c7b8341 commit 3e83b93

15 files changed

+184
-10
lines changed

CHANGELOG.md

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,21 @@
11
# Changelog
22

3+
## [Unreleased](https://github.com/python-adaptive/adaptive/tree/HEAD)
4+
5+
[Full Changelog](https://github.com/python-adaptive/adaptive/compare/v0.12.2...HEAD)
6+
7+
**Closed issues:**
8+
9+
- Cryptic error when importing lmfit [\#314](https://github.com/python-adaptive/adaptive/issues/314)
10+
11+
## [v0.12.2](https://github.com/python-adaptive/adaptive/tree/v0.12.2) (2021-03-23)
12+
13+
[Full Changelog](https://github.com/python-adaptive/adaptive/compare/v0.12.1...v0.12.2)
14+
15+
**Merged pull requests:**
16+
17+
- raise an AttributeError when attribute doesn't exists, closes \#314 [\#315](https://github.com/python-adaptive/adaptive/pull/315) ([basnijholt](https://github.com/basnijholt))
18+
319
## [v0.12.1](https://github.com/python-adaptive/adaptive/tree/v0.12.1) (2021-03-23)
420

521
[Full Changelog](https://github.com/python-adaptive/adaptive/compare/v0.12.0...v0.12.1)

README.rst

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,12 @@
1111
``adaptive`` is an open-source Python library designed to
1212
make adaptive parallel function evaluation simple. With ``adaptive`` you
1313
just supply a function with its bounds, and it will be evaluated at the
14-
“best” points in parameter space. With just a few lines of code you can
15-
evaluate functions on a computing cluster, live-plot the data as it
16-
returns, and fine-tune the adaptive sampling algorithm.
14+
“best” points in parameter space, rather than unnecessarily computing *all* points on a dense grid.
15+
With just a few lines of code you can evaluate functions on a computing cluster,
16+
live-plot the data as it returns, and fine-tune the adaptive sampling algorithm.
17+
18+
``adaptive`` shines on computations where each evaluation of the function
19+
takes *at least* ≈100ms due to the overhead of picking potentially interesting points.
1720

1821
Run the ``adaptive`` example notebook `live on
1922
Binder <https://mybinder.org/v2/gh/python-adaptive/adaptive/master?filepath=example-notebook.ipynb>`_

adaptive/learner/average_learner.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -56,6 +56,10 @@ def __init__(self, function, atol=None, rtol=None, min_npoints=2):
5656
def n_requested(self):
5757
return self.npoints + len(self.pending_points)
5858

59+
def to_numpy(self):
60+
"""Data as NumPy array of size (npoints, 2) with seeds and values."""
61+
return np.array(sorted(self.data.items()))
62+
5963
def ask(self, n, tell_pending=True):
6064
points = list(range(self.n_requested, self.n_requested + n))
6165

@@ -114,9 +118,11 @@ def loss(self, real=True, *, n=None):
114118
if n < self.min_npoints:
115119
return np.inf
116120
standard_error = self.std / sqrt(n)
117-
return max(
118-
standard_error / self.atol, standard_error / abs(self.mean) / self.rtol
119-
)
121+
aloss = standard_error / self.atol
122+
rloss = standard_error / self.rtol
123+
if self.mean != 0:
124+
rloss /= abs(self.mean)
125+
return max(aloss, rloss)
120126

121127
def _loss_improvement(self, n):
122128
loss = self.loss()

adaptive/learner/integrator_coeffs.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -186,5 +186,8 @@ def _coefficients():
186186
return locals()
187187

188188

189-
def __getattr__(attr):
190-
return _coefficients()[attr]
189+
def __getattr__(name):
190+
try:
191+
return _coefficients()[name]
192+
except KeyError:
193+
raise AttributeError(f"module {__name__} has no attribute {name}")

adaptive/learner/integrator_learner.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -546,6 +546,10 @@ def plot(self):
546546
xs, ys = zip(*[(x, y) for ival in ivals for x, y in sorted(ival.data.items())])
547547
return hv.Path((xs, ys))
548548

549+
def to_numpy(self):
550+
"""Data as NumPy array of size (npoints, 2)."""
551+
return np.array(sorted(self.data.items()))
552+
549553
def _get_data(self):
550554
# Change the defaultdict of SortedSets to a normal dict of sets.
551555
x_mapping = {k: set(v) for k, v in self.x_mapping.items()}

adaptive/learner/learner1D.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,7 @@ def resolution_loss_function(min_length=0, max_length=1):
9696
... return x**2
9797
>>>
9898
>>> loss = resolution_loss_function(min_length=0.01, max_length=1)
99-
>>> learner = adaptive.Learner1D(f, bounds=[(-1, -1), (1, 1)], loss_per_triangle=loss)
99+
>>> learner = adaptive.Learner1D(f, bounds=(-1, 1), loss_per_interval=loss)
100100
"""
101101

102102
@uses_nth_neighbors(0)
@@ -269,6 +269,11 @@ def vdim(self):
269269
return 1
270270
return self._vdim
271271

272+
def to_numpy(self):
273+
"""Data as NumPy array of size ``(npoints, 2)`` if ``learner.function`` returns a scalar
274+
and ``(npoints, 1+vdim)`` if ``learner.function`` returns a vector of length ``vdim``."""
275+
return np.array([(x, *np.atleast_1d(y)) for x, y in sorted(self.data.items())])
276+
272277
@property
273278
def npoints(self):
274279
"""Number of evaluated points."""

adaptive/learner/learner2D.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -377,6 +377,13 @@ def xy_scale(self):
377377
else:
378378
return np.array([xy_scale[0], xy_scale[1] / self.aspect_ratio])
379379

380+
def to_numpy(self):
381+
"""Data as NumPy array of size ``(npoints, 3)`` if ``learner.function`` returns a scalar
382+
and ``(npoints, 2+vdim)`` if ``learner.function`` returns a vector of length ``vdim``."""
383+
return np.array(
384+
[(x, y, *np.atleast_1d(z)) for (x, y), z in sorted(self.data.items())]
385+
)
386+
380387
def _scale(self, points):
381388
points = np.asarray(points, dtype=float)
382389
return (points - self.xy_mean) / self.xy_scale

adaptive/learner/learnerND.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -379,6 +379,12 @@ def vdim(self):
379379
self._vdim = 1
380380
return self._vdim if self._vdim is not None else 1
381381

382+
def to_numpy(self):
383+
"""Data as NumPy array of size ``(npoints, dim+vdim)``, where ``dim`` is the
384+
size of the input dimension and ``vdim`` is the length of the return value
385+
of ``learner.function``."""
386+
return np.array([(*p, *np.atleast_1d(v)) for p, v in sorted(self.data.items())])
387+
382388
@property
383389
def bounds_are_done(self):
384390
return all(p in self.data for p in self._bounds_points)

adaptive/runner.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -412,6 +412,10 @@ def _run(self):
412412
remaining = self._remove_unfinished()
413413
if remaining:
414414
concurrent.wait(remaining)
415+
# Some futures get their result set, despite being cancelled.
416+
# see https://github.com/python-adaptive/adaptive/issues/319
417+
with_result = [f for f in remaining if not f.cancelled() and f.done()]
418+
self._process_futures(with_result)
415419
self._cleanup()
416420

417421
def elapsed_time(self):

adaptive/tests/test_average_learner.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,3 +59,11 @@ def constant_function(seed):
5959
)
6060
simple(learner, lambda l: l.loss() < 1)
6161
assert learner.npoints >= max(2, min_npoints)
62+
63+
64+
def test_zero_mean():
65+
# see https://github.com/python-adaptive/adaptive/issues/275
66+
learner = AverageLearner(None, rtol=0.01)
67+
learner.tell(0, -1)
68+
learner.tell(1, 1)
69+
learner.loss()

0 commit comments

Comments
 (0)