
Commit 848cf78

bwengals authored and Junpeng Lao committed
API change in find_MAP (#2539)
* small typo fix in ValueGradFunction docstring, extra_args -> extra_vars
* added different interface to scipy optimizers, removed errors
* change error if optimization result is bad to warning
* remove test for optimization error, remove transform=None, update find_MAP call args
* update find_MAP call arg
* small docstring change
* remove blank lines, remove extraneous callback arg
* remove unused imports
* test fails with precision a bit off, 9.996 vs. 10. Switching to previous default method of BFGS.
* removed optimization check (since 'return_raw' is an option), added 'include_transformed'
* remove unused import
* need include_transformed=True
1 parent ea8f872
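The change replaces the old fmin=<scipy function> keyword with a method=<name> string that is forwarded to scipy.optimize.minimize. A minimal before/after sketch of a call site (the toy model below is illustrative, not part of this commit):

import numpy as np
import pymc3 as pm

with pm.Model():
    mu = pm.Normal('mu', mu=0., sd=1.)
    pm.Normal('y', mu=mu, sd=1., observed=np.random.randn(100))

    # Old interface (before this change): pass a scipy optimizer function, e.g.
    #     import scipy.optimize as opt
    #     start = pm.find_MAP(fmin=opt.fmin_powell)
    # New interface: name a scipy.optimize.minimize method as a string.
    start = pm.find_MAP(method="Powell")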

File tree: 6 files changed, +141 / -141 lines changed

pymc3/model.py

Lines changed: 1 addition & 1 deletion
@@ -329,7 +329,7 @@ class ValueGradFunction(object):
         The value that we compute with its gradient.
     grad_vars : list of named theano variables or None
         The arguments with respect to which the gradient is computed.
-    extra_args : list of named theano variables or None
+    extra_vars : list of named theano variables or None
         Other arguments of the function that are assumed constant. They
         are stored in shared variables and can be set using
         `set_extra_values`.

pymc3/sampling.py

Lines changed: 2 additions & 2 deletions
@@ -842,7 +842,7 @@ def init_nuts(init='auto', njobs=1, n_init=500000, model=None,
         if njobs == 1:
             start = start[0]
     elif init == 'advi_map':
-        start = pm.find_MAP()
+        start = pm.find_MAP(include_transformed=True)
         approx = pm.MeanField(model=model, start=start)
         pm.fit(
             random_seed=random_seed,
@@ -859,7 +859,7 @@ def init_nuts(init='auto', njobs=1, n_init=500000, model=None,
         if njobs == 1:
             start = start[0]
     elif init == 'map':
-        start = pm.find_MAP()
+        start = pm.find_MAP(include_transformed=True)
         cov = pm.find_hessian(point=start)
         start = [start] * njobs
         potential = quadpotential.QuadPotentialFull(cov)
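init_nuts now requests the transformed variables explicitly, because the ADVI and Hessian-based initializers need a starting value for every free variable in the unconstrained space. A rough sketch of the difference, assuming PyMC3's usual *_interval__ naming for the transformed variable:

import numpy as np
import pymc3 as pm

with pm.Model():
    sigma = pm.Uniform('sigma', 0.5, 1.5)   # bounded prior -> gets an interval transform
    pm.Normal('y', mu=0., sd=sigma, observed=np.random.randn(200))

    # Returns only the user-facing names, e.g. {'sigma': ...}
    point = pm.find_MAP()
    # Also includes the transformed free variable (something like
    # 'sigma_interval__'), which MeanField / QuadPotentialFull need as a
    # complete starting point in the unconstrained space.
    full_point = pm.find_MAP(include_transformed=True)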

pymc3/tests/test_examples.py

Lines changed: 1 addition & 2 deletions
@@ -2,7 +2,6 @@
 import numpy as np
 import pandas as pd
 import pymc3 as pm
-import scipy.optimize as opt
 import theano.tensor as tt
 import pytest
 import theano
@@ -195,7 +194,7 @@ def build_model(self):

     def test_run(self):
         with self.build_model():
-            start = pm.find_MAP(fmin=opt.fmin_powell)
+            start = pm.find_MAP(method="Powell")
             pm.sample(50, pm.Slice(), start=start)

pymc3/tests/test_starting.py

Lines changed: 6 additions & 19 deletions
@@ -2,7 +2,7 @@
 import numpy as np
 from pymc3.tuning import starting
 from pymc3 import Model, Uniform, Normal, Beta, Binomial, find_MAP, Point
-from .models import simple_model, non_normal, exponential_beta, simple_arbitrary_det
+from .models import simple_model, non_normal, simple_arbitrary_det
 from .helpers import select_by_precision


@@ -20,19 +20,6 @@ def test_accuracy_non_normal():
     close_to(newstart['x'], mu, select_by_precision(float64=1e-5, float32=1E-4))


-def test_errors():
-    _, model, _ = exponential_beta(2)
-    with model:
-        try:
-            newstart = find_MAP(Point(x=[-.5, .01], y=[.5, 4.4]))
-        except ValueError as e:
-            msg = str(e)
-            assert "x.logp" in msg, msg
-            assert "x.value" not in msg, msg
-        else:
-            assert False, newstart
-
-
 def test_find_MAP_discrete():
     tol = 2.0**-11
     alpha = 4
@@ -41,8 +28,8 @@ def test_find_MAP_discrete():
     yes = 15

     with Model() as model:
-        p = Beta('p', alpha, beta, transform=None)
-        Binomial('ss', n=n, p=p, transform=None)
+        p = Beta('p', alpha, beta)
+        Binomial('ss', n=n, p=p)
         Binomial('s', n=n, p=p, observed=yes)

         map_est1 = starting.find_MAP()
@@ -68,14 +55,14 @@ def test_find_MAP():
     data = (data - np.mean(data)) / np.std(data)

     with Model():
-        mu = Uniform('mu', -1, 1, transform=None)
-        sigma = Uniform('sigma', .5, 1.5, transform=None)
+        mu = Uniform('mu', -1, 1)
+        sigma = Uniform('sigma', .5, 1.5)
         Normal('y', mu=mu, tau=sigma**-2, observed=data)

         # Test gradient minimization
         map_est1 = starting.find_MAP()
         # Test non-gradient minimization
-        map_est2 = starting.find_MAP(fmin=starting.optimize.fmin_powell)
+        map_est2 = starting.find_MAP(method="Powell")

         close_to(map_est1['mu'], 0, tol)
         close_to(map_est1['sigma'], 1, tol)
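These tests drop both the transform=None workaround and the raw scipy fmin_* handles. A condensed sketch of the updated test_find_MAP pattern, with synthetic data purely for illustration:

import numpy as np
import pymc3 as pm

data = np.random.randn(100)

with pm.Model():
    # No transform=None workaround needed: find_MAP optimizes in the
    # transformed (unconstrained) space and reports the estimate back on
    # the original scale.
    mu = pm.Uniform('mu', -1, 1)
    sigma = pm.Uniform('sigma', .5, 1.5)
    pm.Normal('y', mu=mu, tau=sigma**-2, observed=data)

    map_grad = pm.find_MAP()                   # default gradient-based optimizer
    map_powell = pm.find_MAP(method="Powell")  # gradient-free alternative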

pymc3/tests/test_tuning.py

Lines changed: 2 additions & 2 deletions
@@ -22,13 +22,13 @@ def test_mle_jacobian():

     start, model, _ = models.simple_normal(bounded_prior=False)
     with model:
-        map_estimate = find_MAP(model=model)
+        map_estimate = find_MAP(method="BFGS", model=model)

     rtol = 1E-5  # this rtol should work on both floatX precisions
     np.testing.assert_allclose(map_estimate["mu_i"], truth, rtol=rtol)

     start, model, _ = models.simple_normal(bounded_prior=True)
     with model:
-        map_estimate = find_MAP(model=model)
+        map_estimate = find_MAP(method="BFGS", model=model)

     np.testing.assert_allclose(map_estimate["mu_i"], truth, rtol=rtol)
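test_mle_jacobian now pins the optimizer to BFGS explicitly. The commit message also mentions a return_raw option; assuming it hands back the raw scipy result alongside the point estimate, a sketch of how convergence could be checked now that the hard optimization error was downgraded to a warning:

import numpy as np
import pymc3 as pm

with pm.Model():
    mu = pm.Normal('mu', mu=0., sd=10.)
    pm.Normal('y', mu=mu, sd=1., observed=np.random.randn(50))

    # return_raw is assumed (per the commit message) to also return the
    # underlying scipy OptimizeResult; a failed optimization is now reported
    # as a warning instead of raising an error, so check convergence manually.
    map_estimate, opt_result = pm.find_MAP(method="BFGS", return_raw=True)
    if not opt_result.success:
        print("Optimization did not converge:", opt_result.message)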
