
Commit 39ed379

Author: Junpeng Lao (committed)

[WIP] Update examples

* use new sampling api (advi init, etc)
* unify style
1 parent 0e54ce6 commit 39ed379

3 files changed: +46, -44 lines

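The headline change across all three examples is replacing manual step setup (find_hessian / Metropolis) and explicit variational.advi() calls with the unified pm.sample entry point, which can initialize NUTS via ADVI. A minimal sketch of that API on a toy model, assuming a PyMC3 release that supports the init argument (roughly 3.1+); the model and data below are illustrative, not from this commit:

    import numpy as np
    import pymc3 as pm

    # toy data, purely illustrative
    y_obs = np.random.randn(100)

    with pm.Model():
        mu = pm.Normal('mu', 0., sd=10.)
        sigma = pm.HalfCauchy('sigma', 5.)
        pm.Normal('y', mu=mu, sd=sigma, observed=y_obs)

        # NUTS is auto-assigned and initialized with ADVI,
        # replacing the old manual step/start setup
        trace = pm.sample(1000, init='advi', n_init=10000)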

pymc3/examples/arbitrary_stochastic.py

Lines changed: 12 additions & 10 deletions
@@ -4,23 +4,25 @@
 
 
 def build_model():
+    # data
+    failure = np.array([0., 1.])
+    value = np.array([1., 0.])
+
+    # custom log-likelihood
+    def logp(failure, value):
+        return tt.sum(failure * tt.log(lam) - lam * value)
+
+    # model
     with pm.Model() as model:
-        lam = pm.Exponential('lam', 1)
-        failure = np.array([0, 1])
-        value = np.array([1, 0])
-
-        def logp(failure, value):
-            return tt.sum(failure * np.log(lam) - lam * value)
+        lam = pm.Exponential('lam', 1.)
         pm.DensityDist('x', logp, observed={'failure': failure, 'value': value})
     return model
 
 
 def run(n_samples=3000):
     model = build_model()
-    start = model.test_point
-    h = pm.find_hessian(start, model=model)
-    step = pm.Metropolis(model.vars, h, blocked=True, model=model)
-    trace = pm.sample(n_samples, step=step, start=start, model=model)
+    with model:
+        trace = pm.sample(n_samples)
     return trace
 
 if __name__ == "__main__":
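For reference, the pattern this file now demonstrates is pm.DensityDist with a dict of observed values: each key of the dict is passed to the custom logp function as a keyword argument. A self-contained sketch of the updated example, with the imports assumed from the top of the file (they sit outside this hunk):

    import numpy as np
    import pymc3 as pm
    import theano.tensor as tt


    def build_model():
        # observed data
        failure = np.array([0., 1.])
        value = np.array([1., 0.])

        # custom log-likelihood; `failure` and `value` arrive as keyword
        # arguments taken from the `observed` dict below, and `lam` is
        # resolved lazily once logp is evaluated inside the model block
        def logp(failure, value):
            return tt.sum(failure * tt.log(lam) - lam * value)

        with pm.Model() as model:
            lam = pm.Exponential('lam', 1.)
            pm.DensityDist('x', logp, observed={'failure': failure, 'value': value})
        return model


    def run(n_samples=3000):
        model = build_model()
        with model:
            trace = pm.sample(n_samples)
        return trace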

pymc3/examples/arma_example.py

Lines changed: 11 additions & 12 deletions
@@ -1,10 +1,10 @@
-from pymc3 import Normal, sample, Model, plots, Potential, variational, HalfCauchy
+import pymc3 as pm
 from theano import scan, shared
 
 import numpy as np
 """
 ARMA example
-It is interesting to note just how much more compact this is that the original STAN example
+It is interesting to note just how much more compact this is than the original STAN example
 
 The original implementation is in the STAN documentation by Gelman et al and is reproduced below
 
@@ -52,11 +52,11 @@
 
 def build_model():
     y = shared(np.array([15, 10, 16, 11, 9, 11, 10, 18], dtype=np.float32))
-    with Model() as arma_model:
-        sigma = HalfCauchy('sigma', 5)
-        theta = Normal('theta', 0, sd=2)
-        phi = Normal('phi', 0, sd=2)
-        mu = Normal('mu', 0, sd=10)
+    with pm.Model() as arma_model:
+        sigma = pm.HalfCauchy('sigma', 5.)
+        theta = pm.Normal('theta', 0., sd=2.)
+        phi = pm.Normal('phi', 0., sd=2.)
+        mu = pm.Normal('mu', 0., sd=10.)
 
         err0 = y[0] - (mu + phi * mu)
 
@@ -69,19 +69,18 @@ def calc_next(last_y, this_y, err, mu, phi, theta):
                       outputs_info=[err0],
                       non_sequences=[mu, phi, theta])
 
-        Potential('like', Normal.dist(0, sd=sigma).logp(err))
-        variational.advi(n=2000)
+        pm.Potential('like', pm.Normal.dist(0, sd=sigma).logp(err))
     return arma_model
 
 
 def run(n_samples=1000):
     model = build_model()
     with model:
-        trace = sample(draws=n_samples)
+        trace = pm.sample(draws=n_samples)
 
     burn = n_samples // 10
-    plots.traceplot(trace[burn:])
-    plots.forestplot(trace[burn:])
+    pm.plots.traceplot(trace[burn:])
+    pm.plots.forestplot(trace[burn:])
 
 
 if __name__ == '__main__':
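One removal worth noting: the explicit variational.advi(n=2000) call inside build_model is gone, because initialization is now handled by pm.sample itself. The likelihood still enters through pm.Potential rather than an observed variable, since the error series err is produced by a recursive theano scan. A condensed sketch of that pattern; the calc_next body and the scan sequences argument are reconstructed and should be treated as assumptions, since only fragments of them appear in this hunk:

    import numpy as np
    import pymc3 as pm
    from theano import scan, shared

    y = shared(np.array([15, 10, 16, 11, 9, 11, 10, 18], dtype=np.float32))

    with pm.Model() as arma_model:
        sigma = pm.HalfCauchy('sigma', 5.)
        theta = pm.Normal('theta', 0., sd=2.)
        phi = pm.Normal('phi', 0., sd=2.)
        mu = pm.Normal('mu', 0., sd=10.)

        # residual for the first observation
        err0 = y[0] - (mu + phi * mu)

        # one ARMA(1,1) innovation per time step (assumed body)
        def calc_next(last_y, this_y, err, mu, phi, theta):
            nu_t = mu + phi * last_y + theta * err
            return this_y - nu_t

        # scan walks over consecutive (y[t-1], y[t]) pairs, threading err
        err, _ = scan(fn=calc_next,
                      sequences=dict(input=y, taps=[-1, 0]),
                      outputs_info=[err0],
                      non_sequences=[mu, phi, theta])

        # add the innovation log-density directly to the model's logp
        pm.Potential('like', pm.Normal.dist(0., sd=sigma).logp(err))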

pymc3/examples/baseball.py

Lines changed: 23 additions & 22 deletions
@@ -5,33 +5,34 @@
 
 import pymc3 as pm
 import numpy as np
-import theano
 
-data = np.loadtxt(pm.get_data('efron-morris-75-data.tsv'), delimiter="\t", skiprows=1, usecols=(2,3))
-
-atBats = data[:,0].astype(theano.config.floatX)
-hits = data[:,1].astype(theano.config.floatX)
-
-N = len( hits )
-
-model = pm.Model()
-
-# we want to bound the kappa below
-BoundedKappa = pm.Bound( pm.Pareto, lower=1.0 )
-
-with model:
-    phi = pm.Uniform( 'phi', lower=0.0, upper=1.0 )
-    kappa = BoundedKappa( 'kappa', alpha=1.0001, m=1.5 )
-    thetas = pm.Beta( 'thetas', alpha=phi*kappa, beta=(1.0-phi)*kappa, shape=N )
-    ys = pm.Binomial( 'ys', n=atBats, p=thetas, observed=hits )
-
-def run( n=100000 ):
+def build_model():
+    data = np.loadtxt(pm.get_data('efron-morris-75-data.tsv'), delimiter="\t",
+                      skiprows=1, usecols=(2,3))
+
+    atBats = pm.floatX(data[:,0])
+    hits = pm.floatX(data[:,1])
+
+    N = len(hits)
+
+    # we want to bound the kappa below
+    BoundedKappa = pm.Bound(pm.Pareto, lower=1.0)
+
+    with pm.Model() as model:
+        phi = pm.Uniform('phi', lower=0.0, upper=1.0)
+        kappa = BoundedKappa('kappa', alpha=1.0001, m=1.5)
+        thetas = pm.Beta('thetas', alpha=phi*kappa, beta=(1.0-phi)*kappa, shape=N)
+        ys = pm.Binomial('ys', n=atBats, p=thetas, observed=hits)
+    return model
+
+
+def run(n=2000):
+    model = build_model()
     with model:
         # initialize NUTS() with ADVI under the hood
-        trace = pm.sample( n )
+        trace = pm.sample(n, nuts_kwargs={'target_accept':.99})
 
         # drop some first samples as burnin
-        pm.traceplot( trace[1000:] )
+        pm.traceplot(trace[1000:])
 
 if __name__ == '__main__':
     run()
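The sampling change here is nuts_kwargs={'target_accept': .99}: in PyMC3 releases of this period pm.sample forwards that dict to the NUTS step method, and a higher target acceptance rate forces smaller step sizes, which typically removes divergences in hierarchical models like this one. A minimal sketch of the same knob on a toy hierarchical model (the model and numbers are illustrative, not from this commit):

    import numpy as np
    import pymc3 as pm

    # toy hit/at-bat style data, purely illustrative
    hits = np.array([3., 5., 2., 7.])
    at_bats = np.array([10., 12., 9., 15.])

    with pm.Model():
        phi = pm.Uniform('phi', lower=0., upper=1.)
        kappa = pm.HalfCauchy('kappa', 5.)
        thetas = pm.Beta('thetas', alpha=phi * kappa, beta=(1. - phi) * kappa,
                         shape=len(hits))
        pm.Binomial('ys', n=at_bats, p=thetas, observed=hits)

        # tighter target acceptance -> smaller NUTS step size, fewer divergences
        trace = pm.sample(2000, nuts_kwargs={'target_accept': 0.99})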
