# Modified from original implementation by Dominik Wabersich (2013)

from ..core import *
from .arraystep import *
from numpy import floor, abs, atleast_1d
from numpy.random import standard_exponential, random, uniform

__all__ = ['Slice']


class Slice(ArrayStep):
    """Slice sampler using the stepping-out and shrinkage procedures (Neal, 2003)."""

    def __init__(self, vars, w=1, m=20, n_tune=0,
                 tune=True, tune_interval=100, model=None):

        model = modelcontext(model)

        self.vars = vars
        self.w = w                  # initial slice width
        self.m = m                  # cap on the number of stepping-out expansions
        self.n_tune = n_tune        # jump widths to collect before adapting w
        self.w_tune = []
        self.tune = tune
        self.tune_interval = tune_interval
        self.model = model

        super(Slice, self).__init__(vars, [model.logpc])

    def astep(self, q0, logp):

        # Draw the log slice level: logp(q0) minus an Exp(1) variate, which is
        # equivalent to log(u) + logp(q0) for u ~ Uniform(0, 1)
        y = logp(q0) - standard_exponential()

        # Stepping-out procedure: place an interval of width w at random around
        # q0, then expand each end (at most m - 1 steps in total) until it lies
        # outside the slice
        L = q0 - self.w * random()
        R = L + self.w
        J = floor(self.m * random())
        K = (self.m - 1) - J

        while J > 0 and y < logp(L):
            L = L - self.w
            J = J - 1

        while K > 0 and y < logp(R):
            R = R + self.w
            K = K - 1

        # Sample uniformly from the slice
        q_new = atleast_1d(uniform(L, R))
        y_new = logp(q_new)

        while y_new < y:
            # Rejected point: shrink the bracket towards q0 and resample
            if q_new < q0:
                L = q_new
            else:
                R = q_new
            q_new = atleast_1d(uniform(L, R))
            y_new = logp(q_new)

        if self.tune:
            # Tune the slice width: record the jump distance and, once more
            # than n_tune distances have been collected, set w to twice the
            # average jump
            self.w_tune.append(abs(q0 - q_new))
            if len(self.w_tune) > self.n_tune:
                self.w = 2 * sum(self.w_tune) / len(self.w_tune)

        return q_new
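
For reference, the stepping-out and shrinkage update that `astep` implements can be exercised on its own; the sketch below mirrors the same logic against a standard normal log-density using plain NumPy. The `slice_update` helper and the toy target are illustrative and not part of this commit.

```python
# Standalone sketch of the stepping-out / shrinkage slice update (illustration
# only; slice_update and the toy target are not part of the original file).
import numpy as np

def slice_update(q0, logp, w=1.0, m=20):
    # Log slice level: logp(q0) minus an Exp(1) draw
    y = logp(q0) - np.random.standard_exponential()

    # Stepping out: random interval of width w around q0, expanded until both
    # ends fall outside the slice (at most m - 1 expansions in total)
    L = q0 - w * np.random.random()
    R = L + w
    J = int(np.floor(m * np.random.random()))
    K = (m - 1) - J
    while J > 0 and y < logp(L):
        L -= w
        J -= 1
    while K > 0 and y < logp(R):
        R += w
        K -= 1

    # Shrinkage: sample uniformly on [L, R], narrowing towards q0 on rejection
    q = np.random.uniform(L, R)
    while logp(q) < y:
        if q < q0:
            L = q
        else:
            R = q
        q = np.random.uniform(L, R)
    return q

logp = lambda x: -0.5 * x ** 2          # standard normal log-density (up to a constant)
q, draws = 0.0, []
for _ in range(5000):
    q = slice_update(q, logp)
    draws.append(q)
print(np.mean(draws), np.std(draws))    # expect roughly 0 and 1
```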
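
A minimal usage sketch, assuming the surrounding package exposes the model-context API implied by `modelcontext` above; `Model`, `Normal`, and `sample` are assumptions about that API rather than names confirmed by this file.

```python
# Hypothetical usage; Model, Normal and sample are assumed to come from the
# surrounding package and are not defined in this commit.
with Model() as model:
    x = Normal('x', mu=0., tau=1.)
    step = Slice([x], w=1, m=20)
    trace = sample(2000, step)
```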