@@ -325,7 +325,8 @@ def adagrad(grad, param, learning_rate, epsilon, n):
                               tt.sqrt(accu_sum + epsilon))
     return updates
 
-def sample_vp(vparams, draws=1000, model=None, random_seed=20090425):
+def sample_vp(vparams, draws=1000, model=None, random_seed=20090425,
+              hide_transformed=True):
     """Draw samples from variational posterior.
 
     Parameters
@@ -338,6 +339,8 @@ def sample_vp(vparams, draws=1000, model=None, random_seed=20090425):
         Probabilistic model.
     random_seed : int
         Seed of random number generator.
+    hide_transformed : bool
+        If False, transformed variables are also sampled. Default is True.
 
     Returns
     -------
@@ -366,8 +369,13 @@ def sample_vp(vparams, draws=1000, model=None, random_seed=20090425):
     samples = theano.clone(vars, updates)
     f = theano.function([], samples)
 
+    # Random variables which will be sampled
+    vars_sampled = [v for v in model.unobserved_RVs if not str(v).endswith('_')] \
+        if hide_transformed else \
+        [v for v in model.unobserved_RVs]
+
     varnames = [str(var) for var in model.unobserved_RVs]
-    trace = NDArray(model=model, vars=model.unobserved_RVs)
+    trace = NDArray(model=model, vars=vars_sampled)
     trace.setup(draws=draws, chain=0)
 
     for i in range(draws):
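For context, a minimal, hypothetical usage sketch of the new flag follows (not part of this diff). It assumes sample_vp() and an advi() fitting function are exposed under pymc3.variational, and that a HalfNormal prior introduces an automatically transformed free variable whose name ends with '_', which is what the filter above keys on; only the sample_vp() signature itself comes from the diff.

    # Hypothetical usage sketch; the module paths and advi() call are
    # assumptions, only sample_vp()'s signature is taken from the diff above.
    import pymc3 as pm

    with pm.Model() as model:
        mu = pm.Normal('mu', mu=0, sd=1)
        sd = pm.HalfNormal('sd', sd=1)   # adds a log-transformed variable ending in '_'
        pm.Normal('obs', mu=mu, sd=sd, observed=[0.1, -0.3, 0.2])

        vparams = pm.variational.advi(n=10000)

        # Default: transformed variables are hidden from the returned trace.
        trace = pm.variational.sample_vp(vparams, draws=500, model=model)

        # Pass hide_transformed=False to keep the transformed variables as well.
        trace_all = pm.variational.sample_vp(vparams, draws=500, model=model,
                                             hide_transformed=False)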