@@ -6,7 +6,7 @@ import DifferentiationInterface as DI
 using DocStringExtensions
 using DynamicPPL: Model, LogDensityFunction, VarInfo, AbstractVarInfo, link
 using LogDensityProblems: logdensity, logdensity_and_gradient
-using Random: Random, Xoshiro
+using Random: AbstractRNG, default_rng
 using Statistics: median
 using Test: @test
 
@@ -160,8 +160,8 @@ Everything else is optional, and can be categorised into several groups:
 
 Note that if the VarInfo is not specified (and thus automatically generated)
 the parameters in it will have been sampled from the prior of the model. If
-you want to seed the parameter generation, the easiest way is to pass a
-`rng` argument to the VarInfo constructor (i.e. do `VarInfo(rng, model)`).
+you want to seed the parameter generation for the VarInfo, you can pass the
+`rng` keyword argument, which will then be used to create the VarInfo.
 
 Finally, note that these only reflect the parameters used for _evaluating_
 the gradient. If you also want to control the parameters used for
@@ -214,7 +214,8 @@ function run_ad(
     benchmark::Bool=false,
     value_atol::AbstractFloat=1e-6,
     grad_atol::AbstractFloat=1e-6,
-    varinfo::AbstractVarInfo=link(VarInfo(model), model),
+    rng::AbstractRNG=default_rng(),
+    varinfo::AbstractVarInfo=link(VarInfo(rng, model), model),
     params::Union{Nothing,Vector{<:AbstractFloat}}=nothing,
     verbose=true,
 )::ADResult
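
For reference, here is a minimal sketch of how the new keyword could be used. It assumes `run_ad` is called as `run_ad(model, adtype; kwargs...)` and lives in `DynamicPPL.TestUtils.AD`; neither the positional arguments nor the module path appear in this diff, so treat both as assumptions.

```julia
using DynamicPPL, Distributions
using ADTypes: AutoForwardDiff
import ForwardDiff                      # backend package must be loaded for AutoForwardDiff
using Random: Xoshiro
using DynamicPPL.TestUtils.AD: run_ad   # assumed module path (not shown in this diff)

# A toy model, purely for illustration.
@model function demo()
    x ~ Normal()
    y ~ Normal(x, 1.0)
end

# Passing `rng` seeds the automatically generated VarInfo, which per the new
# default above is built as `link(VarInfo(rng, model), model)`.
result = run_ad(demo(), AutoForwardDiff(); rng=Xoshiro(468))
```

Defaulting to `default_rng()` preserves the existing behaviour (parameters drawn from the prior with the global RNG) while letting callers opt into reproducible draws by supplying their own `AbstractRNG`.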