using .ForwardDiff: gradient!
using .DiffResults: GradientResult, value, gradient

# Metropolis-adjusted Langevin algorithm (MALA): random-walk Metropolis-Hastings
# whose proposal is shifted along the gradient of the target log density.
struct MALA{D} <: MHSampler
    proposal::D
end


# Create a RandomWalkProposal if we weren't given one already.
MALA(d) = MALA(RandomWalkProposal(d))

# If we were given a RandomWalkProposal, just use that instead.
MALA(d::RandomWalkProposal) = MALA{typeof(d)}(d)

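# A hypothetical usage sketch (not part of this file): the proposal passed to
# `MALA` is a function of the current gradient `g`, so a Langevin kernel with
# step size σ² could look like the following, assuming Distributions.jl and
# LinearAlgebra are available:
#
#     using Distributions, LinearAlgebra
#     σ² = 0.01
#     spl = MALA(g -> MvNormal((σ² / 2) .* g, σ² * I))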

# A transition that additionally caches the gradient of the log density at the
# draw, so it can be reused for the next proposal's drift and for the reverse
# proposal density.
struct GradientTransition{T<:Union{Vector, Real, NamedTuple}, L<:Real, G<:Union{Vector, Real, NamedTuple}} <: AbstractTransition
    params::T
    lp::L
    gradient::G
end

transition(::MALA, model, params) = GradientTransition(model, params)

# Store the new draw, its log density, and its gradient.
GradientTransition(model::DensityModel, params) = GradientTransition(params, logdensity_and_gradient(model, params)...)

propose(rng::Random.AbstractRNG, ::MALA, model) = error("please specify initial parameters")

# Propose a new draw: condition the proposal on the gradient at the previous
# draw, sample a step, then evaluate the log density and gradient at the result.
function propose(
    rng::Random.AbstractRNG,
    spl::MALA{<:Proposal},
    model::DensityModel,
    params_prev::GradientTransition
)
    proposal = propose(rng, spl.proposal(params_prev.gradient), model, params_prev.params)
    return GradientTransition(model, proposal)
end

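# With a proposal such as g -> MvNormal((σ²/2) .* g, σ² * I) (the hypothetical
# kernel sketched above), the draw produced here has the usual discretized
# Langevin form:
#
#     x′ = x + (σ² / 2) ∇log π(x) + σ ξ,   ξ ~ N(0, I)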

# Log density of proposing `t` from `t_cond` under the gradient-conditioned
# proposal. MALA's proposal is asymmetric, so this term is needed for the
# Metropolis-Hastings correction.
function q(
    spl::MALA{<:Proposal},
    t::GradientTransition,
    t_cond::GradientTransition
)
    return q(spl.proposal(-t_cond.gradient), t.params, t_cond.params)
end

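# For reference, this reverse density enters the Metropolis-Hastings acceptance
# probability computed by the generic MHSampler machinery; schematically (a
# sketch, not the literal call sites):
#
#     log α = log π(x′) - log π(x) + log q(x | x′) - log q(x′ | x)
#
# It is this correction that turns the discretized Langevin step into an exact
# MCMC kernel.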

"""
    logdensity_and_gradient(model::DensityModel, params)

Return the log density of `model` at `params` together with its gradient,
computed in a single pass using `DiffResults`.
"""
function logdensity_and_gradient(model::DensityModel, params)
    res = GradientResult(params)
    gradient!(res, model.logdensity, params)
    return (value(res), gradient(res))
end

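# A minimal end-to-end sketch (hypothetical; assumes Distributions.jl,
# LinearAlgebra, and the generic AbstractMCMC `sample` pipeline, with initial
# parameters supplied since `propose` errors without them):
#
#     using Distributions, LinearAlgebra
#     model = DensityModel(x -> logpdf(MvNormal(zeros(2), I), x))
#     spl = MALA(g -> MvNormal(0.005 .* g, 0.01 * I))
#     chain = sample(model, spl, 10_000; init_params=zeros(2))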

# The log density was computed when the transition was constructed, so read the
# cached value rather than re-evaluating the model.
logdensity(model::DensityModel, t::GradientTransition) = t.lp