diff --git a/main.jl b/main.jl
index 6f1e9db..51219a1 100644
--- a/main.jl
+++ b/main.jl
@@ -1,6 +1,7 @@
 using DynamicPPL: DynamicPPL, VarInfo
 using DynamicPPL.TestUtils.AD: run_ad, ADResult, ADIncorrectException
 using ADTypes
+using Random: Xoshiro
 import FiniteDifferences: central_fdm
 import ForwardDiff
@@ -11,13 +12,13 @@ import Zygote
 
 # AD backends to test.
 ADTYPES = Dict(
-    "FiniteDifferences" => AutoFiniteDifferences(; fdm=central_fdm(5, 1)),
+    "FiniteDifferences" => AutoFiniteDifferences(; fdm = central_fdm(5, 1)),
     "ForwardDiff" => AutoForwardDiff(),
-    "ReverseDiff" => AutoReverseDiff(; compile=false),
-    "ReverseDiffCompiled" => AutoReverseDiff(; compile=true),
-    "Mooncake" => AutoMooncake(; config=nothing),
-    "EnzymeForward" => AutoEnzyme(; mode=set_runtime_activity(Forward, true)),
-    "EnzymeReverse" => AutoEnzyme(; mode=set_runtime_activity(Reverse, true)),
+    "ReverseDiff" => AutoReverseDiff(; compile = false),
+    "ReverseDiffCompiled" => AutoReverseDiff(; compile = true),
+    "Mooncake" => AutoMooncake(; config = nothing),
+    "EnzymeForward" => AutoEnzyme(; mode = set_runtime_activity(Forward, true)),
+    "EnzymeReverse" => AutoEnzyme(; mode = set_runtime_activity(Reverse, true)),
     "Zygote" => AutoZygote(),
 )
 
@@ -56,14 +57,12 @@ macro include_model(category::AbstractString, model_name::AbstractString)
     if MODELS_TO_LOAD == "__all__" || model_name in split(MODELS_TO_LOAD, ",")
         # Declare a module containing the model. In principle esc() shouldn't
         # be needed, but see https://github.com/JuliaLang/julia/issues/55677
-        Expr(:toplevel, esc(:(
-            module $(gensym())
-            using .Main: @register
-            using Turing
-            include("models/" * $(model_name) * ".jl")
-            @register $(category) model
-            end
-        )))
+        Expr(:toplevel, esc(:(module $(gensym())
+        using .Main: @register
+        using Turing
+        include("models/" * $(model_name) * ".jl")
+        @register $(category) model
+        end)))
     else
         # Empty expression
         :()
@@ -76,6 +75,7 @@ end
 # although it's hardly a big deal.
@include_model "Base Julia features" "control_flow" @include_model "Base Julia features" "multithreaded" +@include_model "Base Julia features" "call_C" @include_model "Core Turing syntax" "broadcast_macro" @include_model "Core Turing syntax" "dot_assume" @include_model "Core Turing syntax" "dot_observe" @@ -114,6 +114,7 @@ end @include_model "Effect of model size" "n500" @include_model "PosteriorDB" "pdb_eight_schools_centered" @include_model "PosteriorDB" "pdb_eight_schools_noncentered" +@include_model "Miscellaneous features" "metabayesian_MH" # The entry point to this script itself begins here if ARGS == ["--list-model-keys"] @@ -131,9 +132,18 @@ elseif length(ARGS) == 3 && ARGS[1] == "--run" # https://github.com/TuringLang/ADTests/issues/4 vi = DynamicPPL.unflatten(VarInfo(model), [0.5, -0.5]) params = [-0.5, 0.5] - result = run_ad(model, adtype; varinfo=vi, params=params, benchmark=true) + result = run_ad(model, adtype; varinfo = vi, params = params, benchmark = true) else - result = run_ad(model, adtype; benchmark=true) + vi = VarInfo(Xoshiro(468), model) + linked_vi = DynamicPPL.link!!(vi, model) + params = linked_vi[:] + result = run_ad( + model, + adtype; + params = params, + reference_adtype = ADTYPES["FiniteDifferences"], + benchmark = true, + ) end # If reached here - nothing went wrong println(result.time_vs_primal) diff --git a/models/broadcast_macro.jl b/models/broadcast_macro.jl index e1e8b12..a6c0752 100644 --- a/models/broadcast_macro.jl +++ b/models/broadcast_macro.jl @@ -1,7 +1,4 @@ -@model function broadcast_macro( - x = [1.5, 2.0], - ::Type{TV} = Vector{Float64}, -) where {TV} +@model function broadcast_macro(x = [1.5, 2.0], ::Type{TV} = Vector{Float64}) where {TV} a ~ Normal(0, 1) b ~ InverseGamma(2, 3) @. x ~ Normal(a, $(sqrt(b))) diff --git a/models/call_C.jl b/models/call_C.jl new file mode 100644 index 0000000..d28ac00 --- /dev/null +++ b/models/call_C.jl @@ -0,0 +1,10 @@ +@model function call_C(y = 0.0) + x ~ Normal(0, 1) + + # Call C library abs function + x_abs = @ccall fabs(x::Cdouble)::Cdouble + + y ~ Normal(0, x_abs) +end + +model = call_C() diff --git a/models/metabayesian_MH.jl b/models/metabayesian_MH.jl new file mode 100644 index 0000000..377bd7f --- /dev/null +++ b/models/metabayesian_MH.jl @@ -0,0 +1,44 @@ +#= +This is a "meta-Bayesian" model, where the generative model includes an inversion of a different generative model. +These types of models are common in cognitive modelling, where systems of interest (e.g. human subjects) are thought to use Bayesian inference to navigate their environment. +Here we use a Metropolis-Hasting sampler implemented with Turing as the inversion of the inner "subjective" model. 
+=#
+using Random: Xoshiro
+
+# Inner model function
+@model function inner_model(observation, prior_μ = 0, prior_σ = 1)
+    # The inner model's prior
+    mean ~ Normal(prior_μ, prior_σ)
+    # The inner model's likelihood
+    observation ~ Normal(mean, 1)
+end
+
+# Outer model function
+@model function metabayesian_MH(
+    observation,
+    action,
+    inner_sampler = MH(),
+    inner_n_samples = 20,
+)
+    ### Sample parameters for the inner inference and response ###
+    # Sufficient statistics of the inner model's prior
+    subj_prior_μ ~ Normal(0, 1)
+    subj_prior_σ = 1.0
+    # Response noise (used as the std. dev. of the action distribution below)
+    β ~ Exponential(1)
+
+    ### "Perceptual inference": running the inner model ###
+    # Condition the inner model
+    inner_m = inner_model(observation, subj_prior_μ, subj_prior_σ)
+    # Run the inner Bayesian inference
+    chns = sample(Xoshiro(468), inner_m, inner_sampler, inner_n_samples, progress = false)
+    # Extract a subjective point estimate
+    subj_mean_expectationₜ = mean(chns[:mean])
+
+
+    ### "Response model": picking an action ###
+    # The action is a Gaussian-noise report of the subjective point estimate
+    action ~ Normal(subj_mean_expectationₜ, β)
+end
+
+model = metabayesian_MH(0.0, 1.0)
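
For reviewers, a minimal self-contained sketch of what the new seeded-initialisation branch in main.jl does: draw a reproducible set of initial values from the prior, map them to unconstrained ("linked") space, and hand the resulting vector to run_ad with FiniteDifferences as the reference backend. This is not part of the patch; the demo model below is invented for illustration, and only the VarInfo/link!!/run_ad calls already used in the patch are assumed.

using DynamicPPL: DynamicPPL, VarInfo
using DynamicPPL.TestUtils.AD: run_ad
using ADTypes: AutoForwardDiff, AutoFiniteDifferences
import FiniteDifferences: central_fdm
import ForwardDiff
using Random: Xoshiro
using Turing

# A made-up model with one constrained variable, so that linking
# (transforming to unconstrained space) actually does something.
@model function demo()
    σ ~ truncated(Normal(0, 1); lower = 0)
    x ~ Normal(0, σ)
end
model = demo()

# Reproducible initial values: evaluate the model's prior with a fixed seed...
vi = VarInfo(Xoshiro(468), model)
# ...then transform to linked space before extracting the parameter vector,
# mirroring the patch.
linked_vi = DynamicPPL.link!!(vi, model)
params = linked_vi[:]

# Check ForwardDiff's gradient against a finite-differences reference and
# benchmark it relative to the primal log-density evaluation.
result = run_ad(
    model,
    AutoForwardDiff();
    params = params,
    reference_adtype = AutoFiniteDifferences(; fdm = central_fdm(5, 1)),
    benchmark = true,
)
println(result.time_vs_primal)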