From 0480a51e93f055fce8df11806bc11edeb94829f7 Mon Sep 17 00:00:00 2001 From: Peter Thestrup Waade Date: Sat, 19 Jul 2025 19:41:33 +0200 Subject: [PATCH 01/10] added ccall model --- main.jl | 1 + models/call_C.jl | 15 +++++++++++++++ 2 files changed, 16 insertions(+) create mode 100644 models/call_C.jl diff --git a/main.jl b/main.jl index 6f1e9db..3682c03 100644 --- a/main.jl +++ b/main.jl @@ -76,6 +76,7 @@ end # although it's hardly a big deal. @include_model "Base Julia features" "control_flow" @include_model "Base Julia features" "multithreaded" +@include_model "Base Julia features" "call_C" @include_model "Core Turing syntax" "broadcast_macro" @include_model "Core Turing syntax" "dot_assume" @include_model "Core Turing syntax" "dot_observe" diff --git a/models/call_C.jl b/models/call_C.jl new file mode 100644 index 0000000..ea6c751 --- /dev/null +++ b/models/call_C.jl @@ -0,0 +1,15 @@ +# Get name of C standard library depending on the platform +libc_name = Sys.iswindows() ? "msvcrt.dll" : + Sys.isapple() ? "libc.dylib" : + "libc.so.6" + +@model function call_C(y) + x ~ Normal(0, 1) + + # Call C library abs function + x_abs = @ccall libc_name.fabs(x::Cdouble)::Cdouble + + y ~ Normal(0, x_abs) +end + +model = call_C() \ No newline at end of file From 95e60ead5bc80b02e812986e19a5b57caf00846d Mon Sep 17 00:00:00 2001 From: Peter Thestrup Waade Date: Sat, 19 Jul 2025 19:42:25 +0200 Subject: [PATCH 02/10] added default y --- models/call_C.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/models/call_C.jl b/models/call_C.jl index ea6c751..a7bab57 100644 --- a/models/call_C.jl +++ b/models/call_C.jl @@ -3,7 +3,7 @@ libc_name = Sys.iswindows() ? "msvcrt.dll" : Sys.isapple() ? 
"libc.dylib" : "libc.so.6" -@model function call_C(y) +@model function call_C(y = 0.0) x ~ Normal(0, 1) # Call C library abs function From 87384dd9c5cb893d5756b0acc8315efca40caa83 Mon Sep 17 00:00:00 2001 From: Peter Thestrup Waade Date: Sat, 19 Jul 2025 20:00:21 +0200 Subject: [PATCH 03/10] added metabayesian model --- main.jl | 1 + models/metabayesian_MH.jl | 48 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+) create mode 100644 models/metabayesian_MH.jl diff --git a/main.jl b/main.jl index 3682c03..de30722 100644 --- a/main.jl +++ b/main.jl @@ -115,6 +115,7 @@ end @include_model "Effect of model size" "n500" @include_model "PosteriorDB" "pdb_eight_schools_centered" @include_model "PosteriorDB" "pdb_eight_schools_noncentered" +@include_model "Miscellaneous features" "metabayesian_MH" # The entry point to this script itself begins here if ARGS == ["--list-model-keys"] diff --git a/models/metabayesian_MH.jl b/models/metabayesian_MH.jl new file mode 100644 index 0000000..0fa1b08 --- /dev/null +++ b/models/metabayesian_MH.jl @@ -0,0 +1,48 @@ +#= +This is a "meta-Bayesian" model, where the generative model includes an inversion of a different generative model. +These types of models are common in cognitive modelling, where systems of interest (e.g. human subjects) are thought to use Bayesian inference to navigate their environment. +Here we use a Metropolis-Hasting sampler implemented with Turing as the inversion of the inner "subjective" model. 
+=# + +# Inner model function +@model function inner_model(observation, prior_μ = 0, prior_σ = 1) + + # The innter model's prior + mean ~ Normal(prior_μ, prior_σ) + + # The inner model's likelihood + observation ~ Normal(mean, 1) +end + +# Outer model function +@model function metabayesian_MH(observation, action, inner_sampler = MH(), inner_n_samples = 20) + + ### Sample parameters for the inner inference and response ### + + #The inner model's prior's sufficient statistics + subj_prior_μ ~ Normal(0, 1) + subj_prior_σ = 1.0 + + # #Inverse temperature for actions + β ~ Exponential(1) + + ### "Perceptual inference": running the inner model ### + + #Condition the inner model + inner_m = inner_model(observation, subj_prior_μ, subj_prior_σ) + + #Run the inner Bayesian inference + chns = sample(inner_m, inner_sampler, inner_n_samples, progress = false) + + #Extract subjective point estimate + subj_mean_expectationₜ = mean(chns[:mean]) + + + ### "Response model": picking an action ### + + #The action is a Gaussian-noise report of the subjective point estimate + action ~ Normal(subj_mean_expectationₜ, β) + +end + +model = metabayesian_MH(0.0, 1.0) \ No newline at end of file From 8bc8c5e77b1d27fb0d1b378de658ed4d67d7df75 Mon Sep 17 00:00:00 2001 From: Peter Thestrup Waade Date: Tue, 22 Jul 2025 09:03:04 +0200 Subject: [PATCH 04/10] removed C library naming --- models/call_C.jl | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/models/call_C.jl b/models/call_C.jl index a7bab57..9bd3737 100644 --- a/models/call_C.jl +++ b/models/call_C.jl @@ -1,13 +1,8 @@ -# Get name of C standard library depending on the platform -libc_name = Sys.iswindows() ? "msvcrt.dll" : - Sys.isapple() ? 
"libc.dylib" : - "libc.so.6" - @model function call_C(y = 0.0) x ~ Normal(0, 1) # Call C library abs function - x_abs = @ccall libc_name.fabs(x::Cdouble)::Cdouble + x_abs = @ccall fabs(x::Cdouble)::Cdouble y ~ Normal(0, x_abs) end From c049fd8197abea4a06f7cf16a353a8dc0b103e4d Mon Sep 17 00:00:00 2001 From: Peter Thestrup Waade Date: Tue, 22 Jul 2025 11:02:44 +0200 Subject: [PATCH 05/10] removed inverse temperature parameter from metabayesian MH model --- models/metabayesian_MH.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/models/metabayesian_MH.jl b/models/metabayesian_MH.jl index 0fa1b08..6a555bb 100644 --- a/models/metabayesian_MH.jl +++ b/models/metabayesian_MH.jl @@ -24,7 +24,7 @@ end subj_prior_σ = 1.0 # #Inverse temperature for actions - β ~ Exponential(1) + β ~ 1 ### "Perceptual inference": running the inner model ### From 887b82a4dbd5d54e9b46dc5b63ba38dcdb63b03b Mon Sep 17 00:00:00 2001 From: Penelope Yong Date: Tue, 22 Jul 2025 18:19:48 +0100 Subject: [PATCH 06/10] Use FiniteDifferences as comparison; fix parameter generation --- main.jl | 40 ++++++++++++++++++++++++---------------- 1 file changed, 24 insertions(+), 16 deletions(-) diff --git a/main.jl b/main.jl index de30722..51219a1 100644 --- a/main.jl +++ b/main.jl @@ -1,6 +1,7 @@ using DynamicPPL: DynamicPPL, VarInfo using DynamicPPL.TestUtils.AD: run_ad, ADResult, ADIncorrectException using ADTypes +using Random: Xoshiro import FiniteDifferences: central_fdm import ForwardDiff @@ -11,13 +12,13 @@ import Zygote # AD backends to test. 
ADTYPES = Dict( - "FiniteDifferences" => AutoFiniteDifferences(; fdm=central_fdm(5, 1)), + "FiniteDifferences" => AutoFiniteDifferences(; fdm = central_fdm(5, 1)), "ForwardDiff" => AutoForwardDiff(), - "ReverseDiff" => AutoReverseDiff(; compile=false), - "ReverseDiffCompiled" => AutoReverseDiff(; compile=true), - "Mooncake" => AutoMooncake(; config=nothing), - "EnzymeForward" => AutoEnzyme(; mode=set_runtime_activity(Forward, true)), - "EnzymeReverse" => AutoEnzyme(; mode=set_runtime_activity(Reverse, true)), + "ReverseDiff" => AutoReverseDiff(; compile = false), + "ReverseDiffCompiled" => AutoReverseDiff(; compile = true), + "Mooncake" => AutoMooncake(; config = nothing), + "EnzymeForward" => AutoEnzyme(; mode = set_runtime_activity(Forward, true)), + "EnzymeReverse" => AutoEnzyme(; mode = set_runtime_activity(Reverse, true)), "Zygote" => AutoZygote(), ) @@ -56,14 +57,12 @@ macro include_model(category::AbstractString, model_name::AbstractString) if MODELS_TO_LOAD == "__all__" || model_name in split(MODELS_TO_LOAD, ",") # Declare a module containing the model. 
In principle esc() shouldn't # be needed, but see https://github.com/JuliaLang/julia/issues/55677 - Expr(:toplevel, esc(:( - module $(gensym()) - using .Main: @register - using Turing - include("models/" * $(model_name) * ".jl") - @register $(category) model - end - ))) + Expr(:toplevel, esc(:(module $(gensym()) + using .Main: @register + using Turing + include("models/" * $(model_name) * ".jl") + @register $(category) model + end))) else # Empty expression :() @@ -133,9 +132,18 @@ elseif length(ARGS) == 3 && ARGS[1] == "--run" # https://github.com/TuringLang/ADTests/issues/4 vi = DynamicPPL.unflatten(VarInfo(model), [0.5, -0.5]) params = [-0.5, 0.5] - result = run_ad(model, adtype; varinfo=vi, params=params, benchmark=true) + result = run_ad(model, adtype; varinfo = vi, params = params, benchmark = true) else - result = run_ad(model, adtype; benchmark=true) + vi = VarInfo(Xoshiro(468), model) + linked_vi = DynamicPPL.link!!(vi, model) + params = linked_vi[:] + result = run_ad( + model, + adtype; + params = params, + reference_adtype = ADTYPES["FiniteDifferences"], + benchmark = true, + ) end # If reached here - nothing went wrong println(result.time_vs_primal) From 18d32a8aefca8a58351d5735cb2ec59126672c1a Mon Sep 17 00:00:00 2001 From: Penelope Yong Date: Tue, 22 Jul 2025 18:25:17 +0100 Subject: [PATCH 07/10] remove extra tilde --- models/metabayesian_MH.jl | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/models/metabayesian_MH.jl b/models/metabayesian_MH.jl index 6a555bb..9961907 100644 --- a/models/metabayesian_MH.jl +++ b/models/metabayesian_MH.jl @@ -6,7 +6,7 @@ Here we use a Metropolis-Hasting sampler implemented with Turing as the inversio # Inner model function @model function inner_model(observation, prior_μ = 0, prior_σ = 1) - + # The innter model's prior mean ~ Normal(prior_μ, prior_σ) @@ -15,16 +15,21 @@ Here we use a Metropolis-Hasting sampler implemented with Turing as the inversio end # Outer model function 
-@model function metabayesian_MH(observation, action, inner_sampler = MH(), inner_n_samples = 20) - +@model function metabayesian_MH( + observation, + action, + inner_sampler = MH(), + inner_n_samples = 20, +) + ### Sample parameters for the inner inference and response ### #The inner model's prior's sufficient statistics subj_prior_μ ~ Normal(0, 1) - subj_prior_σ = 1.0 - + subj_prior_σ = 1.0 + # #Inverse temperature for actions - β ~ 1 + β = 1 ### "Perceptual inference": running the inner model ### @@ -45,4 +50,4 @@ end end -model = metabayesian_MH(0.0, 1.0) \ No newline at end of file +model = metabayesian_MH(0.0, 1.0) From 70643f100d556798c67306dc7cf4da2d21b564b8 Mon Sep 17 00:00:00 2001 From: Penelope Yong Date: Tue, 22 Jul 2025 18:25:27 +0100 Subject: [PATCH 08/10] Format --- models/broadcast_macro.jl | 5 +---- models/call_C.jl | 2 +- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/models/broadcast_macro.jl b/models/broadcast_macro.jl index e1e8b12..a6c0752 100644 --- a/models/broadcast_macro.jl +++ b/models/broadcast_macro.jl @@ -1,7 +1,4 @@ -@model function broadcast_macro( - x = [1.5, 2.0], - ::Type{TV} = Vector{Float64}, -) where {TV} +@model function broadcast_macro(x = [1.5, 2.0], ::Type{TV} = Vector{Float64}) where {TV} a ~ Normal(0, 1) b ~ InverseGamma(2, 3) @. 
x ~ Normal(a, $(sqrt(b))) diff --git a/models/call_C.jl b/models/call_C.jl index 9bd3737..d28ac00 100644 --- a/models/call_C.jl +++ b/models/call_C.jl @@ -7,4 +7,4 @@ y ~ Normal(0, x_abs) end -model = call_C() \ No newline at end of file +model = call_C() From 7f58934846bbf506ecfbd54bd4f8ba8883c2c3df Mon Sep 17 00:00:00 2001 From: Penelope Yong Date: Thu, 24 Jul 2025 11:43:41 +0100 Subject: [PATCH 09/10] Seed inner sampling to ensure reproducibility of gradients --- models/metabayesian_MH.jl | 27 +++++++++------------------ 1 file changed, 9 insertions(+), 18 deletions(-) diff --git a/models/metabayesian_MH.jl b/models/metabayesian_MH.jl index 9961907..7781bf3 100644 --- a/models/metabayesian_MH.jl +++ b/models/metabayesian_MH.jl @@ -3,13 +3,12 @@ This is a "meta-Bayesian" model, where the generative model includes an inversio These types of models are common in cognitive modelling, where systems of interest (e.g. human subjects) are thought to use Bayesian inference to navigate their environment. Here we use a Metropolis-Hasting sampler implemented with Turing as the inversion of the inner "subjective" model. 
=# +using Random: Xoshiro # Inner model function @model function inner_model(observation, prior_μ = 0, prior_σ = 1) - - # The innter model's prior + # The inner model's prior mean ~ Normal(prior_μ, prior_σ) - # The inner model's likelihood observation ~ Normal(mean, 1) end @@ -21,33 +20,25 @@ end inner_sampler = MH(), inner_n_samples = 20, ) - ### Sample parameters for the inner inference and response ### - - #The inner model's prior's sufficient statistics + # The inner model's prior's sufficient statistics subj_prior_μ ~ Normal(0, 1) subj_prior_σ = 1.0 - - # #Inverse temperature for actions + # Inverse temperature for actions β = 1 ### "Perceptual inference": running the inner model ### - - #Condition the inner model + # Condition the inner model inner_m = inner_model(observation, subj_prior_μ, subj_prior_σ) - - #Run the inner Bayesian inference - chns = sample(inner_m, inner_sampler, inner_n_samples, progress = false) - - #Extract subjective point estimate + # Run the inner Bayesian inference + chns = sample(Xoshiro(468), inner_m, inner_sampler, inner_n_samples, progress = false) + # Extract subjective point estimate subj_mean_expectationₜ = mean(chns[:mean]) ### "Response model": picking an action ### - - #The action is a Gaussian-noise report of the subjective point estimate + # The action is a Gaussian-noise report of the subjective point estimate action ~ Normal(subj_mean_expectationₜ, β) - end model = metabayesian_MH(0.0, 1.0) From c4d941b6bbbad87dacb0227a7b51cffd212af210 Mon Sep 17 00:00:00 2001 From: Penelope Yong Date: Thu, 24 Jul 2025 12:16:30 +0100 Subject: [PATCH 10/10] Revert beta to Exponential(1) --- models/metabayesian_MH.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/models/metabayesian_MH.jl b/models/metabayesian_MH.jl index 7781bf3..377bd7f 100644 --- a/models/metabayesian_MH.jl +++ b/models/metabayesian_MH.jl @@ -25,7 +25,7 @@ end subj_prior_μ ~ Normal(0, 1) subj_prior_σ = 1.0 # Inverse temperature for actions - β = 1 + β 
~ Exponential(1) ### "Perceptual inference": running the inner model ### # Condition the inner model