diff --git a/Project.toml b/Project.toml index 1871588..76f1ebb 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "SliceSampling" uuid = "43f4d3e8-9711-4a8c-bd1b-03ac73a255cf" -version = "0.7.1" +version = "0.7.2" [deps] AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001" diff --git a/docs/src/gibbs_polar.md b/docs/src/gibbs_polar.md index d87a4d1..8372708 100644 --- a/docs/src/gibbs_polar.md +++ b/docs/src/gibbs_polar.md @@ -2,7 +2,7 @@ # [Gibbsian Polar Slice Sampling](@id polar) ## Introduction -Gibbsian polar slice sampling (GPSS) is a recent vector-valued slice sampling algorithm proposed by P. Schär, M. Habeck, and D. Rudolf[^SHR2023]. +Gibbsian polar slice sampling (GPSS) is a multivariate slice sampling algorithm proposed by P. Schär, M. Habeck, and D. Rudolf[^SHR2023]. It is a computationally efficient variant of the polar slice sampler previously proposed by Roberts and Rosenthal[^RR2002]. Unlike other slice sampling algorithms, it operates a Gibbs sampler over polar coordinates, reminiscent of the elliptical slice sampler (ESS). Due to the involvement of polar coordinates, GPSS only works reliably on more than one dimension. 
diff --git a/src/multivariate/randpermgibbs.jl b/src/multivariate/randpermgibbs.jl index c8ba434..84bc100 100644 --- a/src/multivariate/randpermgibbs.jl +++ b/src/multivariate/randpermgibbs.jl @@ -25,9 +25,7 @@ struct GibbsState{T<:Transition} transition::T end -function AbstractMCMC.setparams!!( - model::AbstractMCMC.LogDensityModel, state::GibbsState, params -) +function AbstractMCMC.setparams!!(model::AbstractMCMC.LogDensityModel, ::GibbsState, params) lp = LogDensityProblems.logdensity(model.logdensity, params) return GibbsState(Transition(params, lp, NamedTuple())) end diff --git a/test/multivariate.jl b/test/multivariate.jl index 5e081ae..67c8ccb 100644 --- a/test/multivariate.jl +++ b/test/multivariate.jl @@ -59,7 +59,7 @@ function LogDensityProblems.dimension(model::MultiModel) end @testset "multivariate samplers" begin - model = MultiModel(1.0, 1.0, [0.0]) + model = MultiModel(3.0, 3.0, [0.0]) @testset for sampler in [ # Vector-valued windows RandPermGibbs(Slice.(fill(1, LogDensityProblems.dimension(model)))), @@ -74,14 +74,12 @@ end HitAndRun(SliceSteppingOut(1)), HitAndRun(SliceDoublingOut(1)), - # Latent slice sampling + # Multivariate slice samplers LatentSlice(5), - - # Gibbsian polar slice sampling GibbsPolarSlice(100), ] @testset "initial_params" begin - model = MultiModel(1.0, 1.0, [0.0]) + model = MultiModel(3.0, 3.0, [0.0]) θ, y = MCMCTesting.sample_joint(Random.default_rng(), model) model′ = AbstractMCMC.LogDensityModel(@set model.y = y) @@ -92,7 +90,7 @@ end @testset "initial_sample" begin rng = StableRNG(1) - model = MultiModel(1.0, 1.0, [0.0]) + model = MultiModel(3.0, 3.0, [0.0]) θ0 = SliceSampling.initial_sample(rng, model) rng = StableRNG(1) @@ -101,7 +99,7 @@ end end @testset "determinism" begin - model = MultiModel(1.0, 1.0, [0.0]) + model = MultiModel(3.0, 3.0, [0.0]) θ, y = MCMCTesting.sample_joint(Random.default_rng(), model) model′ = AbstractMCMC.LogDensityModel(@set model.y = y) @@ -140,7 +138,7 @@ end n_mcmc_thin = 10 test = 
ExactRankTest(n_samples, n_mcmc_steps, n_mcmc_thin) - model = MultiModel(1.0, 1.0, [0.0]) + model = MultiModel(3.0, 3.0, [0.0]) subject = TestSubject(model, sampler) @test seqmcmctest(test, subject, 0.001, n_pvalue_samples; show_progress=false) end diff --git a/test/turing.jl b/test/turing.jl index 032fa76..f6c2e89 100644 --- a/test/turing.jl +++ b/test/turing.jl @@ -4,7 +4,8 @@ s ~ InverseGamma(2, 3) m ~ Normal(0, sqrt(s)) 1.5 ~ Normal(m, sqrt(s)) - return 2.0 ~ Normal(m, sqrt(s)) + 2.0 ~ Normal(m, sqrt(s)) + return nothing end n_samples = 1000