diff --git a/Project.toml b/Project.toml
index 02a02f68..50de5fe1 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "ApproximateGPs"
 uuid = "298c2ebc-0411-48ad-af38-99e88101b606"
 authors = ["JuliaGaussianProcesses Team"]
-version = "0.4.5"
+version = "0.4.6"
 
 [deps]
 AbstractGPs = "99985d1d-32ba-4be9-9821-2ec096f28918"
@@ -23,7 +23,7 @@ StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
 [compat]
-AbstractGPs = "0.3, 0.4, 0.5"
+AbstractGPs = "0.5.17"
 ChainRulesCore = "1.7"
 Distributions = "0.25"
 FastGaussQuadrature = "0.4, 0.5"
diff --git a/src/API.jl b/src/API.jl
index cc710869..a8893a47 100644
--- a/src/API.jl
+++ b/src/API.jl
@@ -1,15 +1,7 @@
 module API
 
-export approx_lml # TODO move to AbstractGPs, see https://github.com/JuliaGaussianProcesses/AbstractGPs.jl/issues/221
+export approx_lml # this was moved into AbstractGPs.approx_log_evidence, see https://github.com/JuliaGaussianProcesses/AbstractGPs.jl/pull/361
 
-"""
-    approx_lml(approx::<Approx>, lfx::LatentFiniteGP, ys)
-
-Compute an approximation to the log of the marginal likelihood (also known as
-"evidence") under the given `approx` to the posterior. This approximation can be used to optimise the hyperparameters of `lfx`.
-
-This should become part of the AbstractGPs API (see JuliaGaussianProcesses/AbstractGPs.jl#221).
-"""
-function approx_lml end
+@deprecate approx_lml(approx, lfx, ys) approx_log_evidence(approx, lfx, ys)
 
 end
diff --git a/src/LaplaceApproximationModule.jl b/src/LaplaceApproximationModule.jl
index e596a72d..eb93fef9 100644
--- a/src/LaplaceApproximationModule.jl
+++ b/src/LaplaceApproximationModule.jl
@@ -48,14 +48,14 @@ function AbstractGPs.posterior(la::LaplaceApproximation, lfx::LatentFiniteGP, ys
 end
 
 """
-    approx_lml(la::LaplaceApproximation, lfx::LatentFiniteGP, ys)
+    approx_log_evidence(la::LaplaceApproximation, lfx::LatentFiniteGP, ys)
 
 Compute an approximation to the log of the marginal likelihood (also known as
 "evidence"), which can be used to optimise the hyperparameters of `lfx`.
 
 This should become part of the AbstractGPs API (see JuliaGaussianProcesses/AbstractGPs.jl#221).
 """
-function API.approx_lml(la::LaplaceApproximation, lfx::LatentFiniteGP, ys)
+function AbstractGPs.approx_log_evidence(la::LaplaceApproximation, lfx::LatentFiniteGP, ys)
     return laplace_lml(lfx, ys; la.newton_kwargs...)
 end
diff --git a/src/SparseVariationalApproximationModule.jl b/src/SparseVariationalApproximationModule.jl
index 1f80938e..1dbbfaf1 100644
--- a/src/SparseVariationalApproximationModule.jl
+++ b/src/SparseVariationalApproximationModule.jl
@@ -273,10 +273,10 @@ inducing_points(f::ApproxPosteriorGP{<:SparseVariationalApproximation}) = f.appr
 
 #
 # elbo
 #
 
-function API.approx_lml(
+function AbstractGPs.approx_log_evidence(
     sva::SparseVariationalApproximation, l_fx::Union{FiniteGP,LatentFiniteGP}, ys; kwargs...
 )
-    return AbstractGPs.elbo(sva, l_fx, ys; kwargs...)
+    return elbo(sva, l_fx, ys; kwargs...)
 end
 
 """
diff --git a/src/TestUtils.jl b/src/TestUtils.jl
index 7836e974..cf0689b6 100644
--- a/src/TestUtils.jl
+++ b/src/TestUtils.jl
@@ -36,7 +36,7 @@ function build_latent_gp(theta)
 end
 
 """
-    test_approx_lml(approx)
+    test_approx_log_evidence(approx)
 
 Test whether in the conjugate case `approx_lml(approx, LatentGP(f,
 GaussianLikelihood(), jitter)(x), y)` gives approximately the same answer as
@@ -51,7 +51,7 @@ the log marginal likelihood in exact GP regression.
 !!! todo
     Also test gradients (for hyperparameter optimization).
""" -function test_approx_lml end +function test_approx_log_evidence end """ test_approximation_predictions(approx) diff --git a/test/LaplaceApproximationModule.jl b/test/LaplaceApproximationModule.jl index 547b60f3..f4b0e280 100644 --- a/test/LaplaceApproximationModule.jl +++ b/test/LaplaceApproximationModule.jl @@ -33,14 +33,14 @@ end @testset "gradients" begin - @testset "approx_lml" begin + @testset "approx_log_evidence" begin X, Y = generate_data() Random.seed!(123) theta0 = rand(2) function objective(theta) lf = build_latent_gp(theta) - lml = approx_lml(LaplaceApproximation(), lf(X), Y) + lml = approx_log_evidence(LaplaceApproximation(), lf(X), Y) return -lml end fd_grad = only(FiniteDifferences.grad(central_fdm(5, 1), objective, theta0)) @@ -145,7 +145,7 @@ @testset "reference optimum" begin function objective(theta) lf = build_latent_gp(theta) - return -approx_lml(LaplaceApproximation(), lf(X), Y) + return -approx_log_evidence(LaplaceApproximation(), lf(X), Y) end @testset "NelderMead" begin diff --git a/test/Project.toml b/test/Project.toml index 58b2bd88..208d19d6 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -16,7 +16,7 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" [compat] -AbstractGPs = "0.4, 0.5" +AbstractGPs = "0.5.17" ApproximateGPs = "0.4" ChainRulesCore = "1" ChainRulesTestUtils = "1.2.3"