4 changes: 2 additions & 2 deletions Project.toml
@@ -1,7 +1,7 @@
name = "ApproximateGPs"
uuid = "298c2ebc-0411-48ad-af38-99e88101b606"
authors = ["JuliaGaussianProcesses Team"]
-version = "0.4.5"
+version = "0.4.6"

[deps]
AbstractGPs = "99985d1d-32ba-4be9-9821-2ec096f28918"
@@ -23,7 +23,7 @@ StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[compat]
-AbstractGPs = "0.3, 0.4, 0.5"
+AbstractGPs = "0.5.17"
ChainRulesCore = "1.7"
Distributions = "0.25"
FastGaussQuadrature = "0.4, 0.5"
12 changes: 2 additions & 10 deletions src/API.jl
@@ -1,15 +1,7 @@
module API

-export approx_lml # TODO move to AbstractGPs, see https://github.com/JuliaGaussianProcesses/AbstractGPs.jl/issues/221
+export approx_lml # this was moved into AbstractGPs.approx_log_evidence, see https://github.com/JuliaGaussianProcesses/AbstractGPs.jl/pull/361

-"""
-    approx_lml(approx::<Approximation>, lfx::LatentFiniteGP, ys)
-
-Compute an approximation to the log of the marginal likelihood (also known as
-"evidence") under the given `approx` to the posterior. This approximation can be used to optimise the hyperparameters of `lfx`.
-
-This should become part of the AbstractGPs API (see JuliaGaussianProcesses/AbstractGPs.jl#221).
-"""
-function approx_lml end
+@deprecate approx_lml(approx, lfx, ys) approx_log_evidence(approx, lfx, ys)

end
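For downstream users, the practical effect of this file's change is that `approx_lml` keeps working but now emits a deprecation warning and forwards to `AbstractGPs.approx_log_evidence`. A minimal migration sketch, assuming a latent GP with a Bernoulli likelihood; the kernel, likelihood, placeholder data, and the unqualified `approx_lml` call (which relies on the existing top-level export) are illustrative assumptions, not part of this PR:

```julia
using ApproximateGPs, AbstractGPs, GPLikelihoods

# Illustrative latent (non-conjugate) GP model; any LatentGP works here.
kernel = SqExponentialKernel()
lf = LatentGP(GP(kernel), BernoulliLikelihood(), 1e-8)
x = range(0.0, 10.0; length=20)
y = rand(Bool, 20)  # placeholder classification labels

# New API, now provided by AbstractGPs:
evidence = approx_log_evidence(LaplaceApproximation(), lf(x), y)

# Old name: still callable, but @deprecate makes it warn and forward
# to approx_log_evidence.
evidence_old = approx_lml(LaplaceApproximation(), lf(x), y)
```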
4 changes: 2 additions & 2 deletions src/LaplaceApproximationModule.jl
@@ -48,14 +48,14 @@
end

"""
-approx_lml(la::LaplaceApproximation, lfx::LatentFiniteGP, ys)
+approx_log_evidence(la::LaplaceApproximation, lfx::LatentFiniteGP, ys)

Compute an approximation to the log of the marginal likelihood (also known as
"evidence"), which can be used to optimise the hyperparameters of `lfx`.

This should become part of the AbstractGPs API (see JuliaGaussianProcesses/AbstractGPs.jl#221).
"""
-function API.approx_lml(la::LaplaceApproximation, lfx::LatentFiniteGP, ys)
+function AbstractGPs.approx_log_evidence(la::LaplaceApproximation, lfx::LatentFiniteGP, ys)

Codecov / codecov/patch: added line src/LaplaceApproximationModule.jl#L58 was not covered by tests.
return laplace_lml(lfx, ys; la.newton_kwargs...)
end

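As the docstring above notes, the evidence approximation is mainly an objective for hyperparameter optimisation. A hedged sketch of that workflow with the Laplace approximation, loosely mirroring the package's tests; the kernel parametrisation, placeholder data, and the Optim/NelderMead setup are illustrative assumptions:

```julia
using ApproximateGPs, AbstractGPs, GPLikelihoods, Optim, Random

Random.seed!(123)
x = rand(30)
y = rand(Bool, 30)  # placeholder classification labels

# theta = (log variance, log lengthscale); purely illustrative parametrisation.
function objective(theta)
    kernel = exp(theta[1]) * with_lengthscale(SqExponentialKernel(), exp(theta[2]))
    lf = LatentGP(GP(kernel), BernoulliLikelihood(), 1e-8)
    return -approx_log_evidence(LaplaceApproximation(), lf(x), y)
end

result = optimize(objective, zeros(2), NelderMead())
theta_opt = Optim.minimizer(result)
```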
4 changes: 2 additions & 2 deletions src/SparseVariationalApproximationModule.jl
@@ -273,10 +273,10 @@
# elbo
#

-function API.approx_lml(
+function AbstractGPs.approx_log_evidence(

Codecov / codecov/patch: added line src/SparseVariationalApproximationModule.jl#L276 was not covered by tests.
sva::SparseVariationalApproximation, l_fx::Union{FiniteGP,LatentFiniteGP}, ys; kwargs...
)
-return AbstractGPs.elbo(sva, l_fx, ys; kwargs...)
+return elbo(sva, l_fx, ys; kwargs...)

Codecov / codecov/patch: added line src/SparseVariationalApproximationModule.jl#L279 was not covered by tests.
end

"""
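With this change, `approx_log_evidence` for a `SparseVariationalApproximation` is just a thin wrapper around the ELBO. A minimal sketch of that equivalence in use; the data, inducing inputs, and initial variational distribution below are illustrative assumptions:

```julia
using ApproximateGPs, AbstractGPs, Distributions, LinearAlgebra

x = collect(range(0.0, 10.0; length=50))
y = sin.(x) .+ 0.1 .* randn(50)

f = GP(SqExponentialKernel())
fx = f(x, 0.1)      # FiniteGP with Gaussian observation noise
z = x[1:5:end]      # inducing inputs (10 points)
fz = f(z, 1e-6)

m = length(z)
q = MvNormal(zeros(m), Matrix{Float64}(I, m, m))  # initial variational distribution
sva = SparseVariationalApproximation(fz, q)

approx_log_evidence(sva, fx, y) ≈ elbo(sva, fx, y)  # holds by construction here
```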
4 changes: 2 additions & 2 deletions src/TestUtils.jl
@@ -36,7 +36,7 @@ function build_latent_gp(theta)
end

"""
-test_approx_lml(approx)
+test_approx_log_evidence(approx)

Test whether in the conjugate case `approx_lml(approx, LatentGP(f,
GaussianLikelihood(), jitter)(x), y)` gives approximately the same answer as
@@ -51,7 +51,7 @@ the log marginal likelihood in exact GP regression.
!!! todo
Also test gradients (for hyperparameter optimization).
"""
-function test_approx_lml end
+function test_approx_log_evidence end

"""
test_approximation_predictions(approx)
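Downstream packages using the renamed test helper would call it roughly as in the sketch below; the choice of `LaplaceApproximation()` as the approximation under test is only one possible (assumed) instantiation:

```julia
using ApproximateGPs, Test

@testset "approx_log_evidence consistency" begin
    # Checks that, in the conjugate (Gaussian-likelihood) case, the
    # approximate evidence matches the exact log marginal likelihood.
    ApproximateGPs.TestUtils.test_approx_log_evidence(LaplaceApproximation())
end
```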
6 changes: 3 additions & 3 deletions test/LaplaceApproximationModule.jl
@@ -33,14 +33,14 @@
end

@testset "gradients" begin
-@testset "approx_lml" begin
+@testset "approx_log_evidence" begin
X, Y = generate_data()

Random.seed!(123)
theta0 = rand(2)
function objective(theta)
lf = build_latent_gp(theta)
-lml = approx_lml(LaplaceApproximation(), lf(X), Y)
+lml = approx_log_evidence(LaplaceApproximation(), lf(X), Y)
return -lml
end
fd_grad = only(FiniteDifferences.grad(central_fdm(5, 1), objective, theta0))
@@ -145,7 +145,7 @@
@testset "reference optimum" begin
function objective(theta)
lf = build_latent_gp(theta)
-return -approx_lml(LaplaceApproximation(), lf(X), Y)
+return -approx_log_evidence(LaplaceApproximation(), lf(X), Y)
end

@testset "NelderMead" begin
2 changes: 1 addition & 1 deletion test/Project.toml
@@ -16,7 +16,7 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[compat]
-AbstractGPs = "0.4, 0.5"
+AbstractGPs = "0.5.17"
ApproximateGPs = "0.4"
ChainRulesCore = "1"
ChainRulesTestUtils = "1.2.3"