module TestUtils

using LinearAlgebra
using Random
using Test
using Distributions
using LogExpFunctions: logistic, softplus
using AbstractGPs
using ApproximateGPs

function generate_data()
    X = range(0, 23.5; length=48)
    # The random number generator changed across older Julia releases. The following vector
    # was generated before that change to keep this test stable across supported versions.
    # The generating code below is only kept for illustrative purposes.
    #! format: off
    Y = [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0]
    #! format: on
    # Random.seed!(1)
    # fs = @. 3 * sin(10 + 0.6X) + sin(0.1X) - 1
    # # invlink = normcdf
    # invlink = logistic
    # ps = invlink.(fs)
    # Y = @. rand(Bernoulli(ps))
    return X, Y
end

# Observation likelihood: Bernoulli observations with a logistic link on the latent function.
dist_y_given_f(f) = Bernoulli(logistic(f))

function build_latent_gp(theta)
    # Map the unconstrained parameters to positive kernel hyperparameters via softplus.
    variance = softplus(theta[1])
    lengthscale = softplus(theta[2])
    kernel = variance * with_lengthscale(SqExponentialKernel(), lengthscale)
    return LatentGP(GP(kernel), dist_y_given_f, 1e-8)
end
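
# Example usage (a minimal sketch): build the latent GP at the toy inputs and draw a
# joint sample of latent values and Bernoulli observations. This assumes AbstractGPs'
# `rand` on a `LatentFiniteGP` returns a named tuple `(f=..., y=...)`.
#
# X, Y = generate_data()
# lf = build_latent_gp([0.0, 0.0])  # raw parameters, mapped through softplus above
# sample = rand(lf(X))              # sample.f: latent values, sample.y: observations
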
"""
test_approx_lml(approx)
Test whether in the conjugate case `approx_lml(approx, LatentGP(f,
GaussianLikelihood(), jitter)(x), y)` gives approximately the same answer as
the log marginal likelihood in exact GP regression.
!!! todo
Not yet implemented.
Will not necessarily work for approximations that rely on optimization such
as `SparseVariationalApproximation`.
!!! todo
Also test gradients (for hyperparameter optimization).
"""
function test_approx_lml end
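
# A sketch of what this test could check once implemented, kept as a comment since the
# function above is deliberately a stub. It assumes the approximation supports
# `approx_lml(approx, lfx, y)` as described in the docstring; in the conjugate case the
# exact log marginal likelihood is available as `logpdf(f(x, noise_scale^2), y)` in
# AbstractGPs.
#
# function test_approx_lml(approx)
#     rng = MersenneTwister(654321)
#     x = collect(range(-1.0, 1.0; length=5))
#     noise_scale = 0.1
#     f = GP(Matern32Kernel())
#     y = rand(rng, f(x, noise_scale^2))
#     lf = LatentGP(f, f -> Normal(f, noise_scale), 0.0)
#     @test approx_lml(approx, lf(x), y) ≈ logpdf(f(x, noise_scale^2), y)
# end
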
"""
test_approximation_predictions(approx)
Test whether the prediction interface for `approx` works and whether in the
conjugate case `posterior(approx, LatentGP(f, GaussianLikelihood(), jitter)(x), y)`
gives approximately the same answer as the exact GP regression posterior.
!!! note
Should be satisfied by all approximate inference methods, but note that
this does not currently apply for some approximations which rely on
optimization such as `SparseVariationalApproximation`.
!!! warning
Do not rely on this as the only test of a new approximation!
See `test_approx_lml`.
"""
function test_approximation_predictions(approx)
    rng = MersenneTwister(123456)
    N_cond = 5
    N_a = 6
    N_b = 7

    # Specify prior.
    f = GP(Matern32Kernel())
    # Sample from prior.
    x = collect(range(-1.0, 1.0; length=N_cond))
    # TODO: Change to x = ColVecs(rand(2, N_cond)) once #109 is fixed
    noise_scale = 0.1
    fx = f(x, noise_scale^2)
    y = rand(rng, fx)

    jitter = 0.0  # not needed in Gaussian case
    lf = LatentGP(f, f -> Normal(f, noise_scale), jitter)
    f_approx_post = posterior(approx, lf(x), y)

    @testset "AbstractGPs API" begin
        a = collect(range(-1.2, 1.2; length=N_a))
        b = randn(rng, N_b)
        AbstractGPs.TestUtils.test_internal_abstractgps_interface(rng, f_approx_post, a, b)
    end

    @testset "exact GPR equivalence for Gaussian likelihood" begin
        f_exact_post = posterior(f(x, noise_scale^2), y)
        xt = vcat(x, randn(rng, 3))  # test at training and new points
        m_approx, c_approx = mean_and_cov(f_approx_post(xt))
        m_exact, c_exact = mean_and_cov(f_exact_post(xt))
        @test m_approx ≈ m_exact
        @test c_approx ≈ c_exact
    end
end
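
# Example usage (a sketch): `LaplaceApproximation` is one approximation provided by
# ApproximateGPs for which inference with a Gaussian likelihood is exact, so it is
# expected to satisfy this test.
#
# using ApproximateGPs: LaplaceApproximation
# TestUtils.test_approximation_predictions(LaplaceApproximation())
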
end