create ApproximateGPs.TestUtils #117

Merged
merged 17 commits on Mar 17, 2022
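This PR factors the shared test helpers (the Bernoulli-classification data generator, dist_y_given_f, and build_latent_gp) together with the approximate-posterior prediction checks out of test/LaplaceApproximationModule.jl into a new ApproximateGPs.TestUtils module, so the same checks can be reused for other approximations. Below is a minimal sketch of how a downstream test suite might call the new utility, assuming Test and ApproximateGPs are loaded; it mirrors the call added to test/LaplaceApproximationModule.jl in this diff.

using Test
using ApproximateGPs

@testset "predictions" begin
    # Any approximation accepted by posterior(approx, lf(x), y) could be passed here.
    approx = LaplaceApproximation(; maxiter=2)
    ApproximateGPs.TestUtils.test_approximation_predictions(approx)
end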
2 changes: 2 additions & 0 deletions src/ApproximateGPs.jl
@@ -23,4 +23,6 @@ include("LaplaceApproximationModule.jl")

include("deprecations.jl")

include("TestUtils.jl")

end
74 changes: 74 additions & 0 deletions src/TestUtils.jl
@@ -0,0 +1,74 @@
module TestUtils

using LinearAlgebra
using Random
using Test

using Distributions
using LogExpFunctions: logistic, softplus

using AbstractGPs
using ApproximateGPs

function generate_data()
X = range(0, 23.5; length=48)
# The default random number generator changed between Julia 1.6 and 1.7; the following vector was generated under Julia 1.6.
# The generating code below is only kept for illustrative purposes.
#! format: off
Y = [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0]
#! format: on
# Random.seed!(1)
# fs = @. 3 * sin(10 + 0.6X) + sin(0.1X) - 1
# # invlink = normcdf
# invlink = logistic
# ps = invlink.(fs)
# Y = @. rand(Bernoulli(ps))
return X, Y
end

dist_y_given_f(f) = Bernoulli(logistic(f))

function build_latent_gp(theta)
variance = softplus(theta[1])
lengthscale = softplus(theta[2])
kernel = variance * with_lengthscale(SqExponentialKernel(), lengthscale)
return LatentGP(GP(kernel), dist_y_given_f, 1e-8)
end

function test_approximation_predictions(approx)
rng = MersenneTwister(123456)
N_cond = 5
N_a = 6
N_b = 7

# Specify prior.
f = GP(Matern32Kernel())
# Sample from prior.
x = collect(range(-1.0, 1.0; length=N_cond))
noise_scale = 0.1
fx = f(x, noise_scale^2)
y = rand(rng, fx)

jitter = 0.0 # not needed in Gaussian case
lf = LatentGP(f, f -> Normal(f, noise_scale), jitter)
f_approx_post = posterior(approx, lf(x), y)

@testset "AbstractGPs API" begin
a = collect(range(-1.2, 1.2; length=N_a))
b = randn(rng, N_b)
AbstractGPs.TestUtils.test_internal_abstractgps_interface(rng, f_approx_post, a, b)
end

@testset "exact GPR equivalence for Gaussian likelihood" begin
f_exact_post = posterior(f(x, noise_scale^2), y)
xt = vcat(x, randn(rng, 3)) # test at training and new points

m_approx, c_approx = mean_and_cov(f_approx_post(xt))
m_exact, c_exact = mean_and_cov(f_exact_post(xt))

@test m_approx ≈ m_exact
@test c_approx ≈ c_exact
end
end

end
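For illustration only (not part of this diff), a sketch of how the classification helpers compose; the theta value below is a hypothetical choice, and softplus maps the unconstrained parameters to a positive variance and lengthscale:

using ApproximateGPs

X, Y = ApproximateGPs.TestUtils.generate_data()             # 48 inputs with fixed Bernoulli labels
lf = ApproximateGPs.TestUtils.build_latent_gp([0.0, 0.0])   # theta = [raw variance, raw lengthscale], hypothetical values
lfx = lf(X)                                                 # LatentFiniteGP at the training inputs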
66 changes: 5 additions & 61 deletions test/LaplaceApproximationModule.jl
@@ -1,28 +1,7 @@
@testset "laplace" begin
function generate_data()
X = range(0, 23.5; length=48)
# The random number generator changed in 1.6->1.7. The following vector was generated in Julia 1.6.
# The generating code below is only kept for illustrative purposes.
#! format: off
Y = [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0]
#! format: on
# Random.seed!(1)
# fs = @. 3 * sin(10 + 0.6X) + sin(0.1X) - 1
# # invlink = normcdf
# invlink = logistic
# ps = invlink.(fs)
# Y = [rand(Bernoulli(p)) for p in ps]
return X, Y
end

dist_y_given_f(f) = Bernoulli(logistic(f))

function build_latent_gp(theta)
variance = softplus(theta[1])
lengthscale = softplus(theta[2])
kernel = variance * with_lengthscale(SqExponentialKernel(), lengthscale)
return LatentGP(GP(kernel), dist_y_given_f, 1e-8)
end
generate_data = ApproximateGPs.TestUtils.generate_data
dist_y_given_f = ApproximateGPs.TestUtils.dist_y_given_f
build_latent_gp = ApproximateGPs.TestUtils.build_latent_gp

function optimize_elbo(
build_latent_gp,
@@ -49,43 +28,8 @@
end

@testset "predictions" begin
rng = MersenneTwister(123456)
N_cond = 5
N_a = 6
N_b = 7

# Specify prior.
f = GP(Matern32Kernel())
# Sample from prior.
x = collect(range(-1.0, 1.0; length=N_cond))
noise_scale = 0.1
fx = f(x, noise_scale^2)
y = rand(rng, fx)

jitter = 0.0 # not needed in Gaussian case
lf = LatentGP(f, f -> Normal(f, noise_scale), jitter)
# in Gaussian case, Laplace converges to f_opt in one step; we need the
# second step to compute the cache at f_opt rather than f_init!
f_approx_post = posterior(LaplaceApproximation(; maxiter=2), lf(x), y)

@testset "AbstractGPs API" begin
a = collect(range(-1.2, 1.2; length=N_a))
b = randn(rng, N_b)
AbstractGPs.TestUtils.test_internal_abstractgps_interface(
rng, f_approx_post, a, b
)
end

@testset "equivalence to exact GPR for Gaussian likelihood" begin
f_exact_post = posterior(f(x, noise_scale^2), y)
xt = vcat(x, randn(rng, 3)) # test at training and new points

m_approx, c_approx = mean_and_cov(f_approx_post(xt))
m_exact, c_exact = mean_and_cov(f_exact_post(xt))

@test m_approx ≈ m_exact
@test c_approx ≈ c_exact
end
# In the Gaussian case, Laplace converges to f_opt in one step; a second
# step is needed so the cache is computed at f_opt rather than f_init.
approx = LaplaceApproximation(; maxiter=2)
ApproximateGPs.TestUtils.test_approximation_predictions(approx)
end

@testset "gradients" begin
4 changes: 2 additions & 2 deletions test/runtests.jl
@@ -1,10 +1,8 @@
using Random
using Test
using ApproximateGPs
using Flux
using IterTools
using AbstractGPs
using AbstractGPs: LatentFiniteGP, TestUtils
using Distributions
using LogExpFunctions: logistic
using LinearAlgebra
@@ -14,6 +12,8 @@ using Zygote
using ChainRulesCore
using ChainRulesTestUtils
using FiniteDifferences

using ApproximateGPs
using ApproximateGPs: SparseVariationalApproximationModule, LaplaceApproximationModule

# Writing tests: