Skip to content

Commit affcb21

Browse files
committed
TST: Add basic tests to GPR kernels
Add basic tests to GPR kernels.
1 parent 4a85790 commit affcb21

File tree

1 file changed

+164
-0
lines changed

1 file changed

+164
-0
lines changed

test/test_gpr.py

Lines changed: 164 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -134,6 +134,17 @@
134134
]
135135

136136

137+
def _unit_vectors_nd(n_samples: int, n_features: int) -> np.ndarray:
138+
# Orthogonal unit vectors as rows
139+
if n_samples < 1 or n_features < 1:
140+
raise ValueError("n_samples and n_features must be >= 1")
141+
if n_samples > n_features:
142+
# In R^d we can have at most d mutually orthogonal non‑zero vectors
143+
raise ValueError("n_samples must be <= n_features to produce orthogonal vectors")
144+
145+
return np.eye(n_features, dtype=float)[:n_samples]
146+
147+
137148
# No need to use normalized vectors: compute_pairwise_angles takes care of it.
138149
# The [-1, 0, 1].T vector serves as a case where e.g. the angle between vector
139150
# [1, 0, 0] and the former is 135 unless the closest polarity flag is set to
@@ -311,3 +322,156 @@ def obj_func(theta, eval_gradient):
311322
ValueError, match=gpr.UNKNOWN_OPTIMIZER_ERROR_MSG.format(optimizer=optimizer)
312323
):
313324
gp._constrained_optimization(obj_func, initial_theta, bounds)
325+
326+
327+
@pytest.mark.parametrize(
    "beta_a, beta_l, a_bounds, l_bounds, eval_gradient",
    [
        (0.5, 2.0, (0.1, 1.0), (0.1, 10.0), False),
        (1.0, 1.0, (0.1, 2.0), (0.01, 5.0), True),
    ],
)
def test_exponential_kriging_properties(beta_a, beta_l, a_bounds, l_bounds, eval_gradient):
    """Check hyperparameter metadata, bounds, stationarity and repr of ExponentialKriging."""
    ek = gpr.ExponentialKriging(beta_a=beta_a, beta_l=beta_l, a_bounds=a_bounds, l_bounds=l_bounds)

    # Hyperparameter properties
    hp_a = ek.hyperparameter_a
    hp_l = ek.hyperparameter_l
    assert getattr(hp_a, "name", None) == "beta_a"
    assert getattr(hp_l, "name", None) == "beta_l"
    # Bounds should match what we passed (fallback to instance attr if absent)
    hp_a_bounds = np.asarray(getattr(hp_a, "bounds", ek.a_bounds), dtype=float)
    expected_a_bounds = np.asarray(ek.a_bounds, dtype=float)
    assert np.allclose(hp_a_bounds, expected_a_bounds)
    hp_l_bounds = np.asarray(getattr(hp_l, "bounds", ek.l_bounds), dtype=float)
    expected_l_bounds = np.asarray(ek.l_bounds, dtype=float)
    assert np.allclose(hp_l_bounds, expected_l_bounds)

    X = _unit_vectors_nd(2, 2)
    # Bug fix: with eval_gradient=False the kernel returns only K (see the
    # *_basic tests), so the previous unconditional ``K, _ = ek(...)``
    # silently unpacked the 2x2 kernel matrix into its rows.
    if eval_gradient:
        K, _ = ek(X, eval_gradient=True)
    else:
        K = ek(X, eval_gradient=False)
    assert K.shape == (2, 2)

    # stationarity and repr
    assert ek.is_stationary() is True
    assert "ExponentialKriging" in repr(ek)
    assert f"a={ek.beta_a}" in repr(ek) or "beta_a" in repr(ek)
358+
359+
@pytest.mark.parametrize(
    "beta_a, beta_l, a_bounds, l_bounds, eval_gradient",
    [
        (10.0, 0.5, (0.1, 20.0), (0.1, 5.0), False),  # large a to trigger deriv_a active
        (1.5, 2.0, (0.1, 5.0), (0.1, 10.0), True),
    ],
)
def test_spherical_kriging_properties(beta_a, beta_l, a_bounds, l_bounds, eval_gradient):
    """Check hyperparameter metadata, bounds, stationarity and repr of SphericalKriging."""
    sk = gpr.SphericalKriging(beta_a=beta_a, beta_l=beta_l, a_bounds=a_bounds, l_bounds=l_bounds)

    # Hyperparameter properties
    hp_a = sk.hyperparameter_a
    hp_l = sk.hyperparameter_l
    assert getattr(hp_a, "name", None) == "beta_a"
    assert getattr(hp_l, "name", None) == "beta_l"
    # Bounds should match what we passed (fallback to instance attr if absent)
    hp_a_bounds = np.asarray(getattr(hp_a, "bounds", sk.a_bounds), dtype=float)
    expected_a_bounds = np.asarray(sk.a_bounds, dtype=float)
    assert np.allclose(hp_a_bounds, expected_a_bounds)
    hp_l_bounds = np.asarray(getattr(hp_l, "bounds", sk.l_bounds), dtype=float)
    expected_l_bounds = np.asarray(sk.l_bounds, dtype=float)
    assert np.allclose(hp_l_bounds, expected_l_bounds)

    X = _unit_vectors_nd(2, 2)
    # Bug fix: with eval_gradient=False the kernel returns only K (see the
    # *_basic tests), so the previous unconditional ``K, _ = sk(...)``
    # silently unpacked the 2x2 kernel matrix into its rows.
    if eval_gradient:
        K, _ = sk(X, eval_gradient=True)
    else:
        K = sk(X, eval_gradient=False)
    assert K.shape == (2, 2)

    # stationarity and repr
    assert sk.is_stationary() is True
    assert "SphericalKriging" in repr(sk)
388+
@pytest.mark.parametrize(
    "beta_a, beta_l, a_bounds, l_bounds, n_samples, n_features, eval_gradient",
    [
        (0.5, 2.0, (0.1, 1.0), (0.1, 10.0), 2, 3, False),
        (1.0, 1.0, (0.1, 2.0), (0.01, 5.0), 3, 4, True),
    ],
)
def test_exponential_kriging_basic(
    beta_a, beta_l, a_bounds, l_bounds, n_samples, n_features, eval_gradient
):
    """Compare ExponentialKriging's kernel matrix, gradient and diagonal against
    values recomputed directly from the covariance helpers."""
    kernel = gpr.ExponentialKriging(
        beta_a=beta_a, beta_l=beta_l, a_bounds=a_bounds, l_bounds=l_bounds
    )

    X = _unit_vectors_nd(n_samples, n_features)
    grad = None
    if eval_gradient:
        K, grad = kernel(X, eval_gradient=True)
    else:
        K = kernel(X, eval_gradient=False)

    assert K.shape == (n_samples, n_samples)

    thetas = gpr.compute_pairwise_angles(X)
    expected = kernel.beta_l * gpr.exponential_covariance(thetas, kernel.beta_a)
    assert np.allclose(K, expected)

    if eval_gradient:
        assert grad is not None
        # One gradient slice per hyperparameter: a, then lambda.
        assert grad.shape == (*thetas.shape, 2)

        C_theta = gpr.exponential_covariance(thetas, kernel.beta_a)
        deriv_a = kernel.beta_l * C_theta * thetas / (kernel.beta_a**2)
        deriv_lambda = C_theta
        expected_grad = np.dstack((deriv_a, deriv_lambda))
        assert np.allclose(grad, expected_grad)

    # diag: constant beta_l for every sample
    diag = kernel.diag(X)
    assert diag.shape == (n_samples,)
    assert np.allclose(diag, kernel.beta_l * np.ones(n_samples))
433+
@pytest.mark.parametrize(
    "beta_a, beta_l, a_bounds, l_bounds, n_samples, n_features, eval_gradient",
    [
        (10.0, 0.5, (0.1, 20.0), (0.1, 5.0), 2, 3, False),  # large a to trigger deriv_a active
        (1.5, 2.0, (0.1, 5.0), (0.1, 10.0), 3, 4, True),
    ],
)
def test_spherical_kriging_basic(
    beta_a, beta_l, a_bounds, l_bounds, n_samples, n_features, eval_gradient
):
    """Compare SphericalKriging's kernel matrix, gradient and diagonal against
    values recomputed directly from the covariance helpers."""
    kernel = gpr.SphericalKriging(
        beta_a=beta_a, beta_l=beta_l, a_bounds=a_bounds, l_bounds=l_bounds
    )

    X = _unit_vectors_nd(n_samples, n_features)
    grad = None
    if eval_gradient:
        K, grad = kernel(X, eval_gradient=True)
    else:
        K = kernel(X, eval_gradient=False)

    assert K.shape == (n_samples, n_samples)

    thetas = gpr.compute_pairwise_angles(X)
    expected = kernel.beta_l * gpr.spherical_covariance(thetas, kernel.beta_a)
    assert np.allclose(K, expected)

    if eval_gradient:
        assert grad is not None
        # One gradient slice per hyperparameter: a, then lambda.
        assert grad.shape == (*thetas.shape, 2)

        # Mirror deriv_a as implemented in SphericalKriging: zero past the
        # cutoff angle beta_a, polynomial inside it.
        deriv_a = np.zeros_like(thetas)
        inside = thetas <= kernel.beta_a
        deriv_a[inside] = (
            1.5
            * kernel.beta_l
            * (thetas[inside] / kernel.beta_a**2 - thetas[inside] ** 3 / kernel.beta_a**4)
        )
        expected_grad = np.dstack((deriv_a, gpr.spherical_covariance(thetas, kernel.beta_a)))
        assert np.allclose(grad, expected_grad)

    # diag: constant beta_l for every sample
    diag = kernel.diag(X)
    assert diag.shape == (n_samples,)
    assert np.allclose(diag, kernel.beta_l * np.ones(n_samples))

0 commit comments

Comments
 (0)