# GPy/GPy/testing/kernel_tests.py

# Copyright (c) 2012, 2013 GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import unittest
import numpy as np
import GPy
import sys
from GPy.core.parameterization.param import Param
verbose = 0
class Kern_check_model(GPy.core.Model):
    """
    This is a dummy model class used as a base class for checking that the
    gradients of a given kernel are implemented correctly. It enables
    checkgrad() to be called independently on a kernel.
    """
    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
        GPy.core.Model.__init__(self, 'kernel_test_model')
        # fix: compare against None with `is`, not `==`.
        if kernel is None:
            kernel = GPy.kern.RBF(1)
        if X is None:
            X = np.random.randn(20, kernel.input_dim)
        if dL_dK is None:
            # Uniform weighting of every covariance entry.
            if X2 is None:
                dL_dK = np.ones((X.shape[0], X.shape[0]))
            else:
                dL_dK = np.ones((X.shape[0], X2.shape[0]))
        self.kernel = kernel
        self.X = X
        self.X2 = X2
        self.dL_dK = dL_dK

    def is_positive_semi_definite(self):
        """Return True when no eigenvalue of K(X, X) lies below -1e-10."""
        v = np.linalg.eig(self.kernel.K(self.X))[0]
        if np.any(v.real <= -1e-10):
            # fix: print() call for py2/py3 compatibility, consistent with the
            # rest of this file.
            print(v.real.min())
            return False
        return True

    def log_likelihood(self):
        # Surrogate objective: sum of dL_dK-weighted covariance entries, so
        # checkgrad() differentiates K itself.
        return np.sum(self.dL_dK * self.kernel.K(self.X, self.X2))
class Kern_check_dK_dtheta(Kern_check_model):
    """
    Dummy model that lets checkgrad() verify the gradient of K(X, X2)
    with respect to the kernel parameters.
    """
    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
        Kern_check_model.__init__(self, kernel=kernel, dL_dK=dL_dK, X=X, X2=X2)
        # Expose the kernel's parameters to the optimizer/gradient checker.
        self.link_parameter(self.kernel)

    def parameters_changed(self):
        # Propagate dL/dK into the kernel's parameter gradients.
        return self.kernel.update_gradients_full(self.dL_dK, self.X, self.X2)
class Kern_check_dKdiag_dtheta(Kern_check_model):
    """
    Dummy model that lets checkgrad() verify the gradient of the diagonal
    of a kernel, Kdiag(X), with respect to the kernel parameters.
    """
    def __init__(self, kernel=None, dL_dK=None, X=None):
        Kern_check_model.__init__(self, kernel=kernel, dL_dK=dL_dK, X=X, X2=None)
        self.link_parameter(self.kernel)

    def log_likelihood(self):
        # Only the diagonal of dL_dK contributes to this objective.
        diag_weights = np.diag(self.dL_dK)
        return (diag_weights * self.kernel.Kdiag(self.X)).sum()

    def parameters_changed(self):
        self.kernel.update_gradients_diag(np.diag(self.dL_dK), self.X)
class Kern_check_dK_dX(Kern_check_model):
    """This class allows gradient checks for the gradient of a kernel with respect to X. """
    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
        Kern_check_model.__init__(self, kernel=kernel, dL_dK=dL_dK, X=X, X2=X2)
        # Wrap X in a Param and link it so checkgrad() perturbs the inputs.
        self.X = Param('X', X)
        self.link_parameter(self.X)

    def parameters_changed(self):
        # Fill the gradient buffer of X in place from the kernel.
        self.X.gradient[:] = self.kernel.gradients_X(self.dL_dK, self.X, self.X2)
class Kern_check_dKdiag_dX(Kern_check_dK_dX):
    """This class allows gradient checks for the gradient of a kernel diagonal with respect to X. """
    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
        # X2 is deliberately dropped: the diagonal is defined for K(X, X) only.
        Kern_check_dK_dX.__init__(self, kernel=kernel, dL_dK=dL_dK, X=X, X2=None)

    def log_likelihood(self):
        # Weight Kdiag(X) by the diagonal of dL_dK.
        return (np.diag(self.dL_dK) * self.kernel.Kdiag(self.X)).sum()

    def parameters_changed(self):
        self.X.gradient[:] = self.kernel.gradients_X_diag(self.dL_dK.diagonal(), self.X)
def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verbose=False, fixed_X_dims=None):
    """
    This function runs on kernels to check the correctness of their
    implementation. It checks that the covariance function is positive
    semi-definite for a randomly generated data set, and that the gradients
    of K(X, X), K(X, X2) and Kdiag(X) with respect to both the parameters
    and the inputs agree with finite differences (via checkgrad()).

    :param kern: the kernel to be tested.
    :type kern: GPy.kern.Kernpart
    :param X: X input values to test the covariance function.
    :type X: ndarray
    :param X2: X2 input values to test the covariance function.
    :type X2: ndarray
    :param output_ind: column of X holding discrete output indices
        (multi-output kernels); None for ordinary kernels.
    :param verbose: print the progress of each individual check.
    :param fixed_X_dims: dimensions of X to fix (exclude from perturbation)
        in the gradient-wrt-X checks.
    :returns: True when every check passes, False as soon as one fails.
    """
    if X is None:
        X = np.random.randn(10, kern.input_dim)
        if output_ind is not None:
            # NOTE(review): np.random.randint(low, high) draws a single scalar
            # in [output_dim, X.shape[0]); randint(0, output_dim, X.shape[0])
            # may have been intended -- confirm before changing.
            X[:, output_ind] = np.random.randint(kern.output_dim, X.shape[0])
    if X2 is None:
        X2 = np.random.randn(20, kern.input_dim)
        if output_ind is not None:
            X2[:, output_ind] = np.random.randint(kern.output_dim, X2.shape[0])

    if verbose:
        print("Checking covariance function is positive definite.")
    result = Kern_check_model(kern, X=X).is_positive_semi_definite()
    if result and verbose:
        print("Check passed.")
    if not result:
        # fix: removed bare `assert(result)` statements throughout; they made
        # every `return False` unreachable and aborted callers that inspect
        # the return value (e.g. assertTrue in the test classes below).
        print("Positive definite check failed for " + kern.name + " covariance function.")
        return False

    if verbose:
        print("Checking gradients of K(X, X) wrt theta.")
    result = Kern_check_dK_dtheta(kern, X=X, X2=None).checkgrad(verbose=verbose)
    if result and verbose:
        print("Check passed.")
    if not result:
        # Re-run verbosely so the offending gradient values are printed.
        print("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:")
        Kern_check_dK_dtheta(kern, X=X, X2=None).checkgrad(verbose=True)
        return False

    if verbose:
        print("Checking gradients of K(X, X2) wrt theta.")
    result = Kern_check_dK_dtheta(kern, X=X, X2=X2).checkgrad(verbose=verbose)
    if result and verbose:
        print("Check passed.")
    if not result:
        # fix: message previously said "K(X, X)" for this K(X, X2) check.
        print("Gradient of K(X, X2) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:")
        Kern_check_dK_dtheta(kern, X=X, X2=X2).checkgrad(verbose=True)
        return False

    if verbose:
        print("Checking gradients of Kdiag(X) wrt theta.")
    try:
        result = Kern_check_dKdiag_dtheta(kern, X=X).checkgrad(verbose=verbose)
    except NotImplementedError:
        # Kernels without update_gradients_diag are not failures here.
        result = True
        if verbose:
            print("update_gradients_diag not implemented for " + kern.name)
    if result and verbose:
        print("Check passed.")
    if not result:
        print("Gradient of Kdiag(X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:")
        Kern_check_dKdiag_dtheta(kern, X=X).checkgrad(verbose=True)
        return False

    if verbose:
        print("Checking gradients of K(X, X) wrt X.")
    try:
        testmodel = Kern_check_dK_dX(kern, X=X, X2=None)
        if fixed_X_dims is not None:
            testmodel.X[:, fixed_X_dims].fix()
        result = testmodel.checkgrad(verbose=verbose)
    except NotImplementedError:
        result = True
        if verbose:
            print("gradients_X not implemented for " + kern.name)
    if result and verbose:
        print("Check passed.")
    if not result:
        # fix: removed a leftover `import ipdb;ipdb.set_trace()` breakpoint.
        print("Gradient of K(X, X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
        testmodel.checkgrad(verbose=True)
        return False

    if verbose:
        print("Checking gradients of K(X, X2) wrt X.")
    try:
        testmodel = Kern_check_dK_dX(kern, X=X, X2=X2)
        if fixed_X_dims is not None:
            testmodel.X[:, fixed_X_dims].fix()
        result = testmodel.checkgrad(verbose=verbose)
    except NotImplementedError:
        result = True
        if verbose:
            print("gradients_X not implemented for " + kern.name)
    if result and verbose:
        print("Check passed.")
    if not result:
        print("Gradient of K(X, X2) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
        testmodel.checkgrad(verbose=True)
        return False

    if verbose:
        print("Checking gradients of Kdiag(X) wrt X.")
    try:
        result = Kern_check_dKdiag_dX(kern, X=X).checkgrad(verbose=verbose)
    except NotImplementedError:
        result = True
        if verbose:
            print("gradients_X not implemented for " + kern.name)
    if result and verbose:
        print("Check passed.")
    if not result:
        print("Gradient of Kdiag(X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
        Kern_check_dKdiag_dX(kern, X=X).checkgrad(verbose=True)
        return False

    # Every failure path above returns early, so reaching here means success.
    return True
class KernelGradientTestsContinuous(unittest.TestCase):
    """Gradient correctness checks for kernels on continuous inputs."""
    def setUp(self):
        self.N, self.D = 10, 5
        self.X = np.random.randn(self.N, self.D)
        self.X2 = np.random.randn(self.N + 10, self.D)
        continuous_kerns = ['RBF', 'Linear']
        self.kernclasses = [getattr(GPy.kern, s) for s in continuous_kerns]

    def test_Matern32(self):
        k = GPy.kern.Matern32(self.D)
        k.randomize()
        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

    def test_Prod(self):
        k = GPy.kern.Matern32(2, active_dims=[2, 3]) * GPy.kern.RBF(2, active_dims=[0, 4]) + GPy.kern.Linear(self.D)
        k.randomize()
        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

    def test_Prod2(self):
        k = (GPy.kern.RBF(2, active_dims=[0, 4]) * GPy.kern.Linear(self.D))
        k.randomize()
        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

    def test_Prod3(self):
        # NOTE(review): identical to test_Prod2 -- possibly intended to cover
        # a different product structure; confirm before removing.
        k = (GPy.kern.RBF(2, active_dims=[0, 4]) * GPy.kern.Linear(self.D))
        k.randomize()
        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

    def test_Add(self):
        k = GPy.kern.Matern32(2, active_dims=[2, 3]) + GPy.kern.RBF(2, active_dims=[0, 4]) + GPy.kern.Linear(self.D)
        k += GPy.kern.Matern32(2, active_dims=[2, 3]) + GPy.kern.RBF(2, active_dims=[0, 4]) + GPy.kern.Linear(self.D)
        k.randomize()
        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

    def test_Add_dims(self):
        # active_dims index self.D is out of bounds for a D-column X, so
        # evaluating K must raise.
        k = GPy.kern.Matern32(2, active_dims=[2, self.D]) + GPy.kern.RBF(2, active_dims=[0, 4]) + GPy.kern.Linear(self.D)
        k.randomize()
        self.assertRaises(IndexError, k.K, self.X)
        k = GPy.kern.Matern32(2, active_dims=[2, self.D - 1]) + GPy.kern.RBF(2, active_dims=[0, 4]) + GPy.kern.Linear(self.D)
        k.randomize()
        # assert it runs:
        try:
            k.K(self.X)
        except AssertionError:
            # fix: py2-only `raise E, msg` syntax replaced with a call form
            # valid in both Python 2 and 3.
            raise AssertionError("k.K(X) should run on self.D-1 dimension")

    def test_Matern52(self):
        k = GPy.kern.Matern52(self.D)
        k.randomize()
        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

    def test_RBF(self):
        k = GPy.kern.RBF(self.D)
        k.randomize()
        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

    def test_Linear(self):
        k = GPy.kern.Linear(self.D)
        k.randomize()
        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

    def test_LinearFull(self):
        k = GPy.kern.LinearFull(self.D, self.D - 1)
        k.randomize()
        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
class KernelTestsMiscellaneous(unittest.TestCase):
    """Checks of kernel features that are not gradient related."""
    def setUp(self):
        n_points, n_dims = 100, 10
        # Each dimension is the same angular grid scaled by a random factor.
        self.X = np.linspace(-np.pi, +np.pi, n_points)[:, None] * np.random.uniform(-10, 10, n_dims)
        self.rbf = GPy.kern.RBF(2, active_dims=np.arange(0, 4, 2))
        self.linear = GPy.kern.Linear(2, active_dims=(3, 9))
        self.matern = GPy.kern.Matern32(3, active_dims=np.array([1, 7, 9]))
        self.sumkern = self.rbf + self.linear
        self.sumkern += self.matern
        self.sumkern.randomize()

    def test_which_parts(self):
        # Evaluating a subset of parts must equal the sum of those parts alone.
        expected = self.linear.K(self.X) + self.matern.K(self.X)
        self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.matern]), expected))
        expected = self.linear.K(self.X) + self.rbf.K(self.X)
        self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.rbf]), expected))
        self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=self.sumkern.parts[0]), self.rbf.K(self.X)))
class KernelTestsNonContinuous(unittest.TestCase):
    """Gradient checks for kernels that use a discrete output-index column."""
    def setUp(self):
        N0 = 3
        N1 = 9
        N2 = 4
        N = N0 + N1 + N2
        self.D = 3
        # The last column of X holds the output index (0, 1 or 2).
        self.X = np.random.randn(N, self.D + 1)
        # fix: np.random.random_integers is deprecated (removed in modern
        # numpy); randint(0, 3) draws from {0, 1, 2} exactly as
        # random_integers(0, 2) did.
        indices = np.random.randint(0, 3, size=N)
        self.X[indices == 0, -1] = 0
        self.X[indices == 1, -1] = 1
        self.X[indices == 2, -1] = 2
        #self.X = self.X[self.X[:, -1].argsort(), :]
        self.X2 = np.random.randn((N0 + N1) * 2, self.D + 1)
        self.X2[:(N0 * 2), -1] = 0
        self.X2[(N0 * 2):, -1] = 1

    def test_IndependentOutputs(self):
        # A single kernel shared across outputs...
        k = GPy.kern.RBF(self.D, active_dims=range(self.D))
        kern = GPy.kern.IndependentOutputs(k, -1, 'ind_single')
        self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))
        # ...and one kernel per output.
        k = [GPy.kern.RBF(1, active_dims=[1], name='rbf1'), GPy.kern.RBF(self.D, active_dims=range(self.D), name='rbf012'), GPy.kern.RBF(2, active_dims=[0, 2], name='rbf02')]
        kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
        self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))

    def test_Hierarchical(self):
        # NOTE(review): this builds IndependentOutputs, not a Hierarchical
        # kernel -- possibly a placeholder; confirm against GPy.kern.Hierarchical.
        k = [GPy.kern.RBF(2, active_dims=[0, 2], name='rbf1'), GPy.kern.RBF(2, active_dims=[0, 2], name='rbf2')]
        kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
        self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))

    def test_ODE_UY(self):
        kern = GPy.kern.ODE_UY(2, active_dims=[0, self.D])
        # Keep only the rows labelled 0 or 1: drop output index 2.
        X = self.X[self.X[:, -1] != 2]
        X2 = self.X2[self.X2[:, -1] != 2]
        self.assertTrue(check_kernel_gradient_functions(kern, X=X, X2=X2, verbose=verbose, fixed_X_dims=-1))
if __name__ == "__main__":
    # fix: py2 print statement replaced with a print() call, consistent with
    # the rest of this file.
    print("Running unit tests, please be (very) patient...")
    unittest.main()
# np.random.seed(0)
# N0 = 3
# N1 = 9
# N2 = 4
# N = N0+N1+N2
# D = 3
# X = np.random.randn(N, D+1)
# indices = np.random.random_integers(0, 2, size=N)
# X[indices==0, -1] = 0
# X[indices==1, -1] = 1
# X[indices==2, -1] = 2
# #X = X[X[:, -1].argsort(), :]
# X2 = np.random.randn((N0+N1)*2, D+1)
# X2[:(N0*2), -1] = 0
# X2[(N0*2):, -1] = 1
# k = [GPy.kern.RBF(1, active_dims=[1], name='rbf1'), GPy.kern.RBF(D, name='rbf012'), GPy.kern.RBF(2, active_dims=[0,2], name='rbf02')]
# kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
# assert(check_kernel_gradient_functions(kern, X=X, X2=X2, verbose=verbose, fixed_X_dims=-1))
# k = GPy.kern.RBF(D)
# kern = GPy.kern.IndependentOutputs(k, -1, 'ind_single')
# assert(check_kernel_gradient_functions(kern, X=X, X2=X2, verbose=verbose, fixed_X_dims=-1))