we need to update all the tests: the discontinuous kernel tests are missing; the mrd and bgplvm model tests are not needed anymore

This commit is contained in:
Max Zwiessele 2014-03-13 12:28:56 +00:00
parent 1102387a76
commit 3d6a69e5f0
6 changed files with 74 additions and 152 deletions

View file

@ -1,85 +0,0 @@
# Copyright (c) 2012, Nicolo Fusi
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import unittest
import numpy as np
import GPy
from ..models import BayesianGPLVM
class BGPLVMTests(unittest.TestCase):
    """Gradient checks for BayesianGPLVM under a range of kernels.

    Each test draws outputs Y from a GP prior with a *sampling* kernel,
    centres them (BGPLVM assumes zero-mean data), then asserts that the
    analytic gradients of a BayesianGPLVM built with a *model* kernel
    agree with numerical gradients via ``m.checkgrad()``.
    """

    def _checkgrad(self, sample_kern, model_kern, N=10, num_inducing=3, input_dim=2):
        """Sample Y from ``sample_kern`` and checkgrad a BGPLVM using ``model_kern``.

        Parameters
        ----------
        sample_kern : kernel used to draw the training outputs Y
        model_kern : kernel given to the BayesianGPLVM model
        N : number of data points
        num_inducing : number of inducing points
        input_dim : latent (and sampling) dimensionality
        """
        X = np.random.rand(N, input_dim)
        K = sample_kern.K(X)
        Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
        Y -= Y.mean(axis=0)  # centre outputs: BGPLVM assumes zero-mean data
        m = BayesianGPLVM(Y, input_dim, kernel=model_kern, num_inducing=num_inducing)
        m.randomize()
        self.assertTrue(m.checkgrad())

    def test_bias_kern(self):
        input_dim = 2
        sample_k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
        # NOTE(review): fixed casing — was lowercase GPy.kern.bias, inconsistent
        # with GPy.kern.Bias used in the other tests of this file.
        model_k = GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
        self._checkgrad(sample_k, model_k, input_dim=input_dim)

    def test_linear_kern(self):
        input_dim = 2
        sample_k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
        model_k = GPy.kern.Linear(input_dim) + GPy.kern.White(input_dim, 0.00001)
        self._checkgrad(sample_k, model_k, input_dim=input_dim)

    def test_rbf_kern(self):
        input_dim = 2
        sample_k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
        model_k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
        self._checkgrad(sample_k, model_k, input_dim=input_dim)

    def test_rbf_bias_kern(self):
        input_dim = 2
        sample_k = GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
        model_k = GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
        self._checkgrad(sample_k, model_k, input_dim=input_dim)

    def test_rbf_line_kern(self):
        input_dim = 2
        sample_k = GPy.kern.RBF(input_dim) + GPy.kern.Linear(input_dim) + GPy.kern.White(input_dim, 0.00001)
        # NOTE(review): the original modelled with RBF+Bias here, contradicting the
        # test name and the sampling kernel — looked like a copy-paste slip, so the
        # model kernel now matches the sampling kernel (RBF+Linear).
        model_k = GPy.kern.RBF(input_dim) + GPy.kern.Linear(input_dim) + GPy.kern.White(input_dim, 0.00001)
        self._checkgrad(sample_k, model_k, input_dim=input_dim)

    def test_linear_bias_kern(self):
        input_dim = 4
        sample_k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
        model_k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
        self._checkgrad(sample_k, model_k, N=30, num_inducing=5, input_dim=input_dim)
# Allow running this test module as a script.
if __name__ == "__main__":
    print("Running unit tests, please be (very) patient...")
    unittest.main()

View file

@ -33,9 +33,10 @@ class Kern_check_model(GPy.core.Model):
self.X2 = X2 self.X2 = X2
self.dL_dK = dL_dK self.dL_dK = dL_dK
def is_positive_definite(self): def is_positive_semi_definite(self):
v = np.linalg.eig(self.kernel.K(self.X))[0] v = np.linalg.eig(self.kernel.K(self.X))[0]
if any(v<-10*sys.float_info.epsilon): if any(v.real<=-1e-10):
print v.real.min()
return False return False
else: else:
return True return True
@ -89,7 +90,7 @@ class Kern_check_dKdiag_dX(Kern_check_dK_dX):
return (np.diag(self.dL_dK)*self.kernel.Kdiag(self.X)).sum() return (np.diag(self.dL_dK)*self.kernel.Kdiag(self.X)).sum()
def parameters_changed(self): def parameters_changed(self):
self.X.gradient = self.kernel.gradients_X_diag(self.dL_dK, self.X) self.X.gradient = self.kernel.gradients_X_diag(self.dL_dK.diagonal(), self.X)
@ -119,7 +120,7 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
if verbose: if verbose:
print("Checking covariance function is positive definite.") print("Checking covariance function is positive definite.")
result = Kern_check_model(kern, X=X).is_positive_definite() result = Kern_check_model(kern, X=X).is_positive_semi_definite()
if result and verbose: if result and verbose:
print("Check passed.") print("Check passed.")
if not result: if not result:
@ -214,18 +215,55 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
class KernelGradientTestsContinuous(unittest.TestCase): class KernelGradientTestsContinuous(unittest.TestCase):
def setUp(self): def setUp(self):
self.X = np.random.randn(100,2) self.N, self.D = 100, 5
self.X2 = np.random.randn(110,2) self.X = np.random.randn(self.N,self.D)
self.X2 = np.random.randn(self.N+10,self.D)
continuous_kerns = ['RBF', 'Linear'] continuous_kerns = ['RBF', 'Linear']
self.kernclasses = [getattr(GPy.kern, s) for s in continuous_kerns] self.kernclasses = [getattr(GPy.kern, s) for s in continuous_kerns]
def test_Matern32(self): def test_Matern32(self):
k = GPy.kern.Matern32(2) k = GPy.kern.Matern32(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose)) self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Matern52(self): def test_Matern52(self):
k = GPy.kern.Matern52(2) k = GPy.kern.Matern52(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_RBF(self):
k = GPy.kern.RBF(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Linear(self):
k = GPy.kern.Linear(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
class KernelGradientTestsContinuous1D(unittest.TestCase):
def setUp(self):
self.N, self.D = 100, 1
self.X = np.random.randn(self.N,self.D)
self.X2 = np.random.randn(self.N+10,self.D)
continuous_kerns = ['RBF', 'Linear']
self.kernclasses = [getattr(GPy.kern, s) for s in continuous_kerns]
def test_PeriodicExponential(self):
k = GPy.kern.PeriodicExponential(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_PeriodicMatern32(self):
k = GPy.kern.PeriodicMatern32(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_PeriodicMatern52(self):
k = GPy.kern.PeriodicMatern52(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose)) self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
#TODO: turn off grad checking wrt X for indexed kernels like coregionalize #TODO: turn off grad checking wrt X for indexed kernels like coregionalize
@ -251,6 +289,7 @@ class KernelTestsMiscellaneous(unittest.TestCase):
self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.rbf]), self.linear.K(self.X)+self.rbf.K(self.X))) self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.rbf]), self.linear.K(self.X)+self.rbf.K(self.X)))
self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=self.sumkern.parts[0]), self.rbf.K(self.X))) self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=self.sumkern.parts[0]), self.rbf.K(self.X)))
if __name__ == "__main__": if __name__ == "__main__":
print "Running unit tests, please be (very) patient..." print "Running unit tests, please be (very) patient..."
unittest.main() unittest.main()

View file

@ -541,7 +541,8 @@ class TestNoiseModels(object):
#import ipdb; ipdb.set_trace() #import ipdb; ipdb.set_trace()
#NOTE this test appears to be stochastic for some likelihoods (student t?) #NOTE this test appears to be stochastic for some likelihoods (student t?)
# appears to all be working in test mode right now... # appears to all be working in test mode right now...
if isinstance(model, GPy.likelihoods.StudentT):
import ipdb;ipdb.set_trace()
assert m.checkgrad(step=step) assert m.checkgrad(step=step)
########### ###########
@ -700,7 +701,6 @@ class LaplaceTests(unittest.TestCase):
np.testing.assert_almost_equal(m1.log_likelihood(), m2.log_likelihood(), decimal=2) np.testing.assert_almost_equal(m1.log_likelihood(), m2.log_likelihood(), decimal=2)
#Check marginals are the same with random #Check marginals are the same with random
m1.randomize() m1.randomize()
import ipdb;ipdb.set_trace()
m2[:] = m1[:] m2[:] = m1[:]
np.testing.assert_almost_equal(m1.log_likelihood(), m2.log_likelihood(), decimal=2) np.testing.assert_almost_equal(m1.log_likelihood(), m2.log_likelihood(), decimal=2)

View file

@ -1,32 +0,0 @@
# Copyright (c) 2013, Max Zwiessele
# Licensed under the BSD 3-clause license (see LICENSE.txt)
'''
Created on 10 Apr 2013
@author: maxz
'''
import unittest
import numpy as np
import GPy
class MRDTests(unittest.TestCase):
    """Gradient check for the MRD (manifold relevance determination) model."""

    def test_gradients(self):
        """Sample several output views from one shared GP and checkgrad the MRD model."""
        num_views = 3
        N, num_inducing, input_dim = 20, 8, 6
        X = np.random.rand(N, input_dim)
        # NOTE(review): fixed casing — was lowercase GPy.kern.linear/bias/white,
        # inconsistent with the capitalized kernel API used elsewhere in the tests.
        k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim)
        K = k.K(X)
        # Each view is an independent draw from the same GP prior.
        Ylist = [np.random.multivariate_normal(np.zeros(N), K, input_dim).T
                 for _ in range(num_views)]
        likelihood_list = [GPy.likelihoods.Gaussian(Y) for Y in Ylist]
        m = GPy.models.MRD(likelihood_list, input_dim=input_dim,
                           kernels=k, num_inducing=num_inducing)
        self.assertTrue(m.checkgrad())
# Allow running this test module as a script.
if __name__ == "__main__":
    print("Running unit tests, please be (very) patient...")
    unittest.main()

View file

@ -60,7 +60,7 @@ class GradientTests(unittest.TestCase):
def test_GPRegression_mlp_1d(self): def test_GPRegression_mlp_1d(self):
''' Testing the GP regression with mlp kernel with white kernel on 1d data ''' ''' Testing the GP regression with mlp kernel with white kernel on 1d data '''
mlp = GPy.kern.mlp(1) mlp = GPy.kern.MLP(1)
self.check_model(mlp, model_type='GPRegression', dimension=1) self.check_model(mlp, model_type='GPRegression', dimension=1)
def test_GPRegression_poly_1d(self): def test_GPRegression_poly_1d(self):
@ -163,14 +163,14 @@ class GradientTests(unittest.TestCase):
rbflin = GPy.kern.RBF(2) + GPy.kern.Linear(2) rbflin = GPy.kern.RBF(2) + GPy.kern.Linear(2)
self.check_model(rbflin, model_type='SparseGPRegression', dimension=2) self.check_model(rbflin, model_type='SparseGPRegression', dimension=2)
#@unittest.expectedFailure # @unittest.expectedFailure
def test_SparseGPRegression_rbf_linear_white_kern_2D_uncertain_inputs(self): def test_SparseGPRegression_rbf_linear_white_kern_2D_uncertain_inputs(self):
''' Testing the sparse GP regression with rbf, linear kernel on 2d data with uncertain inputs''' ''' Testing the sparse GP regression with rbf, linear kernel on 2d data with uncertain inputs'''
rbflin = GPy.kern.RBF(2) + GPy.kern.Linear(2) rbflin = GPy.kern.RBF(2) + GPy.kern.Linear(2)
raise unittest.SkipTest("This is not implemented yet!") raise unittest.SkipTest("This is not implemented yet!")
self.check_model(rbflin, model_type='SparseGPRegression', dimension=2, uncertain_inputs=1) self.check_model(rbflin, model_type='SparseGPRegression', dimension=2, uncertain_inputs=1)
#@unittest.expectedFailure # @unittest.expectedFailure
def test_SparseGPRegression_rbf_linear_white_kern_1D_uncertain_inputs(self): def test_SparseGPRegression_rbf_linear_white_kern_1D_uncertain_inputs(self):
''' Testing the sparse GP regression with rbf, linear kernel on 1d data with uncertain inputs''' ''' Testing the sparse GP regression with rbf, linear kernel on 1d data with uncertain inputs'''
rbflin = GPy.kern.RBF(1) + GPy.kern.Linear(1) rbflin = GPy.kern.RBF(1) + GPy.kern.Linear(1)
@ -202,7 +202,7 @@ class GradientTests(unittest.TestCase):
X = np.hstack([np.random.normal(5, 2, N / 2), np.random.normal(10, 2, N / 2)])[:, None] X = np.hstack([np.random.normal(5, 2, N / 2), np.random.normal(10, 2, N / 2)])[:, None]
Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None] Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None]
kernel = GPy.kern.RBF(1) kernel = GPy.kern.RBF(1)
m = GPy.models.GPClassification(X,Y,kernel=kernel) m = GPy.models.GPClassification(X, Y, kernel=kernel)
m.update_likelihood_approximation() m.update_likelihood_approximation()
self.assertTrue(m.checkgrad()) self.assertTrue(m.checkgrad())
@ -212,11 +212,11 @@ class GradientTests(unittest.TestCase):
Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None] Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None]
Z = np.linspace(0, 15, 4)[:, None] Z = np.linspace(0, 15, 4)[:, None]
kernel = GPy.kern.RBF(1) kernel = GPy.kern.RBF(1)
m = GPy.models.SparseGPClassification(X,Y,kernel=kernel,Z=Z) m = GPy.models.SparseGPClassification(X, Y, kernel=kernel, Z=Z)
#distribution = GPy.likelihoods.likelihood_functions.Bernoulli() # distribution = GPy.likelihoods.likelihood_functions.Bernoulli()
#likelihood = GPy.likelihoods.EP(Y, distribution) # likelihood = GPy.likelihoods.EP(Y, distribution)
#m = GPy.core.SparseGP(X, likelihood, kernel, Z) # m = GPy.core.SparseGP(X, likelihood, kernel, Z)
#m.ensure_default_constraints() # m.ensure_default_constraints()
m.update_likelihood_approximation() m.update_likelihood_approximation()
self.assertTrue(m.checkgrad()) self.assertTrue(m.checkgrad())
@ -224,8 +224,8 @@ class GradientTests(unittest.TestCase):
N = 20 N = 20
X = np.hstack([np.random.rand(N / 2) + 1, np.random.rand(N / 2) - 1])[:, None] X = np.hstack([np.random.rand(N / 2) + 1, np.random.rand(N / 2) - 1])[:, None]
k = GPy.kern.RBF(1) + GPy.kern.White(1) k = GPy.kern.RBF(1) + GPy.kern.White(1)
Y = np.hstack([np.ones(N/2),np.zeros(N/2)])[:,None] Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None]
m = GPy.models.FITCClassification(X, Y, kernel = k) m = GPy.models.FITCClassification(X, Y, kernel=k)
m.update_likelihood_approximation() m.update_likelihood_approximation()
self.assertTrue(m.checkgrad()) self.assertTrue(m.checkgrad())
@ -238,7 +238,7 @@ class GradientTests(unittest.TestCase):
Y = np.vstack((Y1, Y2)) Y = np.vstack((Y1, Y2))
k1 = GPy.kern.RBF(1) k1 = GPy.kern.RBF(1)
m = GPy.models.GPMultioutputRegression(X_list=[X1,X2],Y_list=[Y1,Y2],kernel_list=[k1]) m = GPy.models.GPMultioutputRegression(X_list=[X1, X2], Y_list=[Y1, Y2], kernel_list=[k1])
m.constrain_fixed('.*rbf_var', 1.) m.constrain_fixed('.*rbf_var', 1.)
self.assertTrue(m.checkgrad()) self.assertTrue(m.checkgrad())
@ -251,7 +251,7 @@ class GradientTests(unittest.TestCase):
Y = np.vstack((Y1, Y2)) Y = np.vstack((Y1, Y2))
k1 = GPy.kern.RBF(1) k1 = GPy.kern.RBF(1)
m = GPy.models.SparseGPMultioutputRegression(X_list=[X1,X2],Y_list=[Y1,Y2],kernel_list=[k1]) m = GPy.models.SparseGPMultioutputRegression(X_list=[X1, X2], Y_list=[Y1, Y2], kernel_list=[k1])
m.constrain_fixed('.*rbf_var', 1.) m.constrain_fixed('.*rbf_var', 1.)
self.assertTrue(m.checkgrad()) self.assertTrue(m.checkgrad())