diff --git a/GPy/testing/bgplvm_tests.py b/GPy/testing/bgplvm_tests.py
deleted file mode 100644
index fd55d314..00000000
--- a/GPy/testing/bgplvm_tests.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Copyright (c) 2012, Nicolo Fusi
-# Licensed under the BSD 3-clause license (see LICENSE.txt)
-
-import unittest
-import numpy as np
-import GPy
-from ..models import BayesianGPLVM
-
-class BGPLVMTests(unittest.TestCase):
-    def test_bias_kern(self):
-        N, num_inducing, input_dim, D = 10, 3, 2, 4
-        X = np.random.rand(N, input_dim)
-        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        K = k.K(X)
-        Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T
-        Y -= Y.mean(axis=0)
-        k = GPy.kern.bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
-        m.randomize()
-        self.assertTrue(m.checkgrad())
-
-    def test_linear_kern(self):
-        N, num_inducing, input_dim, D = 10, 3, 2, 4
-        X = np.random.rand(N, input_dim)
-        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        K = k.K(X)
-        Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T
-        Y -= Y.mean(axis=0)
-        k = GPy.kern.Linear(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
-        m.randomize()
-        self.assertTrue(m.checkgrad())
-
-    def test_rbf_kern(self):
-        N, num_inducing, input_dim, D = 10, 3, 2, 4
-        X = np.random.rand(N, input_dim)
-        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        K = k.K(X)
-        Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T
-        Y -= Y.mean(axis=0)
-        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
-        m.randomize()
-        self.assertTrue(m.checkgrad())
-
-    def test_rbf_bias_kern(self):
-        N, num_inducing, input_dim, D = 10, 3, 2, 4
-        X = np.random.rand(N, input_dim)
-        k = GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        K = k.K(X)
-        Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T
-        Y -= Y.mean(axis=0)
-        k = GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
-        m.randomize()
-        self.assertTrue(m.checkgrad())
-
-    def test_rbf_line_kern(self):
-        N, num_inducing, input_dim, D = 10, 3, 2, 4
-        X = np.random.rand(N, input_dim)
-        k = GPy.kern.RBF(input_dim) + GPy.kern.Linear(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        K = k.K(X)
-        Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T
-        Y -= Y.mean(axis=0)
-        k = GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
-        m.randomize()
-        self.assertTrue(m.checkgrad())
-
-    def test_linear_bias_kern(self):
-        N, num_inducing, input_dim, D = 30, 5, 4, 30
-        X = np.random.rand(N, input_dim)
-        k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        K = k.K(X)
-        Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T
-        Y -= Y.mean(axis=0)
-        k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
-        m.randomize()
-        self.assertTrue(m.checkgrad())
-
-
-if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
-    unittest.main()
diff --git a/GPy/testing/kernel_tests.py b/GPy/testing/kernel_tests.py
index 2789d1de..657f5ac4 100644
--- a/GPy/testing/kernel_tests.py
+++ b/GPy/testing/kernel_tests.py
@@ -33,9 +33,10 @@ class Kern_check_model(GPy.core.Model):
         self.X2 = X2
         self.dL_dK = dL_dK
 
-    def is_positive_definite(self):
+    def is_positive_semi_definite(self):
         v = np.linalg.eig(self.kernel.K(self.X))[0]
-        if any(v<-10*sys.float_info.epsilon):
+        if any(v.real<=-1e-10):
+            print v.real.min()
             return False
         else:
             return True
@@ -89,7 +90,7 @@ class Kern_check_dKdiag_dX(Kern_check_dK_dX):
         return (np.diag(self.dL_dK)*self.kernel.Kdiag(self.X)).sum()
 
     def parameters_changed(self):
-        self.X.gradient = self.kernel.gradients_X_diag(self.dL_dK, self.X)
+        self.X.gradient = self.kernel.gradients_X_diag(self.dL_dK.diagonal(), self.X)
 
 
 
@@ -119,7 +120,7 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
 
     if verbose:
         print("Checking covariance function is positive definite.")
-    result = Kern_check_model(kern, X=X).is_positive_definite()
+    result = Kern_check_model(kern, X=X).is_positive_semi_definite()
     if result and verbose:
         print("Check passed.")
    if not result:
@@ -214,18 +215,55 @@
 
 class KernelGradientTestsContinuous(unittest.TestCase):
     def setUp(self):
-        self.X = np.random.randn(100,2)
-        self.X2 = np.random.randn(110,2)
+        self.N, self.D = 100, 5
+        self.X = np.random.randn(self.N,self.D)
+        self.X2 = np.random.randn(self.N+10,self.D)
 
         continuous_kerns = ['RBF', 'Linear']
         self.kernclasses = [getattr(GPy.kern, s) for s in continuous_kerns]
 
     def test_Matern32(self):
-        k = GPy.kern.Matern32(2)
+        k = GPy.kern.Matern32(self.D)
+        k.randomize()
         self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
 
     def test_Matern52(self):
-        k = GPy.kern.Matern52(2)
+        k = GPy.kern.Matern52(self.D)
+        k.randomize()
+        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
+
+    def test_RBF(self):
+        k = GPy.kern.RBF(self.D)
+        k.randomize()
+        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
+
+    def test_Linear(self):
+        k = GPy.kern.Linear(self.D)
+        k.randomize()
+        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
+
+class KernelGradientTestsContinuous1D(unittest.TestCase):
+    def setUp(self):
+        self.N, self.D = 100, 1
+        self.X = np.random.randn(self.N,self.D)
+        self.X2 = np.random.randn(self.N+10,self.D)
+
+        continuous_kerns = ['RBF', 'Linear']
+        self.kernclasses = [getattr(GPy.kern, s) for s in continuous_kerns]
+
+    def test_PeriodicExponential(self):
+        k = GPy.kern.PeriodicExponential(self.D)
+        k.randomize()
+        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
+
+    def test_PeriodicMatern32(self):
+        k = GPy.kern.PeriodicMatern32(self.D)
+        k.randomize()
+        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
+
+    def test_PeriodicMatern52(self):
+        k = GPy.kern.PeriodicMatern52(self.D)
+        k.randomize()
         self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
 
 #TODO: turn off grad checkingwrt X for indexed kernels liek coregionalize
@@ -251,6 +289,7 @@ class KernelTestsMiscellaneous(unittest.TestCase):
         self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.rbf]), self.linear.K(self.X)+self.rbf.K(self.X)))
         self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=self.sumkern.parts[0]), self.rbf.K(self.X)))
 
+
 if __name__ == "__main__":
     print "Running unit tests, please be (very) patient..."
     unittest.main()
diff --git a/GPy/testing/likelihood_tests.py b/GPy/testing/likelihood_tests.py
index c71842d8..ab26910e 100644
--- a/GPy/testing/likelihood_tests.py
+++ b/GPy/testing/likelihood_tests.py
@@ -541,7 +541,8 @@ class TestNoiseModels(object):
         #import ipdb; ipdb.set_trace()
         #NOTE this test appears to be stochastic for some likelihoods (student t?)
         # appears to all be working in test mode right now...
-
+        if isinstance(model, GPy.likelihoods.StudentT):
+            import ipdb;ipdb.set_trace()
         assert m.checkgrad(step=step)
 
         ###########
@@ -700,7 +701,6 @@ class LaplaceTests(unittest.TestCase):
         np.testing.assert_almost_equal(m1.log_likelihood(), m2.log_likelihood(), decimal=2)
 
         #Check marginals are the same with random
         m1.randomize()
-        import ipdb;ipdb.set_trace()
         m2[:] = m1[:]
         np.testing.assert_almost_equal(m1.log_likelihood(), m2.log_likelihood(), decimal=2)
diff --git a/GPy/testing/mrd_tests.py b/GPy/testing/mrd_tests.py
deleted file mode 100644
index 40fcb86a..00000000
--- a/GPy/testing/mrd_tests.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2013, Max Zwiessele
-# Licensed under the BSD 3-clause license (see LICENSE.txt)
-'''
-Created on 10 Apr 2013
-
-@author: maxz
-'''
-
-import unittest
-import numpy as np
-import GPy
-
-class MRDTests(unittest.TestCase):
-
-    def test_gradients(self):
-        num_m = 3
-        N, num_inducing, input_dim, D = 20, 8, 6, 20
-        X = np.random.rand(N, input_dim)
-
-        k = GPy.kern.linear(input_dim) + GPy.kern.bias(input_dim) + GPy.kern.white(input_dim)
-        K = k.K(X)
-
-        Ylist = [np.random.multivariate_normal(np.zeros(N), K, input_dim).T for _ in range(num_m)]
-        likelihood_list = [GPy.likelihoods.Gaussian(Y) for Y in Ylist]
-
-        m = GPy.models.MRD(likelihood_list, input_dim=input_dim, kernels=k, num_inducing=num_inducing)
-
-        self.assertTrue(m.checkgrad())
-
-if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
-    unittest.main()
diff --git a/GPy/testing/parameterized_tests.py b/GPy/testing/parameterized_tests.py
index b2f57144..6555b8f4 100644
--- a/GPy/testing/parameterized_tests.py
+++ b/GPy/testing/parameterized_tests.py
@@ -16,21 +16,21 @@ class Test(unittest.TestCase):
         from GPy.core.parameterization import Param
         from GPy.core.parameterization.transformations import Logistic
         self.param = Param('param', np.random.rand(25,2), Logistic(0, 1))
-        
+
         self.test1 = GPy.core.Parameterized("test model")
         self.test1.add_parameter(self.white)
         self.test1.add_parameter(self.rbf, 0)
         self.test1.add_parameter(self.param)
-        
+
         x = np.linspace(-2,6,4)[:,None]
         y = np.sin(x)
         self.testmodel = GPy.models.GPRegression(x,y)
-        
+
     def test_add_parameter(self):
         self.assertEquals(self.rbf._parent_index_, 0)
         self.assertEquals(self.white._parent_index_, 1)
         pass
-        
+
     def test_fixes(self):
         self.white.fix(warning=False)
         self.test1.remove_parameter(self.test1.param)
@@ -41,18 +41,18 @@
         self.test1.add_parameter(self.white, 0)
         self.assertListEqual(self.test1._fixes_.tolist(),[FIXED,UNFIXED,UNFIXED])
-        
+
     def test_remove_parameter(self):
         from GPy.core.parameterization.transformations import FIXED, UNFIXED, __fixed__, Logexp
         self.white.fix()
         self.test1.remove_parameter(self.white)
         self.assertIs(self.test1._fixes_,None)
-        
+
         self.assertListEqual(self.white._fixes_.tolist(), [FIXED])
         self.assertEquals(self.white.constraints._offset, 0)
         self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
         self.assertIs(self.test1.constraints, self.param.constraints._param_index_ops)
-        
+
         self.test1.add_parameter(self.white, 0)
         self.assertIs(self.test1.constraints, self.white.constraints._param_index_ops)
         self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
 
@@ -60,17 +60,17 @@
         self.assertListEqual(self.test1.constraints[__fixed__].tolist(), [0])
         self.assertIs(self.white._fixes_,None)
         self.assertListEqual(self.test1._fixes_.tolist(),[FIXED] + [UNFIXED] * 52)
-        
+
         self.test1.remove_parameter(self.white)
         self.assertIs(self.test1._fixes_,None)
         self.assertListEqual(self.white._fixes_.tolist(), [FIXED])
         self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
         self.assertIs(self.test1.constraints, self.param.constraints._param_index_ops)
         self.assertListEqual(self.test1.constraints[Logexp()].tolist(), [0,1])
-        
+
     def test_add_parameter_already_in_hirarchy(self):
         self.assertRaises(HierarchyError, self.test1.add_parameter, self.white._parameters_[0])
-        
+
     def test_default_constraints(self):
         self.assertIs(self.rbf.variance.constraints._param_index_ops, self.rbf.constraints._param_index_ops)
         self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
@@ -83,7 +83,7 @@
         self.rbf.constrain(GPy.transformations.Square(), False)
         self.assertListEqual(self.test1.constraints[GPy.transformations.Square()].tolist(), range(2))
         self.assertListEqual(self.test1.constraints[GPy.transformations.Logexp()].tolist(), [2])
-        
+
         self.test1.remove_parameter(self.rbf)
         self.assertListEqual(self.test1.constraints[GPy.transformations.Square()].tolist(), [])
 
diff --git a/GPy/testing/unit_tests.py b/GPy/testing/unit_tests.py
index 0cb4cd66..1aec7d7a 100644
--- a/GPy/testing/unit_tests.py
+++ b/GPy/testing/unit_tests.py
@@ -34,7 +34,7 @@ class GradientTests(unittest.TestCase):
         model_fit = getattr(GPy.models, model_type)
 
         # noise = GPy.kern.White(dimension)
-        kern = kern # + noise
+        kern = kern  # + noise
         if uncertain_inputs:
             m = model_fit(X, Y, kernel=kern, X_variance=np.random.rand(X.shape[0], X.shape[1]))
         else:
@@ -60,7 +60,7 @@
 
     def test_GPRegression_mlp_1d(self):
         ''' Testing the GP regression with mlp kernel with white kernel on 1d data '''
-        mlp = GPy.kern.mlp(1)
+        mlp = GPy.kern.MLP(1)
         self.check_model(mlp, model_type='GPRegression', dimension=1)
 
     def test_GPRegression_poly_1d(self):
@@ -163,14 +163,14 @@
         rbflin = GPy.kern.RBF(2) + GPy.kern.Linear(2)
         self.check_model(rbflin, model_type='SparseGPRegression', dimension=2)
 
-    #@unittest.expectedFailure
+    # @unittest.expectedFailure
     def test_SparseGPRegression_rbf_linear_white_kern_2D_uncertain_inputs(self):
         ''' Testing the sparse GP regression with rbf, linear kernel on 2d data with uncertain inputs'''
         rbflin = GPy.kern.RBF(2) + GPy.kern.Linear(2)
         raise unittest.SkipTest("This is not implemented yet!")
         self.check_model(rbflin, model_type='SparseGPRegression', dimension=2, uncertain_inputs=1)
 
-    #@unittest.expectedFailure
+    # @unittest.expectedFailure
     def test_SparseGPRegression_rbf_linear_white_kern_1D_uncertain_inputs(self):
         ''' Testing the sparse GP regression with rbf, linear kernel on 1d data with uncertain inputs'''
         rbflin = GPy.kern.RBF(1) + GPy.kern.Linear(1)
@@ -202,7 +202,7 @@
         X = np.hstack([np.random.normal(5, 2, N / 2), np.random.normal(10, 2, N / 2)])[:, None]
         Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None]
         kernel = GPy.kern.RBF(1)
-        m = GPy.models.GPClassification(X,Y,kernel=kernel)
+        m = GPy.models.GPClassification(X, Y, kernel=kernel)
         m.update_likelihood_approximation()
         self.assertTrue(m.checkgrad())
 
@@ -212,11 +212,11 @@
         Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None]
         Z = np.linspace(0, 15, 4)[:, None]
         kernel = GPy.kern.RBF(1)
-        m = GPy.models.SparseGPClassification(X,Y,kernel=kernel,Z=Z)
-        #distribution = GPy.likelihoods.likelihood_functions.Bernoulli()
-        #likelihood = GPy.likelihoods.EP(Y, distribution)
-        #m = GPy.core.SparseGP(X, likelihood, kernel, Z)
-        #m.ensure_default_constraints()
+        m = GPy.models.SparseGPClassification(X, Y, kernel=kernel, Z=Z)
+        # distribution = GPy.likelihoods.likelihood_functions.Bernoulli()
+        # likelihood = GPy.likelihoods.EP(Y, distribution)
+        # m = GPy.core.SparseGP(X, likelihood, kernel, Z)
+        # m.ensure_default_constraints()
         m.update_likelihood_approximation()
         self.assertTrue(m.checkgrad())
 
@@ -224,8 +224,8 @@
         N = 20
         X = np.hstack([np.random.rand(N / 2) + 1, np.random.rand(N / 2) - 1])[:, None]
         k = GPy.kern.RBF(1) + GPy.kern.White(1)
-        Y = np.hstack([np.ones(N/2),np.zeros(N/2)])[:,None]
-        m = GPy.models.FITCClassification(X, Y, kernel = k)
+        Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None]
+        m = GPy.models.FITCClassification(X, Y, kernel=k)
         m.update_likelihood_approximation()
         self.assertTrue(m.checkgrad())
 
@@ -238,7 +238,7 @@
         Y = np.vstack((Y1, Y2))
 
         k1 = GPy.kern.RBF(1)
-        m = GPy.models.GPMultioutputRegression(X_list=[X1,X2],Y_list=[Y1,Y2],kernel_list=[k1])
+        m = GPy.models.GPMultioutputRegression(X_list=[X1, X2], Y_list=[Y1, Y2], kernel_list=[k1])
         m.constrain_fixed('.*rbf_var', 1.)
         self.assertTrue(m.checkgrad())
 
@@ -251,7 +251,7 @@
         Y = np.vstack((Y1, Y2))
 
         k1 = GPy.kern.RBF(1)
-        m = GPy.models.SparseGPMultioutputRegression(X_list=[X1,X2],Y_list=[Y1,Y2],kernel_list=[k1])
+        m = GPy.models.SparseGPMultioutputRegression(X_list=[X1, X2], Y_list=[Y1, Y2], kernel_list=[k1])
         m.constrain_fixed('.*rbf_var', 1.)
         self.assertTrue(m.checkgrad())
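
Reviewer note, not part of the patch: two of the edits above are semantic rather than cosmetic. The renamed `is_positive_semi_definite` check replaces the old eigenvalue threshold of `-10*sys.float_info.epsilon` (about -2.2e-15), which valid kernel matrices routinely trip through round-off in `np.linalg.eig`, with a looser `-1e-10` bound on the real parts of the eigenvalues; and the kernel gradient tests now call `k.randomize()` before `checkgrad`, so gradients are verified at random hyperparameter values rather than at the defaults. A minimal standalone sketch of the relaxed check, assuming only numpy (the free-standing function and the hand-built Gaussian kernel matrix are illustrative, not GPy API):

    import numpy as np

    def is_positive_semi_definite(K, tol=1e-10):
        # np.linalg.eig can return tiny negative (or slightly complex)
        # eigenvalues for a matrix that is PSD in exact arithmetic, so
        # compare the real parts against a small negative tolerance
        # instead of against zero.
        v = np.linalg.eig(K)[0]
        return not np.any(v.real <= -tol)

    # Illustrative usage on a Gaussian (RBF-style) kernel matrix,
    # which is PSD by construction:
    X = np.random.randn(100, 2)
    sq_dists = ((X[:, None, :] - X[None, :, :]) ** 2).sum(axis=-1)
    K = np.exp(-0.5 * sq_dists)
    assert is_positive_semi_definite(K)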