diff --git a/GPy/testing/cacher_tests.py b/GPy/testing/cacher_tests.py
new file mode 100644
index 00000000..60f79ba2
--- /dev/null
+++ b/GPy/testing/cacher_tests.py
@@ -0,0 +1,37 @@
+'''
+Created on 4 Sep 2015
+
+@author: maxz
+'''
+import unittest
+from GPy.util.caching import Cacher
+from pickle import PickleError
+
+
+class Test(unittest.TestCase):
+    def setUp(self):
+        def op(x):
+            return x
+        self.cache = Cacher(op, 1)  # limit=1: remember a single input set
+
+    def test_pickling(self):
+        self.assertRaises(PickleError, self.cache.__getstate__)
+        self.assertRaises(PickleError, self.cache.__setstate__)
+
+    def test_copy(self):
+        tmp = self.cache.__deepcopy__()
+        assert(tmp.operation is self.cache.operation)
+        self.assertEqual(tmp.limit, self.cache.limit)
+
+    def test_reset(self):
+        self.cache.reset()
+        self.assertDictEqual(self.cache.cached_input_ids, {})
+        self.assertDictEqual(self.cache.cached_outputs, {})
+        self.assertDictEqual(self.cache.inputs_changed, {})
+
+    def test_name(self):
+        assert(self.cache.__name__ == self.cache.operation.__name__)
+
+if __name__ == "__main__":
+    #import sys;sys.argv = ['', 'Test.testName']
+    unittest.main()
\ No newline at end of file
diff --git a/GPy/testing/gp_tests.py b/GPy/testing/gp_tests.py
new file mode 100644
index 00000000..07aa31a3
--- /dev/null
+++ b/GPy/testing/gp_tests.py
@@ -0,0 +1,102 @@
+'''
+Created on 4 Sep 2015
+
+@author: maxz
+'''
+import unittest
+import numpy as np, GPy
+from GPy.core.parameterization.variational import NormalPosterior
+
+class Test(unittest.TestCase):
+
+
+    def setUp(self):
+        np.random.seed(12345)
+        self.N = 20
+        self.N_new = 50
+        self.D = 1
+        self.X = np.random.uniform(-3., 3., (self.N, 1))
+        self.Y = np.sin(self.X) + np.random.randn(self.N, self.D) * 0.05
+        self.X_new = np.random.uniform(-3., 3., (self.N_new, 1))
+
+
+    def test_setxy_bgplvm(self):
+        k = GPy.kern.RBF(1)
+        m = GPy.models.BayesianGPLVM(self.Y, 2, kernel=k)
+        mu, var = m.predict(m.X)
+        X = m.X.copy()
+        Xnew = NormalPosterior(m.X.mean[:10].copy(), m.X.variance[:10].copy())
+        m.set_XY(Xnew, m.Y[:10])
+        assert(m.checkgrad())
+        m.set_XY(X, self.Y)
+        mu2, var2 = m.predict(m.X)
+        np.testing.assert_allclose(mu, mu2)
+        np.testing.assert_allclose(var, var2)
+
+    def test_setxy_gplvm(self):
+        k = GPy.kern.RBF(1)
+        m = GPy.models.GPLVM(self.Y, 2, kernel=k)
+        mu, var = m.predict(m.X)
+        X = m.X.copy()
+        Xnew = X[:10].copy()
+        m.set_XY(Xnew, m.Y[:10])
+        assert(m.checkgrad())
+        m.set_XY(X, self.Y)
+        mu2, var2 = m.predict(m.X)
+        np.testing.assert_allclose(mu, mu2)
+        np.testing.assert_allclose(var, var2)
+
+    def test_setxy_gp(self):
+        k = GPy.kern.RBF(1)
+        m = GPy.models.GPRegression(self.X, self.Y, kernel=k)
+        mu, var = m.predict(m.X)
+        X = m.X.copy()
+        m.set_XY(m.X[:10], m.Y[:10])
+        assert(m.checkgrad())
+        m.set_XY(X, self.Y)
+        mu2, var2 = m.predict(m.X)
+        np.testing.assert_allclose(mu, mu2)
+        np.testing.assert_allclose(var, var2)
+
+    def test_mean_function(self):
+        from GPy.core.parameterization.param import Param
+        from GPy.core.mapping import Mapping
+        class Parabola(Mapping):
+            def __init__(self, variance, degree=2, name='parabola'):
+                super(Parabola, self).__init__(1, 1, name)
+                self.variance = Param('variance', np.ones(degree+1) * variance)
+                self.degree = degree
+                self.link_parameter(self.variance)
+
+            def f(self, X):
+                p = self.variance[0] * np.ones(X.shape)
+                for i in range(1, self.degree+1):
+                    p += self.variance[i] * X**i
+                return p
+
+            def gradients_X(self, dL_dF, X):
+                grad = np.zeros(X.shape)
+                for i in range(1, self.degree+1):
+                    grad += i * self.variance[i] * X**(i-1)
+                return dL_dF * grad  # dL/dX = dL_dF * df/dX (chain rule)
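+
+            # GPy's Mapping API: update_gradients writes dL/d(parameter) into
+            # each linked parameter's .gradient. Here dL_dF is dL/df(X), so
+            # the chain rule gives dL/dc_i = (dL_dF * X**i).sum(0).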
+            def update_gradients(self, dL_dF, X):
+                for i in range(self.degree+1):
+                    self.variance.gradient[i] = (dL_dF * X**i).sum(0)
+        X = np.linspace(-2, 2, 100)[:, None]
+        k = GPy.kern.RBF(1)
+        k.randomize()
+        p = Parabola(.3)
+        p.randomize()
+        Y = p.f(X) + np.random.multivariate_normal(np.zeros(X.shape[0]), k.K(X))[:,None] + np.random.normal(0, .1, (X.shape[0], 1))
+        m = GPy.models.GPRegression(X, Y, mean_function=p)
+        m.randomize()
+        assert(m.checkgrad())
+        _ = m.predict(m.X)
+
+if __name__ == "__main__":
+    #import sys;sys.argv = ['', 'Test.testName']
+    unittest.main()
\ No newline at end of file
diff --git a/GPy/testing/model_tests.py b/GPy/testing/model_tests.py
index 648e1174..75165c0e 100644
--- a/GPy/testing/model_tests.py
+++ b/GPy/testing/model_tests.py
@@ -55,13 +55,44 @@ class MiscTests(unittest.TestCase):
         np.testing.assert_allclose(mu1, (mu2*std)+mu)
         np.testing.assert_allclose(var1, var2)
 
+        q50n = m.predict_quantiles(m.X, (50,))
+        q50 = m2.predict_quantiles(m2.X, (50,))
+        np.testing.assert_allclose(q50n[0], (q50[0]*std)+mu)
+
+    def check_jacobian(self):
+        try:
+            import autograd.numpy as np, autograd as ag, GPy
+        except ImportError:
+            self.skipTest("autograd not available to check gradients")
+        def k(X, X2, alpha=1., lengthscale=None):
+            if lengthscale is None:
+                lengthscale = np.ones(X.shape[1])
+            exp = 0.
+            for q in range(X.shape[1]):
+                exp += ((X[:, [q]] - X2[:, [q]].T)/lengthscale[q])**2
+            #exp = np.sqrt(exp)
+            return alpha * np.exp(-.5*exp)
+        dk = ag.elementwise_grad(lambda x, x2: k(x, x2, alpha=ke.variance.values, lengthscale=ke.lengthscale.values))
+        dkdk = ag.elementwise_grad(dk, argnum=1)
+
+        ke = GPy.kern.RBF(1, ARD=True)
+        ke.variance = .2
+        ke.lengthscale[:] = .5
+        # randomize() replaces the fixed values above with random draws
+        ke.randomize()
+        X = np.linspace(-1, 1, 1000)[:,None]
+        X2 = np.array([[0.]]).T
+        np.testing.assert_allclose(ke.gradients_X([[1.]], X, X), dk(X, X))
+        np.testing.assert_allclose(ke.gradients_XX([[1.]], X, X).sum(0), dkdk(X, X))
+        np.testing.assert_allclose(ke.gradients_X([[1.]], X, X2), dk(X, X2))
+        np.testing.assert_allclose(ke.gradients_XX([[1.]], X, X2).sum(0), dkdk(X, X2))
+
 
     def test_sparse_raw_predict(self):
         k = GPy.kern.RBF(1)
         m = GPy.models.SparseGPRegression(self.X, self.Y, kernel=k)
         m.randomize()
         Z = m.Z[:]
-        X = self.X[:]
 
         # Not easy to check if woodbury_inv is correct in itself as it requires a large derivation and expression
         Kinv = m.posterior.woodbury_inv
@@ -147,11 +178,27 @@ class MiscTests(unittest.TestCase):
         m = BayesianGPLVMMiniBatch(Ymissing, Q, init="random", num_inducing=num_inducing,
                           kernel=k, missing_data=True)
         assert(m.checkgrad())
+        mul, varl = m.predict(m.X)
 
         k = kern.RBF(Q, ARD=True) + kern.White(Q, np.exp(-2)) # + kern.bias(Q)
-        m = BayesianGPLVMMiniBatch(Ymissing, Q, init="random", num_inducing=num_inducing,
+        m2 = BayesianGPLVMMiniBatch(Ymissing, Q, init="random", num_inducing=num_inducing,
                           kernel=k, missing_data=True)
-        assert(m.checkgrad())
+        assert(m2.checkgrad())
+        m2.kern.rbf.lengthscale[:] = 1e6
+        m2.X[:] = m.X.param_array
+        m2.likelihood[:] = m.likelihood[:]
+        m2.kern.white[:] = m.kern.white[:]
+        mu, var = m2.predict(m2.X)
+        np.testing.assert_allclose(mul, mu)
+        np.testing.assert_allclose(varl, var)
+
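+        # the 50% quantile is the median; for the Gaussian predictive
+        # density of a GP the median equals the mean, so predict_quantiles
+        # can be checked directly against predict: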
+        q50 = m.predict_quantiles(m.X, (50,))
+        np.testing.assert_allclose(mul, q50[0])
+
+
 
     def test_likelihood_replicate_kern(self):
         m = GPy.models.GPRegression(self.X, self.Y)
diff --git a/GPy/testing/run_coverage.sh b/GPy/testing/run_coverage.sh
index 6b6e8cb2..f2e52230 100755
--- a/GPy/testing/run_coverage.sh
+++ b/GPy/testing/run_coverage.sh
@@ -1 +1,3 @@
-nosetests . --with-coverage --cover-html --cover-html-dir=coverage --cover-package=GPy --cover-erase
+nosetests . --with-coverage --logging-level=INFO --cover-html --cover-html-dir=coverage --cover-package=GPy --cover-erase
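+# --logging-level=INFO drops DEBUG noise from nose's captured logging;
+# the HTML coverage report is written to ./coverage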