merge with upstream

Zhenwen Dai 2016-03-10 18:17:35 +00:00
commit ba74e29aee
115 changed files with 1178 additions and 531 deletions

View file

@@ -1,14 +1,9 @@
# Copyright (c) 2014, Max Zwiessele
# Copyright (c) 2014, Max Zwiessele, GPy Authors
# Licensed under the BSD 3-clause license (see LICENSE.txt)
"""
MaxZ
"""
import unittest
import sys
def deepTest(reason):
if reason:
return lambda x:x
return unittest.skip("Not deep scanning, enable deepscan by adding 'deep' argument")
return unittest.skip("Not deep scanning, enable deepscan by adding 'deep' argument to unittest call")
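
For context, a minimal usage sketch of the deepTest decorator above; the call site is not part of this diff, so the way the flag is obtained (here, from sys.argv) is an assumption:

import sys
import unittest

def deepTest(reason):
    # Identity decorator when deep scanning is enabled, otherwise skip the test.
    if reason:
        return lambda x: x
    return unittest.skip("Not deep scanning, enable deepscan by adding 'deep' argument to unittest call")

# Assumed convention: pass 'deep' on the command line to enable the expensive tests.
DEEP = 'deep' in sys.argv

class DeepScanExample(unittest.TestCase):
    @deepTest(DEEP)
    def test_expensive_check(self):
        # Runs only when deep scanning was requested.
        self.assertTrue(True)

if __name__ == '__main__':
    # Strip the custom 'deep' flag so unittest does not treat it as a test name.
    unittest.main(argv=[a for a in sys.argv if a != 'deep'])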

40 binary image files changed; before/after previews not shown.

View file

@@ -1,109 +0,0 @@
'''
Created on 4 Sep 2015
@author: maxz
'''
import unittest
import numpy as np
import GPy
class BGPLVMTest(unittest.TestCase):
def setUp(self):
np.random.seed(12345)
X, W = np.random.normal(0,1,(100,6)), np.random.normal(0,1,(6,13))
Y = X.dot(W) + np.random.normal(0, .1, (X.shape[0], W.shape[1]))
self.inan = np.random.binomial(1, .1, Y.shape).astype(bool)
self.X, self.W, self.Y = X,W,Y
self.Q = 3
self.m_full = GPy.models.BayesianGPLVM(Y, self.Q)
def test_lik_comparisons_m1_s0(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=False)
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_predict_missing_data(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
self.assertRaises(NotImplementedError, m.predict, m.X, full_cov=True)
mu1, var1 = m.predict(m.X, full_cov=False)
mu2, var2 = self.m_full.predict(self.m_full.X, full_cov=False)
np.testing.assert_allclose(mu1, mu2)
np.testing.assert_allclose(var1, var2)
mu1, var1 = m.predict(m.X.mean, full_cov=True)
mu2, var2 = self.m_full.predict(self.m_full.X.mean, full_cov=True)
np.testing.assert_allclose(mu1, mu2)
np.testing.assert_allclose(var1[:,:,0], var2)
mu1, var1 = m.predict(m.X.mean, full_cov=False)
mu2, var2 = self.m_full.predict(self.m_full.X.mean, full_cov=False)
np.testing.assert_allclose(mu1, mu2)
np.testing.assert_allclose(var1[:,[0]], var2)
def test_lik_comparisons_m0_s0(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=False, stochastic=False)
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_lik_comparisons_m1_s1(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_lik_comparisons_m0_s1(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=False, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_gradients_missingdata(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=False, batchsize=self.Y.shape[1])
assert(m.checkgrad())
def test_gradients_missingdata_stochastics(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=True, batchsize=1)
assert(m.checkgrad())
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=True, batchsize=4)
assert(m.checkgrad())
def test_gradients_stochastics(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=False, stochastic=True, batchsize=1)
assert(m.checkgrad())
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=False, stochastic=True, batchsize=4)
assert(m.checkgrad())
def test_predict(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()

View file

@@ -97,4 +97,4 @@ class Test(unittest.TestCase):
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
unittest.main()

View file

@@ -324,8 +324,18 @@ class KernelGradientTestsContinuous(unittest.TestCase):
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Poly(self):
k = GPy.kern.Poly(self.D, order=5)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_WhiteHeteroscedastic(self):
k = GPy.kern.WhiteHeteroscedastic(self.D, self.X.shape[0])
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_standard_periodic(self):
k = GPy.kern.StdPeriodic(self.D, self.D-1)
k = GPy.kern.StdPeriodic(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
@@ -334,11 +344,14 @@ class KernelTestsMiscellaneous(unittest.TestCase):
N, D = 100, 10
self.X = np.linspace(-np.pi, +np.pi, N)[:,None] * np.random.uniform(-10,10,D)
self.rbf = GPy.kern.RBF(2, active_dims=np.arange(0,4,2))
self.rbf.randomize()
self.linear = GPy.kern.Linear(2, active_dims=(3,9))
self.linear.randomize()
self.matern = GPy.kern.Matern32(3, active_dims=np.array([1,7,9]))
self.matern.randomize()
self.sumkern = self.rbf + self.linear
self.sumkern += self.matern
self.sumkern.randomize()
#self.sumkern.randomize()
def test_which_parts(self):
self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.matern]), self.linear.K(self.X)+self.matern.K(self.X)))
@@ -348,6 +361,21 @@ class KernelTestsMiscellaneous(unittest.TestCase):
def test_active_dims(self):
np.testing.assert_array_equal(self.sumkern.active_dims, [0,1,2,3,7,9])
np.testing.assert_array_equal(self.sumkern._all_dims_active, range(10))
tmp = self.linear+self.rbf
np.testing.assert_array_equal(tmp.active_dims, [0,2,3,9])
np.testing.assert_array_equal(tmp._all_dims_active, range(10))
tmp = self.matern+self.rbf
np.testing.assert_array_equal(tmp.active_dims, [0,1,2,7,9])
np.testing.assert_array_equal(tmp._all_dims_active, range(10))
tmp = self.matern+self.rbf*self.linear
np.testing.assert_array_equal(tmp.active_dims, [0,1,2,3,7,9])
np.testing.assert_array_equal(tmp._all_dims_active, range(10))
tmp = self.matern+self.rbf+self.linear
np.testing.assert_array_equal(tmp.active_dims, [0,1,2,3,7,9])
np.testing.assert_array_equal(tmp._all_dims_active, range(10))
tmp = self.matern*self.rbf*self.linear
np.testing.assert_array_equal(tmp.active_dims, [0,1,2,3,7,9])
np.testing.assert_array_equal(tmp._all_dims_active, range(10))
class KernelTestsNonContinuous(unittest.TestCase):
def setUp(self):

View file

@@ -0,0 +1,226 @@
'''
Created on 4 Sep 2015
@author: maxz
'''
import unittest
import numpy as np
import GPy
class BGPLVMTest(unittest.TestCase):
def setUp(self):
np.random.seed(12345)
X, W = np.random.normal(0,1,(100,6)), np.random.normal(0,1,(6,13))
Y = X.dot(W) + np.random.normal(0, .1, (X.shape[0], W.shape[1]))
self.inan = np.random.binomial(1, .1, Y.shape).astype(bool)
self.X, self.W, self.Y = X,W,Y
self.Q = 3
self.m_full = GPy.models.BayesianGPLVM(Y, self.Q)
def test_lik_comparisons_m1_s0(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=False)
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_predict_missing_data(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
self.assertRaises(NotImplementedError, m.predict, m.X, full_cov=True)
mu1, var1 = m.predict(m.X, full_cov=False)
mu2, var2 = self.m_full.predict(self.m_full.X, full_cov=False)
np.testing.assert_allclose(mu1, mu2)
np.testing.assert_allclose(var1, var2)
mu1, var1 = m.predict(m.X.mean, full_cov=True)
mu2, var2 = self.m_full.predict(self.m_full.X.mean, full_cov=True)
np.testing.assert_allclose(mu1, mu2)
np.testing.assert_allclose(var1[:,:,0], var2)
mu1, var1 = m.predict(m.X.mean, full_cov=False)
mu2, var2 = self.m_full.predict(self.m_full.X.mean, full_cov=False)
np.testing.assert_allclose(mu1, mu2)
np.testing.assert_allclose(var1[:,[0]], var2)
def test_lik_comparisons_m0_s0(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=self.m_full.X.variance.values, missing_data=False, stochastic=False)
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_lik_comparisons_m1_s1(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_lik_comparisons_m0_s1(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=False, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_gradients_missingdata(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=False, batchsize=self.Y.shape[1])
assert(m.checkgrad())
def test_gradients_missingdata_stochastics(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=True, batchsize=1)
assert(m.checkgrad())
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=True, batchsize=4)
assert(m.checkgrad())
def test_gradients_stochastics(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=False, stochastic=True, batchsize=1)
assert(m.checkgrad())
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=False, stochastic=True, batchsize=4)
assert(m.checkgrad())
def test_predict(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
class SparseGPMinibatchTest(unittest.TestCase):
def setUp(self):
np.random.seed(12345)
X, W = np.random.normal(0,1,(100,6)), np.random.normal(0,1,(6,13))
Y = X.dot(W) + np.random.normal(0, .1, (X.shape[0], W.shape[1]))
self.inan = np.random.binomial(1, .1, Y.shape).astype(bool)
self.X, self.W, self.Y = X,W,Y
self.Q = 3
self.m_full = GPy.models.SparseGPLVM(Y, self.Q, kernel=GPy.kern.RBF(self.Q, ARD=True))
def test_lik_comparisons_m1_s0(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=False)
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_sparsegp_init(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
np.random.seed(1234)
Z = self.X[np.random.choice(self.X.shape[0], replace=False, size=10)].copy()
Q = Z.shape[1]
m = GPy.models.sparse_gp_minibatch.SparseGPMiniBatch(self.X, self.Y, Z, GPy.kern.RBF(Q)+GPy.kern.Matern32(Q)+GPy.kern.Bias(Q), GPy.likelihoods.Gaussian(), missing_data=True, stochastic=False)
assert(m.checkgrad())
m.optimize('adadelta', max_iters=10)
assert(m.checkgrad())
m = GPy.models.sparse_gp_minibatch.SparseGPMiniBatch(self.X, self.Y, Z, GPy.kern.RBF(Q)+GPy.kern.Matern32(Q)+GPy.kern.Bias(Q), GPy.likelihoods.Gaussian(), missing_data=True, stochastic=True)
assert(m.checkgrad())
m.optimize('rprop', max_iters=10)
assert(m.checkgrad())
m = GPy.models.sparse_gp_minibatch.SparseGPMiniBatch(self.X, self.Y, Z, GPy.kern.RBF(Q)+GPy.kern.Matern32(Q)+GPy.kern.Bias(Q), GPy.likelihoods.Gaussian(), missing_data=False, stochastic=False)
assert(m.checkgrad())
m.optimize('rprop', max_iters=10)
assert(m.checkgrad())
m = GPy.models.sparse_gp_minibatch.SparseGPMiniBatch(self.X, self.Y, Z, GPy.kern.RBF(Q)+GPy.kern.Matern32(Q)+GPy.kern.Bias(Q), GPy.likelihoods.Gaussian(), missing_data=False, stochastic=True)
assert(m.checkgrad())
m.optimize('adadelta', max_iters=10)
assert(m.checkgrad())
def test_predict_missing_data(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
mu1, var1 = m.predict(m.X, full_cov=False)
mu2, var2 = self.m_full.predict(self.m_full.X, full_cov=False)
np.testing.assert_allclose(mu1, mu2)
for i in range(var1.shape[1]):
np.testing.assert_allclose(var1[:,[i]], var2)
mu1, var1 = m.predict(m.X, full_cov=True)
mu2, var2 = self.m_full.predict(self.m_full.X, full_cov=True)
np.testing.assert_allclose(mu1, mu2)
for i in range(var1.shape[2]):
np.testing.assert_allclose(var1[:,:,i], var2)
def test_lik_comparisons_m0_s0(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=False, stochastic=False)
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_lik_comparisons_m1_s1(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_lik_comparisons_m0_s1(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=False, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_gradients_missingdata(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=False, batchsize=self.Y.shape[1])
assert(m.checkgrad())
def test_gradients_missingdata_stochastics(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=1)
assert(m.checkgrad())
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=4)
assert(m.checkgrad())
def test_gradients_stochastics(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=False, stochastic=True, batchsize=1)
assert(m.checkgrad())
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=False, stochastic=True, batchsize=4)
assert(m.checkgrad())
def test_predict(self):
# Test if the different implementations give the exact same likelihood as the full model.
# All of the following settings should give the same likelihood and gradients as the full model:
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]
np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()

View file

@@ -553,16 +553,27 @@ class GradientTests(np.testing.TestCase):
rbflin = GPy.kern.RBF(1) + GPy.kern.White(1)
self.check_model(rbflin, model_type='SparseGPRegression', dimension=1, uncertain_inputs=1)
def test_GPLVM_rbf_bias_white_kern_2D(self):
""" Testing GPLVM with rbf + bias kernel """
N, input_dim, D = 50, 1, 2
X = np.random.rand(N, input_dim)
k = GPy.kern.RBF(input_dim, 0.5, 0.9 * np.ones((1,))) + GPy.kern.Bias(input_dim, 0.1) + GPy.kern.White(input_dim, 0.05)
k = GPy.kern.RBF(input_dim, 0.5, 0.9 * np.ones((1,))) + GPy.kern.Bias(input_dim, 0.1) + GPy.kern.White(input_dim, 0.05) + GPy.kern.Matern32(input_dim) + GPy.kern.Matern52(input_dim)
K = k.K(X)
Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
m = GPy.models.GPLVM(Y, input_dim, kernel=k)
self.assertTrue(m.checkgrad())
def test_SparseGPLVM_rbf_bias_white_kern_2D(self):
""" Testing GPLVM with rbf + bias kernel """
N, input_dim, D = 50, 1, 2
X = np.random.rand(N, input_dim)
k = GPy.kern.RBF(input_dim, 0.5, 0.9 * np.ones((1,))) + GPy.kern.Bias(input_dim, 0.1) + GPy.kern.White(input_dim, 0.05) + GPy.kern.Matern32(input_dim) + GPy.kern.Matern52(input_dim)
K = k.K(X)
Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
m = GPy.models.SparseGPLVM(Y, input_dim, kernel=k)
self.assertTrue(m.checkgrad())
def test_BCGPLVM_rbf_bias_white_kern_2D(self):
""" Testing GPLVM with rbf + bias kernel """
N, input_dim, D = 50, 1, 2

View file

@@ -100,7 +100,7 @@ def _image_comparison(baseline_images, extensions=['pdf','svg','png'], tol=11):
fig.axes[0].set_axis_off()
fig.set_frameon(False)
fig.canvas.draw()
fig.savefig(os.path.join(result_dir, "{}.{}".format(base, ext)), transparent=True, edgecolor='none', facecolor='none')
fig.savefig(os.path.join(result_dir, "{}.{}".format(base, ext)), transparent=True, edgecolor='none', facecolor='none', bbox='tight')
for num, base in zip(plt.get_fignums(), baseline_images):
for ext in extensions:
#plt.close(num)
@@ -116,7 +116,7 @@ def _image_comparison(baseline_images, extensions=['pdf','svg','png'], tol=11):
def test_figure():
np.random.seed(1239847)
from GPy.plotting import plotting_library as pl
import matplotlib
#import matplotlib
matplotlib.rcParams.update(matplotlib.rcParamsDefault)
matplotlib.rcParams[u'figure.figsize'] = (4,3)
matplotlib.rcParams[u'text.usetex'] = False
@@ -160,7 +160,7 @@ def test_figure():
def test_kernel():
np.random.seed(1239847)
import matplotlib
#import matplotlib
matplotlib.rcParams.update(matplotlib.rcParamsDefault)
matplotlib.rcParams[u'figure.figsize'] = (4,3)
matplotlib.rcParams[u'text.usetex'] = False

GPy/testing/util_tests.py (new file, 49 lines)
View file

@@ -0,0 +1,49 @@
#===============================================================================
# Copyright (c) 2016, Max Zwiessele
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of GPy.testing.util_tests nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#===============================================================================
import unittest, numpy as np
class TestDebug(unittest.TestCase):
def test_checkFinite(self):
from GPy.util.debug import checkFinite
array = np.random.normal(0, 1, 100).reshape(25,4)
self.assertTrue(checkFinite(array, name='test'))
array[np.random.binomial(1, .3, array.shape).astype(bool)] = np.nan
self.assertFalse(checkFinite(array))
def test_checkFullRank(self):
from GPy.util.debug import checkFullRank
from GPy.util.linalg import tdot
array = np.random.normal(0, 1, 100).reshape(25,4)
self.assertFalse(checkFullRank(tdot(array), name='test'))
array = np.random.normal(0, 1, (25,25))
self.assertTrue(checkFullRank(tdot(array)))
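
The new tests above call GPy.util.debug directly; as a rough standalone sketch of the same calls (assuming a GPy installation providing GPy.util.debug and GPy.util.linalg.tdot, where tdot(A) computes A.dot(A.T)):

import numpy as np
from GPy.util.debug import checkFinite, checkFullRank
from GPy.util.linalg import tdot

A = np.random.normal(0, 1, (25, 4))
print(checkFinite(A, name='A'))          # True: all entries are finite
A[0, 0] = np.nan
print(checkFinite(A, name='A'))          # False once a NaN is present

B = np.random.normal(0, 1, (25, 25))
print(checkFullRank(tdot(B), name='B'))  # True: B.dot(B.T) is full rank
C = np.random.normal(0, 1, (25, 4))
print(checkFullRank(tdot(C), name='C'))  # False: rank is at most 4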