diff --git a/GPy/models/bayesian_gplvm_minibatch.py b/GPy/models/bayesian_gplvm_minibatch.py index 73324386..128ef9e8 100644 --- a/GPy/models/bayesian_gplvm_minibatch.py +++ b/GPy/models/bayesian_gplvm_minibatch.py @@ -106,7 +106,7 @@ class BayesianGPLVMMiniBatch(SparseGPMiniBatch): super(BayesianGPLVMMiniBatch,self).parameters_changed() kl_fctr = self.kl_factr - if kl_fctr > 0: + if kl_fctr > 0 and self.has_uncertain_inputs(): Xgrad = self.X.gradient.copy() self.X.gradient[:] = 0 self.variational_prior.update_gradients_KL(self.X) diff --git a/GPy/models/sparse_gplvm.py b/GPy/models/sparse_gplvm.py index 22852d93..53696b45 100644 --- a/GPy/models/sparse_gplvm.py +++ b/GPy/models/sparse_gplvm.py @@ -4,6 +4,7 @@ import sys from .sparse_gp_regression import SparseGPRegression +from ..core import Param class SparseGPLVM(SparseGPRegression): """ @@ -21,7 +22,9 @@ class SparseGPLVM(SparseGPRegression): if X is None: from ..util.initialization import initialize_latent X, fracs = initialize_latent(init, input_dim, Y) + X = Param('latent space', X) SparseGPRegression.__init__(self, X, Y, kernel=kernel, num_inducing=num_inducing) + self.link_parameter(self.X, 0) def parameters_changed(self): super(SparseGPLVM, self).parameters_changed() diff --git a/GPy/testing/bgplvm_minibatch_tests.py b/GPy/testing/minibatch_tests.py similarity index 50% rename from GPy/testing/bgplvm_minibatch_tests.py rename to GPy/testing/minibatch_tests.py index 4a824368..ea764558 100644 --- a/GPy/testing/bgplvm_minibatch_tests.py +++ b/GPy/testing/minibatch_tests.py @@ -103,6 +103,97 @@ class BGPLVMTest(unittest.TestCase): np.testing.assert_allclose(m.gradient, self.m_full.gradient) assert(m.checkgrad()) +class SparseGPMinibatchTest(unittest.TestCase): + + + def setUp(self): + np.random.seed(12345) + X, W = np.random.normal(0,1,(100,6)), np.random.normal(0,1,(6,13)) + Y = X.dot(W) + np.random.normal(0, .1, (X.shape[0], W.shape[1])) + self.inan = np.random.binomial(1, .1, Y.shape).astype(bool) + 
self.X, self.W, self.Y = X,W,Y + self.Q = 3 + self.m_full = GPy.models.SparseGPLVM(Y, self.Q, kernel=GPy.kern.RBF(self.Q, ARD=True)) + + def test_lik_comparisons_m1_s0(self): + # Test if the different implementations give the exact same likelihood as the full model. + # All of the following settings should give the same likelihood and gradients as the full model: + m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=False) + m[:] = self.m_full[:] + np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7) + np.testing.assert_allclose(m.gradient, self.m_full.gradient) + assert(m.checkgrad()) + + def test_predict_missing_data(self): + m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=self.Y.shape[1]) + m[:] = self.m_full[:] + np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7) + np.testing.assert_allclose(m.gradient, self.m_full.gradient) + + mu1, var1 = m.predict(m.X, full_cov=False) + mu2, var2 = self.m_full.predict(self.m_full.X, full_cov=False) + np.testing.assert_allclose(mu1, mu2) + for i in range(var1.shape[1]): + np.testing.assert_allclose(var1[:,[i]], var2) + + mu1, var1 = m.predict(m.X, full_cov=True) + mu2, var2 = self.m_full.predict(self.m_full.X, full_cov=True) + np.testing.assert_allclose(mu1, mu2) + for i in range(var1.shape[2]): + np.testing.assert_allclose(var1[:,:,i], var2) + + def test_lik_comparisons_m0_s0(self): + # Test if the different implementations give the exact same likelihood as the full model. 
+ # All of the following settings should give the same likelihood and gradients as the full model: + m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=False, stochastic=False) + m[:] = self.m_full[:] + np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7) + np.testing.assert_allclose(m.gradient, self.m_full.gradient) + assert(m.checkgrad()) + + def test_lik_comparisons_m1_s1(self): + # Test if the different implementations give the exact same likelihood as the full model. + # All of the following settings should give the same likelihood and gradients as the full model: + m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=self.Y.shape[1]) + m[:] = self.m_full[:] + np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7) + np.testing.assert_allclose(m.gradient, self.m_full.gradient) + assert(m.checkgrad()) + + def test_lik_comparisons_m0_s1(self): + # Test if the different implementations give the exact same likelihood as the full model. 
+ # All of the following settings should give the same likelihood and gradients as the full model: + m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=False, stochastic=True, batchsize=self.Y.shape[1]) + m[:] = self.m_full[:] + np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7) + np.testing.assert_allclose(m.gradient, self.m_full.gradient) + assert(m.checkgrad()) + + def test_gradients_missingdata(self): + m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=False, batchsize=self.Y.shape[1]) + assert(m.checkgrad()) + + def test_gradients_missingdata_stochastics(self): + m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=1) + assert(m.checkgrad()) + m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=4) + assert(m.checkgrad()) + + def test_gradients_stochastics(self): + m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=False, stochastic=True, batchsize=1) + assert(m.checkgrad()) + m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=False, stochastic=True, batchsize=4) + assert(m.checkgrad()) + + def test_predict(self): + # Test if the different implementations give the exact same likelihood as the full model. 
+ # All of the following settings should give the same likelihood and gradients as the full model: + m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=self.Y.shape[1]) + m[:] = self.m_full[:] + np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7) + np.testing.assert_allclose(m.gradient, self.m_full.gradient) + assert(m.checkgrad()) + if __name__ == "__main__": #import sys;sys.argv = ['', 'Test.testName'] diff --git a/GPy/testing/model_tests.py b/GPy/testing/model_tests.py index 1212d746..b52d5eb7 100644 --- a/GPy/testing/model_tests.py +++ b/GPy/testing/model_tests.py @@ -515,16 +515,27 @@ class GradientTests(np.testing.TestCase): rbflin = GPy.kern.RBF(1) + GPy.kern.White(1) self.check_model(rbflin, model_type='SparseGPRegression', dimension=1, uncertain_inputs=1) + def test_GPLVM_rbf_bias_white_kern_2D(self): """ Testing GPLVM with rbf + bias kernel """ N, input_dim, D = 50, 1, 2 X = np.random.rand(N, input_dim) - k = GPy.kern.RBF(input_dim, 0.5, 0.9 * np.ones((1,))) + GPy.kern.Bias(input_dim, 0.1) + GPy.kern.White(input_dim, 0.05) + k = GPy.kern.RBF(input_dim, 0.5, 0.9 * np.ones((1,))) + GPy.kern.Bias(input_dim, 0.1) + GPy.kern.White(input_dim, 0.05) + GPy.kern.Matern32(input_dim) + GPy.kern.Matern52(input_dim) K = k.K(X) Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T m = GPy.models.GPLVM(Y, input_dim, kernel=k) self.assertTrue(m.checkgrad()) + def test_SparseGPLVM_rbf_bias_white_kern_2D(self): + """ Testing SparseGPLVM with rbf + bias + white + Matern kernel """ + N, input_dim, D = 50, 1, 2 + X = np.random.rand(N, input_dim) + k = GPy.kern.RBF(input_dim, 0.5, 0.9 * np.ones((1,))) + GPy.kern.Bias(input_dim, 0.1) + GPy.kern.White(input_dim, 0.05) + GPy.kern.Matern32(input_dim) + GPy.kern.Matern52(input_dim) + K = k.K(X) + Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T + m = GPy.models.SparseGPLVM(Y, input_dim, kernel=k) + 
self.assertTrue(m.checkgrad()) + def test_BCGPLVM_rbf_bias_white_kern_2D(self): """ Testing GPLVM with rbf + bias kernel """ N, input_dim, D = 50, 1, 2