[stochastics] update for new stochastic optimizers in gpy

This commit is contained in:
mzwiessele 2016-03-08 09:47:24 +00:00
parent c72d085e2b
commit e4a4344334
4 changed files with 152 additions and 2 deletions

View file

@ -124,6 +124,24 @@ class SparseGPMinibatchTest(unittest.TestCase):
np.testing.assert_allclose(m.gradient, self.m_full.gradient)
assert(m.checkgrad())
def test_sparsegp_init(self):
    """Gradient-check SparseGPMiniBatch under every missing_data/stochastic setting.

    All four flag combinations should give the same likelihood and
    gradients as the full model, so each constructed model must pass
    checkgrad().
    """
    np.random.seed(1234)
    # Draw 10 distinct inducing inputs from the training data.
    Z = self.X[np.random.choice(self.X.shape[0], replace=False, size=10)].copy()
    Q = Z.shape[1]
    # Exercise both boolean switches in the same order as before:
    # (True, False), (True, True), (False, False), (False, True).
    for missing_data in (True, False):
        for stochastic in (False, True):
            # A fresh kernel sum is built for every model, as the
            # original repeated constructions did.
            kern = GPy.kern.RBF(Q) + GPy.kern.Matern32(Q) + GPy.kern.Bias(Q)
            m = GPy.models.sparse_gp_minibatch.SparseGPMiniBatch(
                self.X, self.Y, Z, kern,
                GPy.likelihoods.Gaussian(),
                missing_data=missing_data,
                stochastic=stochastic)
            assert(m.checkgrad())
def test_predict_missing_data(self):
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
m[:] = self.m_full[:]