mirror of https://github.com/SheffieldML/GPy.git (synced 2026-05-10 20:42:39 +02:00)
[sparsegplvm] added sparsegplvm and tests for minibatch sparsegplvm

parent 99caca6702
commit b0347c5108

4 changed files with 107 additions and 2 deletions
@@ -106,7 +106,7 @@ class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
         super(BayesianGPLVMMiniBatch,self).parameters_changed()
 
         kl_fctr = self.kl_factr
-        if kl_fctr > 0:
+        if kl_fctr > 0 and self.has_uncertain_inputs():
             Xgrad = self.X.gradient.copy()
             self.X.gradient[:] = 0
             self.variational_prior.update_gradients_KL(self.X)
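The guard added above matters because update_gradients_KL assumes a variational posterior on the latent X; with X_variance=False (as in the new tests below) X is a point estimate and there is no KL(q(X)||p(X)) term to differentiate. A minimal sketch of the distinction, assuming GPy is importable and that the default constructor still initializes a variational q(X) as in BayesianGPLVM:

import numpy as np
import GPy

np.random.seed(0)
Y = np.random.normal(0, 1, (100, 13))  # toy data, shaped like the tests below

# X_variance=False: X is a plain parameter, so the KL block above is now skipped.
m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(Y, 3, X_variance=False)
print(m.has_uncertain_inputs())  # expected: False

# Default: X carries a variational posterior, so the KL gradients still apply.
m2 = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(Y, 3)
print(m2.has_uncertain_inputs())  # expected: True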
@@ -4,6 +4,7 @@
 
 import sys
 from .sparse_gp_regression import SparseGPRegression
+from ..core import Param
 
 class SparseGPLVM(SparseGPRegression):
     """
@@ -21,7 +22,9 @@ class SparseGPLVM(SparseGPRegression):
         if X is None:
             from ..util.initialization import initialize_latent
             X, fracs = initialize_latent(init, input_dim, Y)
+        X = Param('latent space', X)
         SparseGPRegression.__init__(self, X, Y, kernel=kernel, num_inducing=num_inducing)
+        self.link_parameter(self.X, 0)
 
     def parameters_changed(self):
         super(SparseGPLVM, self).parameters_changed()
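With X wrapped in a Param (named 'latent space') and linked at index 0, the latent coordinates become part of the parameters SparseGPLVM optimizes. A short usage sketch with toy data, not part of the commit:

import numpy as np
import GPy

np.random.seed(0)
Y = np.random.normal(0, 1, (100, 13))  # N x D observations
Q = 3                                  # latent dimensionality

m = GPy.models.SparseGPLVM(Y, Q, kernel=GPy.kern.RBF(Q, ARD=True))
print(m)                  # 'latent space' now appears in the parameter list
m.optimize(max_iters=10)  # X is optimized jointly with kernel and inducing inputs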
@@ -103,6 +103,97 @@ class BGPLVMTest(unittest.TestCase):
         np.testing.assert_allclose(m.gradient, self.m_full.gradient)
         assert(m.checkgrad())
 
+class SparseGPMinibatchTest(unittest.TestCase):
+
+    def setUp(self):
+        np.random.seed(12345)
+        X, W = np.random.normal(0,1,(100,6)), np.random.normal(0,1,(6,13))
+        Y = X.dot(W) + np.random.normal(0, .1, (X.shape[0], W.shape[1]))
+        self.inan = np.random.binomial(1, .1, Y.shape).astype(bool)
+        self.X, self.W, self.Y = X,W,Y
+        self.Q = 3
+        self.m_full = GPy.models.SparseGPLVM(Y, self.Q, kernel=GPy.kern.RBF(self.Q, ARD=True))
+
+    def test_lik_comparisons_m1_s0(self):
+        # Test if the different implementations give the exact same likelihood as the full model.
+        # All of the following settings should give the same likelihood and gradients as the full model:
+        m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=False)
+        m[:] = self.m_full[:]
+        np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
+        np.testing.assert_allclose(m.gradient, self.m_full.gradient)
+        assert(m.checkgrad())
+
+    def test_predict_missing_data(self):
+        m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
+        m[:] = self.m_full[:]
+        np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
+        np.testing.assert_allclose(m.gradient, self.m_full.gradient)
+
+        mu1, var1 = m.predict(m.X, full_cov=False)
+        mu2, var2 = self.m_full.predict(self.m_full.X, full_cov=False)
+        np.testing.assert_allclose(mu1, mu2)
+        for i in range(var1.shape[1]):
+            np.testing.assert_allclose(var1[:,[i]], var2)
+
+        mu1, var1 = m.predict(m.X, full_cov=True)
+        mu2, var2 = self.m_full.predict(self.m_full.X, full_cov=True)
+        np.testing.assert_allclose(mu1, mu2)
+        for i in range(var1.shape[2]):
+            np.testing.assert_allclose(var1[:,:,i], var2)
+
+    def test_lik_comparisons_m0_s0(self):
+        # Test if the different implementations give the exact same likelihood as the full model.
+        # All of the following settings should give the same likelihood and gradients as the full model:
+        m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=False, stochastic=False)
+        m[:] = self.m_full[:]
+        np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
+        np.testing.assert_allclose(m.gradient, self.m_full.gradient)
+        assert(m.checkgrad())
+
+    def test_lik_comparisons_m1_s1(self):
+        # Test if the different implementations give the exact same likelihood as the full model.
+        # All of the following settings should give the same likelihood and gradients as the full model:
+        m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
+        m[:] = self.m_full[:]
+        np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
+        np.testing.assert_allclose(m.gradient, self.m_full.gradient)
+        assert(m.checkgrad())
+
+    def test_lik_comparisons_m0_s1(self):
+        # Test if the different implementations give the exact same likelihood as the full model.
+        # All of the following settings should give the same likelihood and gradients as the full model:
+        m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=False, stochastic=True, batchsize=self.Y.shape[1])
+        m[:] = self.m_full[:]
+        np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
+        np.testing.assert_allclose(m.gradient, self.m_full.gradient)
+        assert(m.checkgrad())
+
+    def test_gradients_missingdata(self):
+        m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=False, batchsize=self.Y.shape[1])
+        assert(m.checkgrad())
+
+    def test_gradients_missingdata_stochastics(self):
+        m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=1)
+        assert(m.checkgrad())
+        m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=4)
+        assert(m.checkgrad())
+
+    def test_gradients_stochastics(self):
+        m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=False, stochastic=True, batchsize=1)
+        assert(m.checkgrad())
+        m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=False, stochastic=True, batchsize=4)
+        assert(m.checkgrad())
+
+    def test_predict(self):
+        # Test if the different implementations give the exact same likelihood as the full model.
+        # All of the following settings should give the same likelihood and gradients as the full model:
+        m = GPy.models.bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch(self.Y, self.Q, X_variance=False, missing_data=True, stochastic=True, batchsize=self.Y.shape[1])
+        m[:] = self.m_full[:]
+        np.testing.assert_almost_equal(m.log_likelihood(), self.m_full.log_likelihood(), 7)
+        np.testing.assert_allclose(m.gradient, self.m_full.gradient)
+        assert(m.checkgrad())
+
+
 
 if __name__ == "__main__":
     #import sys;sys.argv = ['', 'Test.testName']
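The new SparseGPMinibatchTest builds one full SparseGPLVM (m_full) and checks that BayesianGPLVMMiniBatch with X_variance=False reproduces its likelihood, gradients and predictions; the m0/m1 and s0/s1 suffixes in the test names toggle missing_data and stochastic respectively. To run just this class, something along these lines should work (the import path is an assumption; point it at the test module this diff touches):

import unittest
# Hypothetical module path; adjust to wherever SparseGPMinibatchTest lives.
from GPy.testing.minibatch_tests import SparseGPMinibatchTest

suite = unittest.TestLoader().loadTestsFromTestCase(SparseGPMinibatchTest)
unittest.TextTestRunner(verbosity=2).run(suite)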
@@ -515,16 +515,27 @@ class GradientTests(np.testing.TestCase):
         rbflin = GPy.kern.RBF(1) + GPy.kern.White(1)
         self.check_model(rbflin, model_type='SparseGPRegression', dimension=1, uncertain_inputs=1)
 
     def test_GPLVM_rbf_bias_white_kern_2D(self):
         """ Testing GPLVM with rbf + bias kernel """
         N, input_dim, D = 50, 1, 2
         X = np.random.rand(N, input_dim)
-        k = GPy.kern.RBF(input_dim, 0.5, 0.9 * np.ones((1,))) + GPy.kern.Bias(input_dim, 0.1) + GPy.kern.White(input_dim, 0.05)
+        k = GPy.kern.RBF(input_dim, 0.5, 0.9 * np.ones((1,))) + GPy.kern.Bias(input_dim, 0.1) + GPy.kern.White(input_dim, 0.05) + GPy.kern.Matern32(input_dim) + GPy.kern.Matern52(input_dim)
         K = k.K(X)
         Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
         m = GPy.models.GPLVM(Y, input_dim, kernel=k)
         self.assertTrue(m.checkgrad())
 
+
+    def test_SparseGPLVM_rbf_bias_white_kern_2D(self):
+        """ Testing GPLVM with rbf + bias kernel """
+        N, input_dim, D = 50, 1, 2
+        X = np.random.rand(N, input_dim)
+        k = GPy.kern.RBF(input_dim, 0.5, 0.9 * np.ones((1,))) + GPy.kern.Bias(input_dim, 0.1) + GPy.kern.White(input_dim, 0.05) + GPy.kern.Matern32(input_dim) + GPy.kern.Matern52(input_dim)
+        K = k.K(X)
+        Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
+        m = GPy.models.SparseGPLVM(Y, input_dim, kernel=k)
+        self.assertTrue(m.checkgrad())
+
     def test_BCGPLVM_rbf_bias_white_kern_2D(self):
         """ Testing GPLVM with rbf + bias kernel """
         N, input_dim, D = 50, 1, 2
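The same gradient check can be reproduced outside the test harness. A self-contained sketch of what the new test_SparseGPLVM_rbf_bias_white_kern_2D exercises (random data, so only the pass/fail of checkgrad is meaningful):

import numpy as np
import GPy

np.random.seed(0)
N, input_dim = 50, 1
X = np.random.rand(N, input_dim)
k = (GPy.kern.RBF(input_dim, 0.5, 0.9 * np.ones((1,)))
     + GPy.kern.Bias(input_dim, 0.1)
     + GPy.kern.White(input_dim, 0.05)
     + GPy.kern.Matern32(input_dim)
     + GPy.kern.Matern52(input_dim))
Y = np.random.multivariate_normal(np.zeros(N), k.K(X), input_dim).T
m = GPy.models.SparseGPLVM(Y, input_dim, kernel=k)
m.checkgrad(verbose=True)  # prints a per-parameter analytic vs numerical comparison
assert m.checkgrad()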