various merge conflicts from the newGP branch

James Hensman 2013-02-04 16:15:54 +00:00
parent 687631f719
commit bbc7bd8aca
4 changed files with 18 additions and 18 deletions

View file

@@ -17,7 +17,7 @@ K = k.K(X)
 Y = np.random.multivariate_normal(np.zeros(N),K,D).T
 
 # k = GPy.kern.rbf(Q) + GPy.kern.bias(Q) + GPy.kern.white(Q, 0.00001)
-k = GPy.kern.linear(Q, ARD = False) + GPy.kern.white(Q, 0.00001)
+k = GPy.kern.rbf(Q, ARD = False) + GPy.kern.white(Q, 0.00001)
 m = GPy.models.Bayesian_GPLVM(Y, Q, kernel = k, M=M)
 m.constrain_positive('(rbf|bias|noise|white|S)')
 # m.constrain_fixed('S', 1)
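
A note on the change above: the demo's latent-space kernel moves from linear to rbf, and the regex already passed to constrain_positive ('(rbf|bias|noise|white|S)') covers the rbf and white parameter names, so the constraint line keeps working after the swap. A minimal sketch of the resulting toy setup, with sizes N, D, Q, M invented here because the hunk does not show them:

    import numpy as np
    import GPy

    N, D, Q, M = 50, 5, 2, 10   # assumed sizes, not part of this hunk
    X = np.random.randn(N, Q)
    K = GPy.kern.rbf(Q).K(X)    # covariance used to draw toy outputs
    Y = np.random.multivariate_normal(np.zeros(N), K, D).T

    k = GPy.kern.rbf(Q, ARD=False) + GPy.kern.white(Q, 0.00001)
    m = GPy.models.Bayesian_GPLVM(Y, Q, kernel=k, M=M)
    m.constrain_positive('(rbf|bias|noise|white|S)')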

View file

@@ -33,7 +33,7 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
         kernel = kern.rbf(Q) + kern.white(Q)
         S = np.ones_like(X) * 1e-2#
-        sparse_GP.__init__(self, X, Gaussian(Y), X_uncertainty = S, Z=Z,**kwargs)
+        sparse_GP.__init__(self, X, Gaussian(Y), X_uncertainty=S, Z=Z,**kwargs)
 
     def _get_param_names(self):
         X_names = sum([['X_%i_%i'%(n,q) for n in range(self.N)] for q in range(self.Q)],[])
@@ -46,7 +46,7 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
 
         The resulting 1-D array has this structure:
 
         ===============================================================
-        | mu | S | Z | beta | theta |
+        | mu | S | Z | theta | beta |
         ===============================================================
         """

View file

@@ -8,9 +8,10 @@ import sys, pdb
 from .. import kern
 from ..core import model
 from ..util.linalg import pdinv, PCA
-from GP_regression import GP_regression
+from GP import GP
+from ..likelihoods import Gaussian
 
 
-class GPLVM(GP_regression):
+class GPLVM(GP):
     """
     Gaussian Process Latent Variable Model
@@ -22,10 +23,13 @@ class GPLVM(GP_regression):
     :type init: 'PCA'|'random'
     """
 
-    def __init__(self, Y, Q, init='PCA', X = None, **kwargs):
+    def __init__(self, Y, Q, init='PCA', X = None, kernel=None, **kwargs):
         if X is None:
             X = self.initialise_latent(init, Q, Y)
-        GP_regression.__init__(self, X, Y, **kwargs)
+        if kernel is None:
+            kernel = kern.rbf(Q) + kern.bias(Q)
+        likelihood = Gaussian(Y)
+        GP.__init__(self, X, likelihood, kernel, **kwargs)
 
     def initialise_latent(self, init, Q, Y):
         if init == 'PCA':
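
Taking the new constructor at face value, the kernel argument is now optional and Y is wrapped in a Gaussian likelihood internally. A hedged usage sketch (the data shape and the GPy.models path are assumptions, not shown in this diff):

    import numpy as np
    import GPy

    Y = np.random.randn(50, 4)   # assumed: 50 points, 4 observed dimensions
    m = GPy.models.GPLVM(Y, 2)   # kernel defaults to kern.rbf(Q) + kern.bias(Q)

    # an explicit kernel can still be supplied:
    k = GPy.kern.rbf(2) + GPy.kern.white(2, 0.00001)
    m2 = GPy.models.GPLVM(Y, 2, kernel=k)
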
@@ -34,23 +38,19 @@ class GPLVM(GP_regression):
         return np.random.randn(Y.shape[0], Q)
 
     def _get_param_names(self):
-        return (sum([['X_%i_%i'%(n,q) for n in range(self.N)] for q in range(self.Q)],[])
-                + self.kern._get_param_names_transformed())
+        return sum([['X_%i_%i'%(n,q) for n in range(self.N)] for q in range(self.Q)],[]) + GP._get_param_names(self)
 
     def _get_params(self):
-        return np.hstack((self.X.flatten(), self.kern._get_params_transformed()))
+        return np.hstack((self.X.flatten(), GP._get_params(self)))
 
     def _set_params(self,x):
         self.X = x[:self.X.size].reshape(self.N,self.Q).copy()
-        GP_regression._set_params(self, x[self.X.size:])
+        GP._set_params(self, x[self.X.size:])
 
     def _log_likelihood_gradients(self):
-        dL_dK = self.dL_dK()
+        dL_dX = 2.*self.kern.dK_dX(self.dL_dK,self.X)
-        dL_dtheta = self.kern.dK_dtheta(dL_dK,self.X)
-        dL_dX = 2*self.kern.dK_dX(dL_dK,self.X)
-        return np.hstack((dL_dX.flatten(),dL_dtheta))
+        return np.hstack((dL_dX.flatten(),GP._log_likelihood_gradients(self)))
 
     def plot(self):
         assert self.Y.shape[1]==2
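
After this refactor, _get_params, _set_params and _log_likelihood_gradients all agree on the layout [X.flatten(), GP parameters]. A small numpy sketch of that round trip, with all sizes made up for illustration:

    import numpy as np

    N, Q, n_rest = 10, 2, 3             # assumed sizes
    X = np.random.randn(N, Q)
    rest = np.ones(n_rest)              # stands in for GP._get_params(self)

    x = np.hstack((X.flatten(), rest))  # the layout _get_params produces

    # _set_params must split at the same boundary:
    X2 = x[:N*Q].reshape(N, Q)
    rest2 = x[N*Q:]
    assert np.allclose(X2, X) and np.allclose(rest2, rest)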

View file

@@ -160,8 +160,8 @@ class GradientTests(unittest.TestCase):
         Y = np.hstack([np.ones(N/2),np.repeat(-1,N/2)])[:,None]
         kernel = GPy.kern.rbf(1)
         distribution = GPy.likelihoods.likelihood_functions.probit()
-        likelihood = GPy.likelihoods.EP(Y,distribution)
-        m = GPy.models.GP(X,kernel,likelihood=likelihood)
+        likelihood = GPy.likelihoods.EP(Y, distribution)
+        m = GPy.models.GP(X, likelihood, kernel)
         m.ensure_default_constraints()
         self.assertTrue(m.EPEM)
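
The test now passes the likelihood positionally, ahead of the kernel, matching the GP.__init__(self, X, likelihood, kernel, ...) ordering introduced in GPLVM above. A hedged sketch of the same setup outside the test harness (the value of N is an assumption, and N/2 relies on Python 2 integer division):

    import numpy as np
    import GPy

    N = 20
    X = np.random.randn(N, 1)
    Y = np.hstack([np.ones(N/2), np.repeat(-1, N/2)])[:, None]   # +/-1 labels

    kernel = GPy.kern.rbf(1)
    distribution = GPy.likelihoods.likelihood_functions.probit()
    likelihood = GPy.likelihoods.EP(Y, distribution)

    m = GPy.models.GP(X, likelihood, kernel)   # likelihood precedes kernel now
    m.ensure_default_constraints()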