diff --git a/GPy/models/BGPLVM.py b/GPy/models/BGPLVM.py
index db147944..ffa7df54 100644
--- a/GPy/models/BGPLVM.py
+++ b/GPy/models/BGPLVM.py
@@ -5,10 +5,12 @@
 import numpy as np
 import pylab as pb
 import sys, pdb
 from GPLVM import GPLVM
-from sparse_GP_regression import sparse_GP_regression
+from sparse_GP import sparse_GP
 from GPy.util.linalg import pdinv
+from ..likelihoods import Gaussian
+from .. import kern
 
-class Bayesian_GPLVM(sparse_GP_regression, GPLVM):
+class Bayesian_GPLVM(sparse_GP, GPLVM):
     """
     Bayesian Gaussian Process Latent Variable Model
@@ -20,15 +22,23 @@ class Bayesian_GPLVM(sparse_GP_regression, GPLVM):
     :type init: 'PCA'|'random'
     """
 
-    def __init__(self, Y, Q, init='PCA', **kwargs):
+    def __init__(self, Y, Q, init='PCA', M=10, Z=None, **kwargs):
         X = self.initialise_latent(init, Q, Y)
-        S = np.ones_like(X) * 1e-2#
-        sparse_GP_regression.__init__(self, X, Y, X_uncertainty = S, **kwargs)
+
+        if Z is None:
+            Z = np.random.permutation(X.copy())[:M]
+        else:
+            assert Z.shape[1]==X.shape[1]
+
+        kernel = kern.rbf(Q) + kern.white(Q)
+
+        S = np.ones_like(X) * 1e-2#
+        sparse_GP.__init__(self, X, Gaussian(Y), X_uncertainty = S, Z=Z,**kwargs)
 
     def _get_param_names(self):
         X_names = sum([['X_%i_%i'%(n,q) for n in range(self.N)] for q in range(self.Q)],[])
         S_names = sum([['S_%i_%i'%(n,q) for n in range(self.N)] for q in range(self.Q)],[])
-        return (X_names + S_names + sparse_GP_regression._get_param_names(self))
+        return (X_names + S_names + sparse_GP._get_param_names(self))
 
     def _get_params(self):
         """
@@ -40,13 +50,13 @@ class Bayesian_GPLVM(sparse_GP_regression, GPLVM):
        ===============================================================
        """
 
-        return np.hstack((self.X.flatten(), self.X_uncertainty.flatten(), sparse_GP_regression._get_params(self)))
+        return np.hstack((self.X.flatten(), self.X_uncertainty.flatten(), sparse_GP._get_params(self)))
 
     def _set_params(self,x):
         N, Q = self.N, self.Q
         self.X = x[:self.X.size].reshape(N,Q).copy()
         self.X_uncertainty = x[(N*Q):(2*N*Q)].reshape(N,Q).copy()
-        sparse_GP_regression._set_params(self, x[(2*N*Q):])
+        sparse_GP._set_params(self, x[(2*N*Q):])
 
     def dL_dmuS(self):
         dL_dmu_psi0, dL_dS_psi0 = self.kern.dpsi1_dmuS(self.dL_dpsi1,self.Z,self.X,self.X_uncertainty)
@@ -58,5 +68,5 @@ class Bayesian_GPLVM(sparse_GP_regression, GPLVM):
         return np.hstack((dL_dmu.flatten(), dL_dS.flatten()))
 
     def _log_likelihood_gradients(self):
-        return np.hstack((self.dL_dmuS().flatten(), sparse_GP_regression._log_likelihood_gradients(self)))
+        return np.hstack((self.dL_dmuS().flatten(), sparse_GP._log_likelihood_gradients(self)))
 
diff --git a/GPy/models/__init__.py b/GPy/models/__init__.py
index 2269610d..9cc8fa68 100644
--- a/GPy/models/__init__.py
+++ b/GPy/models/__init__.py
@@ -9,6 +9,6 @@ from sparse_GP_regression import sparse_GP_regression
 from GPLVM import GPLVM
 from warped_GP import warpedGP
 # TODO: from generalized_FITC import generalized_FITC
-#from sparse_GPLVM import sparse_GPLVM
+from sparse_GPLVM import sparse_GPLVM
 #from uncollapsed_sparse_GP import uncollapsed_sparse_GP
-#from BGPLVM import Bayesian_GPLVM
+from BGPLVM import Bayesian_GPLVM
diff --git a/GPy/models/sparse_GP.py b/GPy/models/sparse_GP.py
index 7252f085..f35b3918 100644
--- a/GPy/models/sparse_GP.py
+++ b/GPy/models/sparse_GP.py
@@ -65,7 +65,7 @@ class sparse_GP(GP):
         # kernel computations, using BGPLVM notation
         self.Kmm = self.kern.K(self.Z)
         if self.has_uncertain_inputs:
-            self.psi0 = self.kern.psi0(self.Z,self.X, self.X_uncerTainty)
+            self.psi0 = self.kern.psi0(self.Z,self.X, self.X_uncertainty)
             self.psi1 = self.kern.psi1(self.Z,self.X, self.X_uncertainty).T
             self.psi2 = self.kern.psi2(self.Z,self.X, self.X_uncertainty)
         if self.likelihood.is_heteroscedastic: