Merge branch 'devel' of git://github.com/SheffieldML/GPy into devel

James McMurray 2013-09-20 14:29:33 +01:00
commit 5bf20f7bd7
3 changed files with 12 additions and 4 deletions


@@ -293,7 +293,7 @@ class SparseGP(GPBase):
             Kxx = self.kern.Kdiag(Xnew, which_parts=which_parts)
             var = Kxx - np.sum(Kx * np.dot(Kmmi_LmiBLmi, Kx), 0)
         else:
-            # assert which_p.Tarts=='all', "swithching out parts of variational kernels is not implemented"
+            # assert which_parts=='all', "swithching out parts of variational kernels is not implemented"
             Kx = self.kern.psi1(self.Z, Xnew, X_variance_new)  # , which_parts=which_parts) TODO: which_parts
             mu = np.dot(Kx, self.Cpsi1V)
             if full_cov:
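
For context, the lines around this change compute the sparse GP posterior at the new inputs Xnew: the mean is Kx^T Cpsi1V and the marginal variance is Kxx - sum(Kx * Kmmi_LmiBLmi Kx). Below is a minimal numpy sketch of that algebra, assuming Cpsi1V and Kmmi_LmiBLmi have already been computed and that Kx is the (M x N*) cross-covariance between the inducing inputs Z and Xnew; the function and argument names are illustrative, not GPy's API.

    import numpy as np

    def sparse_gp_predict_sketch(kern_K, kern_Kdiag, Z, Xnew, Cpsi1V, Kmmi_LmiBLmi):
        # Cross-covariance between inducing inputs and test inputs: (M, N*)
        Kx = kern_K(Z, Xnew)
        # Predictive mean, one column per output dimension: (N*, D)
        mu = np.dot(Kx.T, Cpsi1V)
        # Marginal predictive variance: prior variance minus the explained part, (N*,)
        Kxx = kern_Kdiag(Xnew)
        var = Kxx - np.sum(Kx * np.dot(Kmmi_LmiBLmi, Kx), 0)
        return mu, var
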


@@ -5,7 +5,7 @@ import exponential
 import finite_dimensional
 import fixed
 import gibbs
-#import hetero #hetero.py is not commited: omitting for now. JH.
+import hetero
 import hierarchical
 import independent_outputs
 import linear


@@ -8,7 +8,7 @@ from .. import kern
 import itertools
 from matplotlib.colors import colorConverter
 from GPy.inference.optimization import SCG
-from GPy.util import plot_latent
+from GPy.util import plot_latent, linalg
 from GPy.models.gplvm import GPLVM
 from GPy.util.plot_latent import most_significant_input_dimensions
 from matplotlib import pyplot
@@ -140,12 +140,20 @@ class BayesianGPLVM(SparseGP, GPLVM):
         dpsi0 = -0.5 * self.input_dim * self.likelihood.precision
         dpsi2 = self.dL_dpsi2[0][None, :, :]  # TODO: this may change if we ignore het. likelihoods
         V = self.likelihood.precision * Y
+        #compute CPsi1V
+        if self.Cpsi1V is None:
+            psi1V = np.dot(self.psi1.T, self.likelihood.V)
+            tmp, _ = linalg.dtrtrs(self._Lm, np.asfortranarray(psi1V), lower=1, trans=0)
+            tmp, _ = linalg.dpotrs(self.LB, tmp, lower=1)
+            self.Cpsi1V, _ = linalg.dtrtrs(self._Lm, tmp, lower=1, trans=1)
         dpsi1 = np.dot(self.Cpsi1V, V.T)
         start = np.zeros(self.input_dim * 2)
         for n, dpsi1_n in enumerate(dpsi1.T[:, :, None]):
-            args = (self.kern, self.Z, dpsi0, dpsi1_n, dpsi2)
+            args = (self.kern, self.Z, dpsi0, dpsi1_n.T, dpsi2)
             xopt, fopt, neval, status = SCG(f=latent_cost, gradf=latent_grad, x=start, optargs=args, display=False)
             mu, log_S = xopt.reshape(2, 1, -1)
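
The six added lines lazily cache Cpsi1V, the quantity the prediction code in the first file multiplies against psi1. Reading off the solves, Cpsi1V = Lm^{-T} (LB LB^T)^{-1} Lm^{-1} psi1^T V, where Lm and LB are lower Cholesky factors held by the model and GPy.util.linalg.dtrtrs / dpotrs wrap the corresponding LAPACK triangular and Cholesky solves. A standalone sketch of the same computation using scipy in place of GPy.util.linalg (an assumption made here for illustration; shapes: Lm and LB are M x M, psi1 is N x M, V is N x D):

    import numpy as np
    from scipy.linalg import solve_triangular, cho_solve

    def compute_Cpsi1V_sketch(Lm, LB, psi1, V):
        # psi1^T V: (M, D)
        psi1V = np.dot(psi1.T, V)
        # Lm^{-1} psi1^T V via a lower-triangular solve
        tmp = solve_triangular(Lm, psi1V, lower=True)
        # (LB LB^T)^{-1} tmp via a Cholesky solve with the lower factor LB
        tmp = cho_solve((LB, True), tmp)
        # Lm^{-T} tmp via a transposed triangular solve
        return solve_triangular(Lm, tmp, lower=True, trans='T')
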