From 7af2d62ee60d2e2bd62d2ac0ee1b0e7a1de9f5bf Mon Sep 17 00:00:00 2001 From: James Hensman Date: Fri, 20 Sep 2013 13:03:24 +0100 Subject: [PATCH] do_test_latents appears to be working now --- GPy/core/sparse_gp.py | 2 +- GPy/models/bayesian_gplvm.py | 12 ++++++++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/GPy/core/sparse_gp.py b/GPy/core/sparse_gp.py index cb96b478..31d2c695 100644 --- a/GPy/core/sparse_gp.py +++ b/GPy/core/sparse_gp.py @@ -292,7 +292,7 @@ class SparseGP(GPBase): Kxx = self.kern.Kdiag(Xnew, which_parts=which_parts) var = Kxx - np.sum(Kx * np.dot(Kmmi_LmiBLmi, Kx), 0) else: - # assert which_p.Tarts=='all', "swithching out parts of variational kernels is not implemented" + # assert which_parts=='all', "switching out parts of variational kernels is not implemented" Kx = self.kern.psi1(self.Z, Xnew, X_variance_new) # , which_parts=which_parts) TODO: which_parts mu = np.dot(Kx, self.Cpsi1V) if full_cov: diff --git a/GPy/models/bayesian_gplvm.py b/GPy/models/bayesian_gplvm.py index e514ad19..e094d915 100644 --- a/GPy/models/bayesian_gplvm.py +++ b/GPy/models/bayesian_gplvm.py @@ -8,7 +8,7 @@ from .. import kern import itertools from matplotlib.colors import colorConverter from GPy.inference.optimization import SCG -from GPy.util import plot_latent +from GPy.util import plot_latent, linalg from GPy.models.gplvm import GPLVM from GPy.util.plot_latent import most_significant_input_dimensions from matplotlib import pyplot @@ -140,12 +140,20 @@ class BayesianGPLVM(SparseGP, GPLVM): dpsi0 = -0.5 * self.input_dim * self.likelihood.precision dpsi2 = self.dL_dpsi2[0][None, :, :] # TODO: this may change if we ignore het. 
likelihoods V = self.likelihood.precision * Y + + #compute CPsi1V + if self.Cpsi1V is None: + psi1V = np.dot(self.psi1.T, self.likelihood.V) + tmp, _ = linalg.dtrtrs(self._Lm, np.asfortranarray(psi1V), lower=1, trans=0) + tmp, _ = linalg.dpotrs(self.LB, tmp, lower=1) + self.Cpsi1V, _ = linalg.dtrtrs(self._Lm, tmp, lower=1, trans=1) + dpsi1 = np.dot(self.Cpsi1V, V.T) start = np.zeros(self.input_dim * 2) for n, dpsi1_n in enumerate(dpsi1.T[:, :, None]): - args = (self.kern, self.Z, dpsi0, dpsi1_n, dpsi2) + args = (self.kern, self.Z, dpsi0, dpsi1_n.T, dpsi2) xopt, fopt, neval, status = SCG(f=latent_cost, gradf=latent_grad, x=start, optargs=args, display=False) mu, log_S = xopt.reshape(2, 1, -1)