From 83a49f132a96b7c49255b0c0b2ee26b2754ff7e6 Mon Sep 17 00:00:01 2001 From: James Hensman Date: Fri, 6 Dec 2013 09:50:01 -0800 Subject: [PATCH] Added the structure to posterior.py to enable... to enable computation from the posterior mean and variance, instead of the woodbury components This is the first step in being able to use this structure for EP and the Laplace approximation. --- GPy/core/gp_base.py | 7 +-- .../latent_function_inference/posterior.py | 60 +++++++++++++++++-- GPy/models/gp_regression.py | 2 +- 3 files changed, 58 insertions(+), 11 deletions(-) diff --git a/GPy/core/gp_base.py b/GPy/core/gp_base.py index 827de434..9466a011 100644 --- a/GPy/core/gp_base.py +++ b/GPy/core/gp_base.py @@ -183,11 +183,10 @@ class GPBase(Model): m, v = self._raw_predict(Xgrid, which_parts=which_parts) lower = m - 2*np.sqrt(v) upper = m + 2*np.sqrt(v) - Y = self.likelihood.Y + Y = self.Y else: - m, v, lower, upper = self.predict(Xgrid, which_parts=which_parts, sampling=False) #Compute the exact mean - m_, v_, lower, upper = self.predict(Xgrid, which_parts=which_parts, sampling=True, num_samples=15000) #Apporximate the percentiles - Y = self.likelihood.data + m, v, lower, upper = self.predict(Xgrid, which_parts=which_parts) #Compute the exact mean + Y = self.Y for d in which_data_ycols: gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], axes=ax, edgecol=linecol, fillcol=fillcol) ax.plot(Xu[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', mew=1.5) diff --git a/GPy/inference/latent_function_inference/posterior.py b/GPy/inference/latent_function_inference/posterior.py index 79db69e5..9ebffff8 100644 --- a/GPy/inference/latent_function_inference/posterior.py +++ b/GPy/inference/latent_function_inference/posterior.py @@ -13,7 +13,7 @@ class Posterior(object): schemes and the model classes. 
""" - def __init__(self, log_marginal, dL_dK, dL_dtheta_lik, woodbury_chol, woodbury_vector, K): + def __init__(self, log_marginal, dL_dK, dL_dtheta_lik, woodbury_chol=None, woodbury_vector=None, K=None, mean=None, cov=None, K_chol=None): """ log_marginal: log p(Y|X) DL_dK: d/dK log p(Y|X) @@ -21,17 +21,51 @@ class Posterior(object): woodbury_chol : a lower triangular matrix L that satisfies posterior_covariance = K - K L^{-T} L^{-1} K woodbury_vector : a matrix (or vector, as Nx1 matrix) M which satisfies posterior_mean = K M K : the proir covariance (required for lazy computation of various quantities) + mean : the posterior mean + cov : the posterior covariance + + Not all of the above need to be supplied! You *must* supply: + + log_marginal + dL_dK + dL_dtheta_lik + K (for lazy computation) + + You may supply either: + + woodbury_chol + woodbury_vector + + Or: + + mean + cov + K_chol (for lazy computation) + + From the supplied quantities, all of the others will be computed on demand (lazy computation) + """ + #obligatory self.log_marginal = log_marginal self.dL_dK = dL_dK self.dL_dtheta_lik = dL_dtheta_lik - self._woodbury_chol = woodbury_chol - self._woodbury_vector = woodbury_vector self._K = K - #these are computed lazily below - self._mean = None - self._covariance = None + if ((woodbury_chol is not None) and (woodbury_vector is not None) and (K is not None)) or ((mean is not None) and (cov is not None) and (K is not None)): + pass # we have sufficient to compute the posterior + else: + raise ValueError, "insufficient onformation to compute the posterior" + + #option 1: + self._woodbury_chol = woodbury_chol + self._woodbury_vector = woodbury_vector + + #option 2: + self._mean = mean + self._covariance = cov + self._K_chol = K_chol + + #copmute this lazily self._precision = None @property @@ -53,6 +87,20 @@ class Posterior(object): self._precision = np.linalg.inv(self.covariance) return self._precision + @property + def woodbury_chol(self): + if 
self._woodbury_chol is None: + raise NotImplementedError("TODO: compute woodbury_chol from the posterior mean/covariance") + else: + return self._woodbury_chol + + @property + def woodbury_vector(self): + if self._woodbury_vector is None: + raise NotImplementedError("TODO: compute woodbury_vector from the posterior mean/covariance") + else: + return self._woodbury_vector + diff --git a/GPy/models/gp_regression.py b/GPy/models/gp_regression.py index 4ebf2e25..a833d11b 100644 --- a/GPy/models/gp_regression.py +++ b/GPy/models/gp_regression.py @@ -27,7 +27,7 @@ class GPRegression(GP): likelihood = likelihoods.Gaussian() - super(GPRegression, self).__init__(X, Y, kernel, likelihood, name='gp regression') + super(GPRegression, self).__init__(X, Y, kernel, likelihood, name='gp_regression') def getstate(self): return GP.getstate(self)