diff --git a/GPy/inference/latent_function_inference/exact_gaussian_inference.py b/GPy/inference/latent_function_inference/exact_gaussian_inference.py
index cffc5e28..80e69b81 100644
--- a/GPy/inference/latent_function_inference/exact_gaussian_inference.py
+++ b/GPy/inference/latent_function_inference/exact_gaussian_inference.py
@@ -1,13 +1,61 @@
 # Copyright (c) 2012, GPy authors (see AUTHORS.txt).
 # Licensed under the BSD 3-clause license (see LICENSE.txt)
 
-def exact_gaussian_inference(K, likelihood, Y, Y_metadata=None):
-
-    Wi, LW, LWi, W_logdet = pdinv(K + likelhood.covariance(Y, Y_metadata))
-
-    alpha, _ = dpotrs(LW, YYT_factor, lower=1)
-
-    dL_dK = 0.5 * (tdot(alpha) - Y.shape[1] * Wi)
-
-    log_marginal = (-0.5 * Y.size * np.log(2.*np.pi) -
-        0.5 * Y.shape[1] * W_logdet + np.sum(np.square(alpha))
+import numpy as np
+
+from .posterior import Posterior
+from ...util.linalg import pdinv, dpotrs, tdot
+
+log_2_pi = np.log(2*np.pi)
+
+class exact_gaussian_inference(object):
+    """
+    An object for inference when the likelihood is Gaussian.
+
+    The function self.inference returns a Posterior object, which summarizes
+    the posterior.
+
+    For efficiency, we sometimes work with the cholesky of Y*Y.T. To save
+    repeatedly recomputing this, we cache it.
+    """
+    def __init__(self):
+        # Cache mapping Y -> its factor (see get_YYTfactor). A plain dict is
+        # used here; the draft referenced an undefined `caching` helper.
+        self._YYTfactor_cache = {}
+
+    def get_YYTfactor(self, Y):
+        """
+        Find a matrix L which satisfies L L^T = Y Y^T.
+
+        Note that L may have fewer columns than Y.
+        """
+        N, D = Y.shape
+        if N > D:
+            return Y
+        else:
+            # TODO: if Y is in the cache, return the cached factor;
+            # else compute L, store it in the cache and return it.
+            raise NotImplementedError('TODO')
+
+    def inference(self, K, likelihood, Y, Y_metadata=None):
+        """
+        Returns a Posterior instance containing the essential quantities of
+        the posterior.
+        """
+        YYT_factor = self.get_YYTfactor(Y)
+
+        # W = K + likelihood covariance; Wi = W^{-1}, LW = chol(W)
+        Wi, LW, LWi, W_logdet = pdinv(K + likelihood.covariance(Y, Y_metadata))
+
+        # alpha = W^{-1} YYT_factor (dpotrs solves (LW LW^T) x = b)
+        alpha, _ = dpotrs(LW, YYT_factor, lower=1)
+
+        dL_dK = 0.5 * (tdot(alpha) - Y.shape[1] * Wi)
+
+        # log p(Y|X) = -0.5*(N*D*log(2*pi) + D*logdet(W) + tr(W^{-1} Y Y^T))
+        log_marginal = 0.5*(-Y.size * log_2_pi - Y.shape[1] * W_logdet - np.sum(alpha * YYT_factor))
+
+        # NOTE(review): assumes the likelihood object exposes dL_dtheta -- confirm API
+        dL_dtheta_lik = likelihood.dL_dtheta(np.diag(dL_dK))
+
+        return Posterior(log_marginal, dL_dK, dL_dtheta_lik, LW, alpha, K)
diff --git a/GPy/inference/latent_function_inference/posterior.py b/GPy/inference/latent_function_inference/posterior.py
index ecaa3f2e..6d7f5504 100644
--- a/GPy/inference/latent_function_inference/posterior.py
+++ b/GPy/inference/latent_function_inference/posterior.py
@@ -1,30 +1,56 @@
+# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
 import numpy as np
 
+from ...util.linalg import dtrtrs, tdot
+
 class Posterior(object):
     """
-    An object to represent a Gaussian posterior over latent function values.
-    """
-    def __init__(self, log_marginal, dL_dmean=None, cov=None, prec=None):
-        self._log_marginal = log_marginal
-
-        #TODO: accept the init arguments, make sure we've got enough information to compute everything.
+    An object to represent a Gaussian posterior over latent function values.
+    This may be computed exactly for Gaussian likelihoods, or approximated for
+    non-Gaussian likelihoods.
+
+    The purpose of this class is to serve as an interface between the inference
+    schemes and the model classes.
+    """
+    def __init__(self, log_marginal, dLM_dK, dLM_dtheta_lik, woodbury_chol, woodbury_mean, K):
+        """
+        log_marginal: log p(Y|X)
+        dLM_dK: d/dK log p(Y|X)
+        dLM_dtheta_lik: d/dtheta log p(Y|X) (where theta are the parameters of the likelihood)
+        woodbury_chol: a lower triangular matrix L that satisfies posterior_covariance = K - K L^{-T} L^{-1} K
+        woodbury_mean: a matrix (or vector, as Nx1 matrix) M which satisfies posterior_mean = K M
+        K: the prior covariance (required for lazy computation of various quantities)
+        """
+        self.log_marginal = log_marginal
+        self.dLM_dK = dLM_dK
+        self.dLM_dtheta_lik = dLM_dtheta_lik
+        self._woodbury_chol = woodbury_chol
+        self._woodbury_mean = woodbury_mean
+        self._K = K
+
+        # these are computed lazily below
+        self._mean = None
+        self._covariance = None
+        self._precision = None
 
     @property
     def mean(self):
         if self._mean is None:
-            self._mean = ??
+            self._mean = np.dot(self._K, self._woodbury_mean)
         return self._mean
 
     @property
     def covariance(self):
         if self._covariance is None:
-            self._covariance = ??
+            # LiK = L^{-1} K (triangular solve), so
+            # K - tdot(LiK.T) = K - K L^{-T} L^{-1} K as documented above.
+            LiK, _ = dtrtrs(self._woodbury_chol, self._K, lower=1)
+            self._covariance = self._K - tdot(LiK.T)
         return self._covariance
 
     @property
     def precision(self):
         if self._precision is None:
-            self._precision = ??
+            self._precision = np.linalg.inv(self.covariance)
         return self._precision
-
-    @prop
diff --git a/GPy/likelihoods/.DS_Store b/GPy/likelihoods/.DS_Store
index 69fb4861..8228ae90 100644
Binary files a/GPy/likelihoods/.DS_Store and b/GPy/likelihoods/.DS_Store differ