removed marginal and derivative from posterior object

This commit is contained in:
James Hensman 2014-01-27 15:37:20 +00:00
parent 052b888793
commit ca1cb4eb22
5 changed files with 8 additions and 14 deletions

View file

@@ -60,10 +60,10 @@ class GP(Model):
self.parameters_changed() self.parameters_changed()
def parameters_changed(self): def parameters_changed(self):
self.posterior = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y) self.posterior, self._log_marginal_likelihood, self._dL_dK = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y)
def log_likelihood(self): def log_likelihood(self):
return self.posterior.log_marginal return self._log_marginal_likelihood
def dL_dtheta_K(self): def dL_dtheta_K(self):
return self.kern.dK_dtheta(self.posterior.dL_dK, self.X) return self.kern.dK_dtheta(self.posterior.dL_dK, self.X)

View file

@@ -6,7 +6,6 @@ import pylab as pb
from gp import GP from gp import GP
from parameterization.param import Param from parameterization.param import Param
from ..inference.latent_function_inference import varDTC from ..inference.latent_function_inference import varDTC
from posterior import Posterior
class SparseGP(GP): class SparseGP(GP):
""" """

View file

@@ -53,6 +53,6 @@ class ExactGaussianInference(object):
likelihood.update_gradients(np.diag(dL_dK)) likelihood.update_gradients(np.diag(dL_dK))
return Posterior(log_marginal, dL_dK, LW, alpha, K) return Posterior(LW, alpha, K), log_marginal, dL_dK

View file

@@ -14,10 +14,8 @@ class Posterior(object):
schemes and the model classes. schemes and the model classes.
""" """
def __init__(self, log_marginal, dL_dK, woodbury_chol=None, woodbury_vector=None, K=None, mean=None, cov=None, K_chol=None): def __init__(self, woodbury_chol=None, woodbury_vector=None, K=None, mean=None, cov=None, K_chol=None):
""" """
log_marginal: log p(Y|X)
dL_dK: d/dK log p(Y|X)
woodbury_chol : a lower triangular matrix L that satisfies posterior_covariance = K - K L^{-T} L^{-1} K woodbury_chol : a lower triangular matrix L that satisfies posterior_covariance = K - K L^{-T} L^{-1} K
woodbury_vector : a matrix (or vector, as Nx1 matrix) M which satisfies posterior_mean = K M woodbury_vector : a matrix (or vector, as Nx1 matrix) M which satisfies posterior_mean = K M
K : the proir covariance (required for lazy computation of various quantities) K : the proir covariance (required for lazy computation of various quantities)
@@ -26,8 +24,6 @@ class Posterior(object):
Not all of the above need to be supplied! You *must* supply: Not all of the above need to be supplied! You *must* supply:
log_marginal
dL_dK
K (for lazy computation) K (for lazy computation)
You may supply either: You may supply either:
@@ -46,8 +42,6 @@ class Posterior(object):
From the supplied quantities, all of the others will be computed on demand (lazy computation) From the supplied quantities, all of the others will be computed on demand (lazy computation)
""" """
#obligatory #obligatory
self.log_marginal = log_marginal
self.dL_dK = dL_dK
self._K = K self._K = K
if ((woodbury_chol is not None) and (woodbury_vector is not None) and (K is not None)) or ((mean is not None) and (cov is not None) and (K is not None)): if ((woodbury_chol is not None) and (woodbury_vector is not None) and (K is not None)) or ((mean is not None) and (cov is not None) and (K is not None)):

View file

@@ -2,10 +2,11 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt) # Licensed under the BSD 3-clause license (see LICENSE.txt)
from posterior import Posterior from posterior import Posterior
from .../util.linalg import pdinv, dpotrs, tdot from ...util.linalg import pdinv, dpotrs, tdot
import numpy as np
log_2_pi = np.log(2*np.pi) log_2_pi = np.log(2*np.pi)
class DTCVar(object): class VarDTC(object):
""" """
An object for inference when the likelihood is Gaussian, but we want to do sparse inference. An object for inference when the likelihood is Gaussian, but we want to do sparse inference.
@@ -19,7 +20,7 @@ class DTCVar(object):
self._YYTfactor_cache = caching.cache() self._YYTfactor_cache = caching.cache()
self.const_jitter = 1e-6 self.const_jitter = 1e-6
def get_YYTfactor(self, Y): def get_YYTfactor(self, Y):
""" """
find a matrix L which satisfies LLT = YYT. find a matrix L which satisfies LLT = YYT.