fixed up the Gaussian likelihood a little

This commit is contained in:
James Hensman 2013-12-19 16:19:23 +00:00
parent 0733886ba0
commit 8cad49ce13
3 changed files with 13 additions and 7 deletions

View file

@@ -3,7 +3,7 @@
import numpy as np
from gp_base import GPBase
from ..util.linalg import dtrtrs
from ..util.linalg import dtrtrs, tdot
from ..inference.latent_function_inference import exact_gaussian_inference, expectation_propagation
from .. import likelihoods

View file

@@ -2,7 +2,7 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from ...util.linalg import pdinv, dpotrs
from ...util.linalg import pdinv, dpotrs, tdot, dtrtrs
class Posterior(object):
"""
@@ -33,7 +33,7 @@ class Posterior(object):
K (for lazy computation)
You may supply either:
cc
woodbury_chol
woodbury_vector
@@ -46,7 +46,6 @@ class Posterior(object):
Of course, you can supply more than that, but this class will lazily compute all other quantites on demand.
From the supplied quantities, all of the others will be computed on demand (lazy computation)
"""
#obligatory
self.log_marginal = log_marginal
@@ -80,7 +79,7 @@ class Posterior(object):
@property
def covariance(self):
    """Lazily computed posterior covariance of the latent function.

    Returns the cached matrix if available; otherwise computes
    K - K^T L^{-T} L^{-1} K, where L = self.woodbury_chol is taken to be
    the lower-triangular Cholesky factor of the woodbury matrix
    (assumption from lower=1 — confirm against the definition of
    woodbury_chol elsewhere in this class) and caches the result.
    """
    if self._covariance is None:
        # Triangular solve LiK = L^{-1} K via LAPACK dtrtrs (lower=1).
        # The previous dpotrs call on self._woodbury_chol was dead code:
        # its result was immediately overwritten, and it referenced the
        # private attribute instead of the lazy woodbury_chol property.
        LiK, _ = dtrtrs(self.woodbury_chol, self._K, lower=1)
        # tdot(A) presumably computes A.dot(A.T) — TODO confirm against
        # util.linalg; then tdot(LiK.T) = K^T L^{-T} L^{-1} K.
        self._covariance = self._K - tdot(LiK.T)
    return self._covariance

View file

@@ -80,6 +80,13 @@ class Gaussian(Likelihood):
Z_hat = 1./np.sqrt(2.*np.pi*sum_var)*np.exp(-.5*(data_i - v_i/tau_i)**2./sum_var)
return Z_hat, mu_hat, sigma2_hat
def predictive_values(self, mu, var, full_cov=False):
    """Return the predictive mean, variance and naive interval bounds.

    :param mu: predictive mean values, one column per output
    :param var: predictive variances; a full covariance matrix when
        full_cov is True, otherwise per-point variances shaped like mu
    :param full_cov: whether var is a full covariance matrix
    :returns: (mu, var, lower, upper) where lower/upper are mu -/+ the
        (diagonal) variance
    """
    # With a full covariance only its diagonal contributes to the
    # per-point spread; reshape it to a column to broadcast against mu.
    if full_cov:
        spread = np.diag(var)[:, None]
    else:
        spread = var
    return mu, var, mu - spread, mu + spread
def predictive_mean(self, mu, sigma):
#new_sigma2 = self.predictive_variance(mu, sigma)
#return new_sigma2*(mu/sigma**2 + self.gp_link.transf(mu)/self.variance)