fixed up the Gaussian likelihood a little

This commit is contained in:
James Hensman 2013-12-19 16:19:23 +00:00
parent 0733886ba0
commit 8cad49ce13
3 changed files with 13 additions and 7 deletions

View file

@@ -3,7 +3,7 @@
 import numpy as np
 from gp_base import GPBase
-from ..util.linalg import dtrtrs
+from ..util.linalg import dtrtrs, tdot
 from ..inference.latent_function_inference import exact_gaussian_inference, expectation_propagation
 from .. import likelihoods

View file

@@ -2,7 +2,7 @@
 # Licensed under the BSD 3-clause license (see LICENSE.txt)
 import numpy as np
-from ...util.linalg import pdinv, dpotrs
+from ...util.linalg import pdinv, dpotrs, tdot, dtrtrs
 class Posterior(object):
     """
@@ -26,19 +26,19 @@ class Posterior(object):
     cov : the posterior covariance
     Not all of the above need to be supplied! You *must* supply:
     log_marginal
     dL_dK
     dL_dtheta_lik
     K (for lazy computation)
     You may supply either:
     c
     woodbury_chol
     woodbury_vector
     Or:
     mean
     cov
     K_chol (for lazy computation)
@@ -46,7 +46,6 @@ class Posterior(object):
     Of course, you can supply more than that, but this class will lazily compute all other quantites on demand.
     From the supplied quantities, all of the others will be computed on demand (lazy computation)
     """
-
     #obligatory
     self.log_marginal = log_marginal
@@ -80,7 +79,7 @@ class Posterior(object):
     @property
     def covariance(self):
         if self._covariance is None:
-            LiK, _ = dpotrs(self._woodbury_chol, self._K)
+            LiK, _ = dtrtrs(self.woodbury_chol, self._K, lower=1)
             self._covariance = self._K - tdot(LiK.T)
         return self._covariance

View file

@@ -80,6 +80,13 @@ class Gaussian(Likelihood):
         Z_hat = 1./np.sqrt(2.*np.pi*sum_var)*np.exp(-.5*(data_i - v_i/tau_i)**2./sum_var)
         return Z_hat, mu_hat, sigma2_hat
def predictive_values(self, mu, var, full_cov=False):
    """
    Return the predictive mean, variance and an approximate 95% interval.

    :param mu: predictive mean of the latent function, shape (N, 1)
    :param var: predictive variance; (N, 1) marginals, or an (N, N)
        covariance matrix when full_cov is True
    :param full_cov: whether var is a full covariance matrix
    :returns: (mu, var, low, up) where low/up are mu -/+ 2 standard
        deviations (the ~95% interval for a Gaussian)
    """
    if full_cov:
        # only the marginal variances (the diagonal) define the interval
        sd = np.sqrt(np.diag(var))[:, None]
    else:
        sd = np.sqrt(var)
    # interval half-width is 2 standard deviations — using the raw
    # variance here would have the wrong units
    return mu, var, mu - 2. * sd, mu + 2. * sd
     def predictive_mean(self, mu, sigma):
         #new_sigma2 = self.predictive_variance(mu, sigma)
         #return new_sigma2*(mu/sigma**2 + self.gp_link.transf(mu)/self.variance)