some noodling around in the likelihoods

This commit is contained in:
James Hensman 2013-12-10 14:51:11 -08:00
parent d8a627c1d8
commit 9011d8fe2f
2 changed files with 22 additions and 54 deletions

View file

@ -1,4 +1,4 @@
# Copyright (c) 2012, 2013 Ricardo Andrade
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
@ -30,18 +30,22 @@ class Gaussian(Likelihood):
analytical_variance = False
analytical_mean = False
super(Gaussian, self).__init__(gp_link, analytical_mean, analytical_variance, name=name)
super(Gaussian, self).__init__(gp_link, name=name)
self.variance = Param('variance', variance)
self.add_parameter(self.variance)
if isinstance(gp_link , link_functions.Identity):
if isinstance(gp_link, link_functions.Identity):
self.log_concave = True
def covariance_matrix(self, Y, Y_metadata=None):
return np.eye(Y.shape[0]) * self.variance
def _gradients(self, partial):
"""
Return the derivative of the log marginal likelihood wrt self.variance,
given the appropriate partial derivative
"""
return np.sum(partial)
def _preprocess_values(self, Y):
@ -51,7 +55,7 @@ class Gaussian(Likelihood):
"""
return Y
def _moments_match_analytical(self, data_i, tau_i, v_i):
def _moments_match_ep(self, data_i, tau_i, v_i):
"""
Moments match of the marginal approximation in EP algorithm
@ -65,11 +69,11 @@ class Gaussian(Likelihood):
Z_hat = 1./np.sqrt(2.*np.pi*sum_var)*np.exp(-.5*(data_i - v_i/tau_i)**2./sum_var)
return Z_hat, mu_hat, sigma2_hat
def _predictive_mean_analytical(self, mu, sigma):
def _predictive_mean(self, mu, sigma):
new_sigma2 = self.predictive_variance(mu, sigma)
return new_sigma2*(mu/sigma**2 + self.gp_link.transf(mu)/self.variance)
def _predictive_variance_analytical(self, mu, sigma, predictive_mean=None):
def _predictive_variance(self, mu, sigma, predictive_mean=None):
return 1./(1./self.variance + 1./sigma**2)
def pdf_link(self, link_f, y, extra_data=None):

View file

@ -1,4 +1,4 @@
# Copyright (c) 2012, 2013 Ricardo Andrade
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
@ -19,28 +19,17 @@ class Likelihood(Parameterized):
To use this class, inherit and define missing functionality.
The minimum required functionality is... TODO
"""
def __init__(self,gp_link,analytical_mean=False,analytical_variance=False, name='likelihood_base'):
"""
What are analytical_mean, analytical_variance? TODO
To enable use with EP, ...
To enable use with Laplace approximation, ...
For exact Gaussian inference, define ...
"""
def __init__(self, gp_link, name):
super(Likelihood, self).__init__(name)
assert isinstance(gp_link,link_functions.GPTransformation), "gp_link is not a valid GPTransformation."
self.gp_link = gp_link
self.analytical_mean = analytical_mean
self.analytical_variance = analytical_variance
if self.analytical_mean:
self.moments_match = self._moments_match_analytical
self.predictive_mean = self._predictive_mean_analytical
else:
self.moments_match = self._moments_match_numerical
self.predictive_mean = self._predictive_mean_numerical
if self.analytical_variance:
self.predictive_variance = self._predictive_variance_analytical
else:
self.predictive_variance = self._predictive_variance_numerical
self.log_concave = False
def _gradients(self,partial):
@ -56,12 +45,6 @@ class Likelihood(Parameterized):
"""
return Y
def _moments_match_analytical(self,obs,tau,v):
"""
If available, this function computes the moments analytically.
"""
raise NotImplementedError
def log_predictive_density(self, y_test, mu_star, var_star):
"""
Calculation of the log predictive density
@ -90,7 +73,7 @@ class Likelihood(Parameterized):
p_ystar = scaled_p_ystar/np.sqrt(2*np.pi*var_star)
return np.log(p_ystar)
def _moments_match_numerical(self,obs,tau,v):
def _moments_match_ep(self,obs,tau,v):
"""
Calculation of moments using quadrature
@ -124,27 +107,7 @@ class Likelihood(Parameterized):
return z, mean, variance
def _predictive_mean_analytical(self,mu,sigma):
"""
Predictive mean
.. math::
E(Y^{*}|Y) = E( E(Y^{*}|f^{*}, Y) )
If available, this function computes the predictive mean analytically.
"""
raise NotImplementedError
def _predictive_variance_analytical(self,mu,sigma):
"""
Predictive variance
.. math::
V(Y^{*}| Y) = E( V(Y^{*}|f^{*}, Y) ) + V( E(Y^{*}|f^{*}, Y) )
If available, this function computes the predictive variance analytically.
"""
raise NotImplementedError
def _predictive_mean_numerical(self,mu,variance):
def _predictive_mean(self,mu,variance):
"""
Quadrature calculation of the predictive mean: E(Y_star|Y) = E( E(Y_star|f_star, Y) )
@ -159,7 +122,7 @@ class Likelihood(Parameterized):
return mean
def _predictive_variance_numerical(self,mu,variance,predictive_mean=None):
def _predictive_variance(self,mu,variance,predictive_mean=None):
"""
Numerical approximation to the predictive variance: V(Y_star)
@ -376,6 +339,7 @@ class Likelihood(Parameterized):
assert dlogpdf_dtheta.shape[1] == len(self._get_param_names())
assert dlogpdf_df_dtheta.shape[1] == len(self._get_param_names())
assert d2logpdf_df2_dtheta.shape[1] == len(self._get_param_names())
return dlogpdf_dtheta, dlogpdf_df_dtheta, d2logpdf_df2_dtheta
def predictive_values(self, mu, var, full_cov=False, sampling=False, num_samples=10000):