James' debugging of the EP/GP interface

It seems that the GP-EP algorithm works now.
Ricardo Andrade 2013-02-01 13:45:55 +00:00
parent eb04cbed63
commit f941d629e6
3 changed files with 11 additions and 8 deletions


@@ -4,7 +4,7 @@
 """
 Simple Gaussian Processes classification 1D
-Probit likelihood
+probit likelihood
 """
 import pylab as pb
 import numpy as np
@@ -26,7 +26,7 @@ Y = np.hstack([np.ones(N/2),np.repeat(-1,N/2)])[:,None]
 kernel = GPy.kern.rbf(1)
 # Define likelihood
-distribution = GPy.likelihoods.likelihood_functions.Probit()
+distribution = GPy.likelihoods.likelihood_functions.probit()
 likelihood_object = GPy.likelihoods.EP(Y,distribution)
 # Model definition
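For context, a minimal sketch of how the touched demo wires these pieces together after this change. Only the quoted API calls appear in the diff; the sample size, the inputs X and the model constructor are outside the hunks, so those parts are assumptions:

import numpy as np
import GPy

N = 20                      # assumed; the demo's actual N is not shown in the diff
X = np.random.rand(N, 1)    # hypothetical 1D inputs; the real X is outside the hunks
Y = np.hstack([np.ones(N/2), np.repeat(-1, N/2)])[:, None]   # +1/-1 labels, Python 2 division as in the hunk context (use N//2 on Python 3)

kernel = GPy.kern.rbf(1)
# Define likelihood
distribution = GPy.likelihoods.likelihood_functions.probit()
likelihood_object = GPy.likelihoods.EP(Y, distribution)
# Model definition: the constructor call itself is not part of this diff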


@@ -27,7 +27,7 @@ class EP(likelihood):
         #initial values for the GP variables
         self.Y = np.zeros((self.N,1))
-        self.variance = np.zeros((self.N,self.N))#np.eye(self.N)
+        self.covariance_matrix = np.eye(self.N)
         self.Z = 0
         self.YYT = None
@@ -50,8 +50,9 @@ class EP(likelihood):
         mu_diff_2 = (self.v_/self.tau_ - mu_tilde)**2
         self.Z = np.sum(np.log(self.Z_hat)) + 0.5*np.sum(np.log(sigma_sum)) + 0.5*np.sum(mu_diff_2/sigma_sum) #Normalization constant, aka Z_ep
         self.Y = mu_tilde[:,None]
-        self.precsion = self.tau_tilde[:,None]
+        self.YYT = np.dot(self.Y,self.Y.T)
+        self.precision = self.tau_tilde
         self.covariance_matrix = np.diag(1./self.precision)
     def fit_full(self,K):
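As a reading aid, the normalization constant assembled in this hunk matches the cavity/site portion of the EP log marginal likelihood, assuming sigma_sum = 1./self.tau_ + 1./self.tau_tilde and that self.v_, self.tau_ hold the cavity parameters (neither definition appears in the hunk):

\log Z_{\mathrm{EP}} \;\supset\; \sum_i \log \hat{Z}_i
\;+\; \tfrac{1}{2}\sum_i \log\!\left(\sigma_{-i}^{2} + \tilde{\sigma}_i^{2}\right)
\;+\; \tfrac{1}{2}\sum_i \frac{\left(\mu_{-i} - \tilde{\mu}_i\right)^{2}}{\sigma_{-i}^{2} + \tilde{\sigma}_i^{2}},
\qquad
\mu_{-i} = \frac{v_{-i}}{\tau_{-i}}, \quad
\sigma_{-i}^{2} = \frac{1}{\tau_{-i}}, \quad
\tilde{\sigma}_i^{2} = \frac{1}{\tilde{\tau}_i},

with \hat{Z}_i, (v_{-i}, \tau_{-i}), \tilde{\mu}_i and \tilde{\tau}_i corresponding to self.Z_hat, (self.v_, self.tau_), mu_tilde and self.tau_tilde. The determinant and quadratic-form terms of the full expression are not part of this hunk.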
@@ -61,9 +62,11 @@ class EP(likelihood):
         """
         #Prior distribution parameters: p(f|X) = N(f|0,K)
+        self.tau_tilde = np.zeros(self.N)
+        self.v_tilde = np.zeros(self.N)
         #Initial values - Posterior distribution parameters: q(f|X,Y) = N(f|mu,Sigma)
         self.mu = np.zeros(self.N)
-        self.Sigma = K.copy() - self.variance.copy()
+        self.Sigma = K.copy()
         """
         Initial values - Cavity distribution parameters:
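A standalone sketch of the initialization this hunk puts in place, assuming the standard EP convention that zero site parameters make the starting posterior equal to the prior; the names below are local to the sketch, not GPy attributes:

import numpy as np

def ep_init(K):
    """Standard EP start: zero sites, so q(f) = N(0, K) initially."""
    N = K.shape[0]
    tau_tilde = np.zeros(N)   # site precisions
    v_tilde = np.zeros(N)     # site precision-scaled means
    mu = np.zeros(N)          # posterior mean = prior mean
    Sigma = K.copy()          # posterior covariance = prior covariance
    return tau_tilde, v_tilde, mu, Sigma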


@@ -65,7 +65,7 @@ class GP(model):
         self.likelihood._set_params(p[self.kern.Nparam:])
         self.K = self.kern.K(self.X,slices1=self.Xslices)
-        self.K += self.likelihood.variance
+        self.K += self.likelihood.covariance_matrix
         self.Ki, self.L, self.Li, self.K_logdet = pdinv(self.K)
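A rough numpy sketch of what the updated line builds: after EP, the probit likelihood is replaced by Gaussian sites N(mu_tilde, diag(1/tau_tilde)), so the working covariance becomes K plus the site covariance, analogous to K + sigma^2*I in Gaussian regression. Names are local to the sketch, not GPy attributes:

import numpy as np

def effective_gram(K, tau_tilde):
    # covariance_matrix in the EP object is diag(1/tau_tilde); adding it to K
    # yields the matrix that pdinv factorizes above.
    Sigma_tilde = np.diag(1.0 / tau_tilde)
    return K + Sigma_tilde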
@@ -90,7 +90,7 @@ class GP(model):
         For a Gaussian (or direct: TODO) likelihood, no iteration is required:
         this function does nothing
         """
-        self.likelihood.fit_full(self.kern.compute(self.X))
+        self.likelihood.fit_full(self.kern.K(self.X))
         self._set_params(self._get_params()) # update the GP
     def _model_fit_term(self):