mirror of https://github.com/SheffieldML/GPy.git, synced 2026-05-04 09:12:38 +02:00

commit 3dba1c05de (parent 5b4abf4c34)

    Added Z_tilde contribution for EP, now log_marginal is correct, need to check for var_dtc case

3 changed files with 28 additions and 8 deletions
@@ -22,7 +22,7 @@ class ExactGaussianInference(LatentFunctionInference):
     def __init__(self):
         pass#self._YYTfactor_cache = caching.cache()
 
-    def inference(self, kern, X, likelihood, Y, mean_function=None, Y_metadata=None, K=None, precision=None, Z=None):
+    def inference(self, kern, X, likelihood, Y, mean_function=None, Y_metadata=None, K=None, precision=None, Z_tilde=None):
         """
         Returns a Posterior class containing essential quantities of the posterior
         """
@@ -49,11 +49,11 @@ class ExactGaussianInference(LatentFunctionInference):
         log_marginal = 0.5*(-Y.size * log_2_pi - Y.shape[1] * W_logdet - np.sum(alpha * YYT_factor))
 
-        if Z is not None:
+        if Z_tilde is not None:
             # This is a correction term for the log marginal likelihood
             # In EP this is log Z_tilde, which is the difference between the
             # Gaussian marginal and Z_EP
-            log_marginal += Z
+            log_marginal += Z_tilde
 
         dL_dK = 0.5 * (tdot(alpha) - Y.shape[1] * Wi)
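For reference, the quantity being corrected works out in a few lines of numpy. The sketch below is not GPy's code; K, mu_tilde, tau_tilde and log_Z_tilde are assumed inputs, with log_Z_tilde standing for the scalar np.log(Z_tilde).sum() that the EP caller passes in further down.

import numpy as np

def ep_log_marginal_sketch(K, mu_tilde, tau_tilde, log_Z_tilde):
    # Gaussian log marginal of the pseudo-observations mu_tilde under the
    # prior covariance K plus heteroscedastic noise diag(1/tau_tilde)...
    n = mu_tilde.size
    B = K + np.diag(1. / tau_tilde)
    L = np.linalg.cholesky(B)
    alpha = np.linalg.solve(L.T, np.linalg.solve(L, mu_tilde))
    log_gauss = -0.5 * (n * np.log(2. * np.pi)
                        + 2. * np.sum(np.log(np.diag(L)))
                        + mu_tilde.dot(alpha))
    # ...plus the additive log-space correction this commit threads through.
    return log_gauss + log_Z_tilde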

@@ -51,7 +51,7 @@ class EP(EPBase, ExactGaussianInference):
             #if we've already run EP, just use the existing approximation stored in self._ep_approximation
             mu, Sigma, mu_tilde, tau_tilde, Z_tilde = self._ep_approximation
 
-        return super(EP, self).inference(kern, X, likelihood, mu_tilde[:,None], mean_function=mean_function, Y_metadata=Y_metadata, precision=1./tau_tilde, K=K, Z=np.log(Z_tilde).sum())
+        return super(EP, self).inference(kern, X, likelihood, mu_tilde[:,None], mean_function=mean_function, Y_metadata=Y_metadata, precision=1./tau_tilde, K=K, Z_tilde=np.log(Z_tilde).sum())
 
     def expectation_propagation(self, K, Y, likelihood, Y_metadata):
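The call above reuses the exact Gaussian machinery by treating the EP site parameters as pseudo-observations. The identity it relies on, sketched in plain numpy (assumed from textbook EP, not GPy's code):

import numpy as np

def ep_posterior_sketch(K, tau_tilde, v_tilde):
    # Sigma = (K^-1 + diag(tau_tilde))^-1 and mu = Sigma v_tilde; plain
    # inverses are fine for a sketch, real code should go via a Cholesky.
    Sigma = np.linalg.inv(np.linalg.inv(K) + np.diag(tau_tilde))
    mu = Sigma.dot(v_tilde)
    return mu, Sigma

This is exactly a GP regression posterior with targets mu_tilde = v_tilde/tau_tilde and per-point noise variance 1/tau_tilde, which is why the site parameters can be handed straight to the parent inference.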
@@ -63,6 +63,10 @@ class EP(EPBase, ExactGaussianInference):
         Sigma = K.copy()
         diag.add(Sigma, 1e-7)
 
+        # Makes computing the sign quicker if we work with numpy arrays rather
+        # than ObsArrays
+        Y = Y.values.copy()
+
         #Initial values - Marginal moments
         Z_hat = np.empty(num_data,dtype=np.float64)
         mu_hat = np.empty(num_data,dtype=np.float64)
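Before the site loop starts, EP works from the prior. A minimal sketch of the standard initialisation this section builds on (the diff only shows fragments; the zero starts are assumed from textbook EP, not taken from the diff):

import numpy as np

def init_ep_sketch(K):
    num_data = K.shape[0]
    tau_tilde = np.zeros(num_data)  # site precisions: no site information yet
    v_tilde = np.zeros(num_data)    # site precision-times-mean
    mu = np.zeros(num_data)         # posterior mean starts at the prior mean
    Sigma = K.copy()                # posterior covariance is the (jittered) prior
    return tau_tilde, v_tilde, mu, Sigma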
@@ -91,15 +95,25 @@ class EP(EPBase, ExactGaussianInference):
             #Cavity distribution parameters
             tau_cav[i] = 1./Sigma[i,i] - self.eta*tau_tilde[i]
             v_cav[i] = mu[i]/Sigma[i,i] - self.eta*v_tilde[i]
+            if Y_metadata is not None:
+                # Pick out the relavent metadata for Yi
+                Y_metadata_i = {}
+                for key in Y_metadata.keys():
+                    Y_metadata_i[key] = Y_metadata[key][i, :]
+            else:
+                Y_metadata_i = None
             #Marginal moments
-            Z_hat[i], mu_hat[i], sigma2_hat[i] = likelihood.moments_match_ep(Y[i], tau_cav[i], v_cav[i])#, Y_metadata=None)#=(None if Y_metadata is None else Y_metadata[i]))
+            Z_hat[i], mu_hat[i], sigma2_hat[i] = likelihood.moments_match_ep(Y[i], tau_cav[i], v_cav[i], Y_metadata_i=Y_metadata_i)
             #Site parameters update
             delta_tau = self.delta/self.eta*(1./sigma2_hat[i] - 1./Sigma[i,i])
             delta_v = self.delta/self.eta*(mu_hat[i]/sigma2_hat[i] - mu[i]/Sigma[i,i])
             tau_tilde[i] += delta_tau
             v_tilde[i] += delta_v
             #Posterior distribution parameters update
-            DSYR(Sigma, Sigma[:,i].copy(), -delta_tau/(1.+ delta_tau*Sigma[i,i]))
+            # DSYR(Sigma, Sigma[:,i].copy(), -delta_tau/(1.+ delta_tau*Sigma[i,i]))
+            # mu = np.dot(Sigma, v_tilde)
+            ci = delta_tau/(1.+ delta_tau*Sigma[i,i])
+            DSYR(Sigma, Sigma[:,i].copy(), -ci)
+            mu = np.dot(Sigma, v_tilde)
 
         #(re) compute Sigma and mu using full Cholesky decompy
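DSYR is the BLAS-style symmetric rank-1 update the loop uses to fold each site's precision change back into Sigma without a full refactorisation. A plain-numpy stand-in for GPy's helper (a sketch, assuming a square array A and a matching vector x):

import numpy as np

def DSYR_sketch(A, x, alpha):
    # In-place symmetric rank-1 update: A <- A + alpha * x x^T
    A += alpha * np.outer(x, x)

With ci = delta_tau/(1. + delta_tau*Sigma[i,i]), the call DSYR(Sigma, Sigma[:,i].copy(), -ci) is the Sherman-Morrison downdate Sigma <- Sigma - ci * Sigma[:,i] Sigma[:,i]^T, i.e. the exact effect on (K^-1 + diag(tau_tilde))^-1 of adding delta_tau to site i's precision; mu is then refreshed as np.dot(Sigma, v_tilde).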

@@ -150,7 +164,7 @@ class EPDTC(EPBase, VarDTC):
                                          Y_metadata=Y_metadata,
                                          precision=tau_tilde,
                                          Lm=Lm, dL_dKmm=dL_dKmm,
-                                         psi0=psi0, psi1=psi1, psi2=psi2, Z=Z_tilde)
+                                         psi0=psi0, psi1=psi1, psi2=psi2, Z_tilde=np.log(Z_tilde).sum())
 
     def expectation_propagation(self, Kmm, Kmn, Y, likelihood, Y_metadata):
         num_data, output_dim = Y.shape
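Note the convention this settles on: both EP above and EPDTC here now reduce the per-site normalisers to one scalar, np.log(Z_tilde).sum(), before handing it down, so ExactGaussianInference and VarDTC only ever add a precomputed log-space correction instead of interpreting a raw Z_tilde array themselves (the old EPDTC call passed Z=Z_tilde unreduced).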

@@ -64,7 +64,7 @@ class VarDTC(LatentFunctionInference):
     def get_VVTfactor(self, Y, prec):
         return Y * prec # TODO chache this, and make it effective
 
-    def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None, mean_function=None, precision=None, Lm=None, dL_dKmm=None, psi0=None, psi1=None, psi2=None):
+    def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None, mean_function=None, precision=None, Lm=None, dL_dKmm=None, psi0=None, psi1=None, psi2=None, Z_tilde=None):
         assert mean_function is None, "inference with a mean function not implemented"
 
         num_data, output_dim = Y.shape
@@ -152,6 +152,12 @@ class VarDTC(LatentFunctionInference):
         log_marginal = _compute_log_marginal_likelihood(likelihood, num_data, output_dim, precision, het_noise,
                                                         psi0, A, LB, trYYT, data_fit, Y)
 
+        if Z_tilde is not None:
+            # This is a correction term for the log marginal likelihood
+            # In EP this is log Z_tilde, which is the difference between the
+            # Gaussian marginal and Z_EP
+            log_marginal += Z_tilde
+
         #noise derivatives
         dL_dR = _compute_dL_dR(likelihood,
                                het_noise, uncertain_inputs, LB,
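A natural way to tick off the "need to check for var_dtc case" in the commit message (a suggestion here, not part of the commit): place the inducing points at every training input, where the variational DTC bound is tight for the Gaussian part, and compare EPDTC's log_marginal against the exact EP path; any residual gap then isolates how Z_tilde enters _compute_log_marginal_likelihood.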