EP (Expectation Propagation) inference is back.

This commit is contained in:
Ricardo 2014-05-15 16:36:03 +01:00
parent a03d037736
commit 3d76664af0
4 changed files with 238 additions and 27 deletions

View file

@ -24,6 +24,13 @@ class EP(LatentFunctionInference):
self.old_mutilde, self.old_vtilde = None, None
self._ep_approximation = None
def on_optimization_start(self):
    """Invalidate the cached EP approximation before optimization begins.

    Clears ``self._ep_approximation`` (also initialised to ``None`` in
    ``__init__``).
    """
    # NOTE(review): presumably inference() rebuilds the approximation
    # when this is None — confirm against the inference() body.
    self._ep_approximation = None
def on_optimization_end(self):
    """Hook invoked when optimization finishes; currently a no-op."""
    # TODO: update approximation in the end as well? Maybe even with a switch?
    pass
def inference(self, kern, X, likelihood, Y, Y_metadata=None, Z=None):
num_data, output_dim = X.shape
assert output_dim ==1, "ep in 1D only (for now!)"
@ -47,8 +54,6 @@ class EP(LatentFunctionInference):
return Posterior(woodbury_inv=Wi, woodbury_vector=alpha, K=K), log_marginal, {'dL_dK':dL_dK, 'dL_dthetaL':dL_dthetaL}
def expectation_propagation(self, K, Y, likelihood, Y_metadata):
num_data, data_dim = Y.shape
@ -113,4 +118,3 @@ class EP(LatentFunctionInference):
mu_tilde = v_tilde/tau_tilde
return mu, Sigma, mu_tilde, tau_tilde, Z_hat