mirror of https://github.com/SheffieldML/GPy.git (synced 2026-05-10 12:32:40 +02:00)

commit 77d08a7d6f (parent 1ed7d73219): fixes to EP

7 changed files with 36 additions and 32 deletions

@@ -10,7 +10,7 @@ from model import Model
 from parameterization import ObservableArray
 from .. import likelihoods
 from ..likelihoods.gaussian import Gaussian
-from ..inference.latent_function_inference import exact_gaussian_inference
+from ..inference.latent_function_inference import exact_gaussian_inference, expectation_propagation
 from parameterization.variational import VariationalPosterior

 class GP(Model):

@@ -56,7 +56,7 @@ class GP(Model):
         if isinstance(likelihood, likelihoods.Gaussian) or isinstance(likelihood, likelihoods.MixedNoise):
             inference_method = exact_gaussian_inference.ExactGaussianInference()
         else:
-            inference_method = expectation_propagation
+            inference_method = expectation_propagation.EP()
         print "defaulting to ", inference_method, "for latent function inference"
         self.inference_method = inference_method
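
The practical effect of this hunk: a GP built with a non-Gaussian likelihood now gets a constructed EP() object instead of the bare module the placeholder used to provide. A minimal sketch of the same dispatch, with stand-in classes (hypothetical, for illustration only) so it runs outside GPy:

    # Stand-ins for GPy's inference and likelihood classes.
    class ExactGaussianInference(object): pass
    class EP(object): pass
    class Gaussian(object): pass
    class Bernoulli(object): pass

    def default_inference(likelihood):
        # Conjugate (Gaussian) likelihoods admit exact inference;
        # everything else falls back to expectation propagation.
        if isinstance(likelihood, Gaussian):
            return ExactGaussianInference()
        return EP()

    assert isinstance(default_inference(Gaussian()), ExactGaussianInference)
    assert isinstance(default_inference(Bernoulli()), EP)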

@@ -89,7 +89,7 @@ def toy_linear_1d_classification_laplace(seed=default_seed, optimize=True, plot=

     likelihood = GPy.likelihoods.Bernoulli()
     laplace_inf = GPy.inference.latent_function_inference.Laplace()
-    kernel = GPy.kern.rbf(1)
+    kernel = GPy.kern.RBF(1)

     # Model definition
     m = GPy.core.GP(data['X'], Y, kernel=kernel, likelihood=likelihood, inference_method=laplace_inf)

@@ -318,7 +318,7 @@ def toy_ARD(max_iters=1000, kernel_type='linear', num_samples=300, D=4, optimize
     Y /= Y.std()

     if kernel_type == 'linear':
-        kernel = GPy.kern.linear(X.shape[1], ARD=1)
+        kernel = GPy.kern.Linear(X.shape[1], ARD=1)
     elif kernel_type == 'rbf_inv':
         kernel = GPy.kern.RBF_inv(X.shape[1], ARD=1)
     else:

@@ -357,7 +357,7 @@ def toy_ARD_sparse(max_iters=1000, kernel_type='linear', num_samples=300, D=4, o
     Y /= Y.std()

     if kernel_type == 'linear':
-        kernel = GPy.kern.linear(X.shape[1], ARD=1)
+        kernel = GPy.kern.Linear(X.shape[1], ARD=1)
     elif kernel_type == 'rbf_inv':
         kernel = GPy.kern.RBF_inv(X.shape[1], ARD=1)
     else:
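
Both toy_ARD hunks (and the classification hunk above) are the same rename: kernel constructors move to CamelCase classes. Assuming the post-rename API shown in these diffs, construction looks like:

    import GPy

    # ARD=1 requests one parameter per input dimension.
    k_lin = GPy.kern.Linear(4, ARD=1)   # previously GPy.kern.linear(4, ARD=1)
    k_rbf = GPy.kern.RBF(1)             # previously GPy.kern.rbf(1)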

@@ -27,8 +27,8 @@ etc.

 from exact_gaussian_inference import ExactGaussianInference
 from laplace import Laplace
-expectation_propagation = 'foo' # TODO
 from GPy.inference.latent_function_inference.var_dtc import VarDTC
+from expectation_propagation import EP
 from dtc import DTC
 from fitc import FITC
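
With the 'foo' placeholder replaced by a real import, the EP class used as the default inference method is now reachable from the subpackage, e.g. (using the constructor signature visible in the next hunk):

    from GPy.inference.latent_function_inference import EP
    inf = EP(epsilon=1e-6, eta=1., delta=1.)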

@@ -1,7 +1,7 @@
 import numpy as np
 from scipy import stats
-from ..util.linalg import pdinv,mdot,jitchol,chol_inv,DSYR,tdot,dtrtrs
-from likelihood import likelihood
+from ...util.linalg import pdinv,jitchol,DSYR,tdot,dtrtrs, dpotrs
+from posterior import Posterior
 log_2_pi = np.log(2*np.pi)

 class EP(object):
     def __init__(self, epsilon=1e-6, eta=1., delta=1.):

@@ -28,30 +28,30 @@ class EP(object):
         K = kern.K(X)

-        mu_tilde, tau_tilde = self.expectation_propagation()
+        mu, Sigma, mu_tilde, tau_tilde, Z_hat = self.expectation_propagation(K, Y, likelihood, Y_metadata)

-        Wi, LW, LWi, W_logdet = pdinv(K + np.diag(1./tau_tilde)
+        Wi, LW, LWi, W_logdet = pdinv(K + np.diag(1./tau_tilde))

         alpha, _ = dpotrs(LW, mu_tilde, lower=1)

-        log_marginal = 0.5*(-num_data * log_2_pi - W_logdet - np.sum(alpha * mu_tilde))
+        log_marginal = 0.5*(-num_data * log_2_pi - W_logdet - np.sum(alpha * mu_tilde)) # TODO: add log Z_hat??

         dL_dK = 0.5 * (tdot(alpha[:,None]) - Wi)

-        #TODO: what abot derivatives of the likelihood parameters?
+        dL_dthetaL = np.zeros(likelihood.size) # TODO: derivatives of the likelihood parameters

-        return Posterior(woodbury_inv=Wi, woodbury_vector=alpha, K=K), log_marginal, {'dL_dK':dL_dK}
+        return Posterior(woodbury_inv=Wi, woodbury_vector=alpha, K=K), log_marginal, {'dL_dK':dL_dK, 'dL_dthetaL':dL_dthetaL}


-    def expectation_propagation(self, K, Y, Y_metadata, likelihood)
+    def expectation_propagation(self, K, Y, likelihood, Y_metadata):

         num_data, data_dim = Y.shape
         assert data_dim == 1, "This EP method only works for 1D outputs"

         #Initial values - Posterior distribution parameters: q(f|X,Y) = N(f|mu,Sigma)
-        mu = np.zeros(self.num_data)
+        mu = np.zeros(num_data)
         Sigma = K.copy()

         #Initial values - Marginal moments
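
For reference, with site variances $\tilde{\Sigma} = \mathrm{diag}(1/\tilde{\tau})$ the quantity assembled here is the standard EP approximation to the log marginal likelihood (minus the per-site $\log \hat{Z}_i$ terms, as the TODO notes):

    \log Z \approx -\tfrac{1}{2}\Big( N \log 2\pi + \log\big|K + \tilde{\Sigma}\big| + \tilde{\mu}^\top \big(K + \tilde{\Sigma}\big)^{-1} \tilde{\mu} \Big)

where alpha = dpotrs(LW, mu_tilde, lower=1) computes (K + Sigma_tilde)^{-1} mu_tilde from the Cholesky factor LW returned by pdinv, so np.sum(alpha * mu_tilde) is the quadratic term.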

@@ -61,33 +61,32 @@ class EP(object):
         #initial values - Gaussian factors
         if self.old_mutilde is None:
-            tau_tilde, mu_tilde, v_tilde = np.zeros((3, num_data, num_data))
+            tau_tilde, mu_tilde, v_tilde = np.zeros((3, num_data))
         else:
             assert old_mutilde.size == num_data, "data size mis-match: did you change the data? try resetting!"
             mu_tilde, v_tilde = self.old_mutilde, self.old_vtilde
             tau_tilde = v_tilde/mu_tilde

         #Approximation
-        epsilon_np1 = self.epsilon + 1.
-        epsilon_np2 = self.epsilon + 1.
+        tau_diff = self.epsilon + 1.
+        v_diff = self.epsilon + 1.
         iterations = 0
-        while (epsilon_np1 > self.epsilon) or (epsilon_np2 > self.epsilon):
+        while (tau_diff > self.epsilon) or (v_diff > self.epsilon):
             update_order = np.random.permutation(num_data)
             for i in update_order:
                 #Cavity distribution parameters
                 tau_cav = 1./Sigma[i,i] - self.eta*tau_tilde[i]
                 v_cav = mu[i]/Sigma[i,i] - self.eta*v_tilde[i]
                 #Marginal moments
-                Z_hat[i], mu_hat[i], sigma2_hat[i] = likelihood.moments_match(Y[i], tau_cav, v_cav, Y_metadata=(None if Y_metadata is None else Y_metadata[i]))
+                Z_hat[i], mu_hat[i], sigma2_hat[i] = likelihood.moments_match_ep(Y[i], tau_cav, v_cav)#, Y_metadata=None)#=(None if Y_metadata is None else Y_metadata[i]))
                 #Site parameters update
                 delta_tau = self.delta/self.eta*(1./sigma2_hat[i] - 1./Sigma[i,i])
                 delta_v = self.delta/self.eta*(mu_hat[i]/sigma2_hat[i] - mu[i]/Sigma[i,i])
                 tau_tilde[i] += delta_tau
                 v_tilde[i] += delta_v
                 #Posterior distribution parameters update
-                DSYR(Sigma, Sigma[:,i].copy(), -Delta_tau/(1.+ Delta_tau*Sigma[i,i]))
+                DSYR(Sigma, Sigma[:,i].copy(), -delta_tau/(1.+ delta_tau*Sigma[i,i]))
                 mu = np.dot(Sigma, v_tilde)
-            iterations += 1

             #(re) compute Sigma and mu using full Cholesky decomposition
             tau_tilde_root = np.sqrt(tau_tilde)
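
The inner loop is the classic sequential EP sweep. A self-contained numpy sketch of one site update, with DSYR's in-place symmetric rank-1 update written out as an explicit outer product (a restatement of the algebra above, not GPy's code):

    import numpy as np

    def ep_site_update(i, y_i, mu, Sigma, tau_tilde, v_tilde,
                       moments_match_ep, eta=1., delta=1.):
        # Cavity distribution: remove a fraction eta of site i.
        tau_cav = 1. / Sigma[i, i] - eta * tau_tilde[i]
        v_cav = mu[i] / Sigma[i, i] - eta * v_tilde[i]
        # Match moments of the cavity times the exact likelihood term.
        Z_hat, mu_hat, sigma2_hat = moments_match_ep(y_i, tau_cav, v_cav)
        # Damped (delta) update of the site natural parameters.
        delta_tau = delta / eta * (1. / sigma2_hat - 1. / Sigma[i, i])
        delta_v = delta / eta * (mu_hat / sigma2_hat - mu[i] / Sigma[i, i])
        tau_tilde[i] += delta_tau
        v_tilde[i] += delta_v
        # Rank-1 downdate of the posterior covariance; this is what
        # DSYR(Sigma, Sigma[:,i].copy(), -delta_tau/(1.+delta_tau*Sigma[i,i]))
        # does in place.
        s_i = Sigma[:, i].copy()
        Sigma -= (delta_tau / (1. + delta_tau * Sigma[i, i])) * np.outer(s_i, s_i)
        mu = np.dot(Sigma, v_tilde)
        return mu, Sigma, Z_hat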

@@ -99,10 +98,14 @@ class EP(object):
             mu = np.dot(Sigma,v_tilde)

             #monitor convergence
-            epsilon_np1 = np.mean(np.square(tau_tilde-tau_tilde_old))
-            epsilon_np2 = np.mean(np.square(v_tilde-v_tilde_old))
+            if iterations>0:
+                tau_diff = np.mean(np.square(tau_tilde-tau_tilde_old))
+                v_diff = np.mean(np.square(v_tilde-v_tilde_old))
             tau_tilde_old = tau_tilde.copy()
             v_tilde_old = v_tilde.copy()
+            iterations += 1

-        return mu, Sigma, mu_tilde, tau_tilde
+        mu_tilde = v_tilde/tau_tilde
+        return mu, Sigma, mu_tilde, tau_tilde, Z_hat
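
The reworked stopping test skips the comparison on the very first sweep (tau_tilde_old and v_tilde_old do not exist yet) and then tracks the mean squared change of the site parameters between sweeps:

    \text{tau\_diff} = \tfrac{1}{N}\textstyle\sum_i (\tilde{\tau}_i - \tilde{\tau}_i^{\,\text{old}})^2, \qquad \text{v\_diff} = \tfrac{1}{N}\textstyle\sum_i (\tilde{v}_i - \tilde{v}_i^{\,\text{old}})^2

iterating until both drop below epsilon. The new final line, mu_tilde = v_tilde/tau_tilde, converts the site natural parameters back to means, so the dpotrs solve in the caller receives mu_tilde rather than v_tilde.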

@@ -5,6 +5,7 @@ import numpy as np
 from ..util.univariate_Gaussian import std_norm_pdf, std_norm_cdf
 import link_functions
 from likelihood import Likelihood
+from scipy import stats

 class Bernoulli(Likelihood):
     """

@@ -43,7 +44,7 @@ class Bernoulli(Likelihood):
         Y_prep[Y.flatten() == 0] = -1
         return Y_prep

-    def moments_match_ep(self, data_i, tau_i, v_i):
+    def moments_match_ep(self, Y_i, tau_i, v_i):
        """
        Moments match of the marginal approximation in EP algorithm

@@ -51,9 +52,9 @@ class Bernoulli(Likelihood):
        :param tau_i: precision of the cavity distribution (float)
        :param v_i: mean/variance of the cavity distribution (float)
        """
-        if data_i == 1:
+        if Y_i == 1:
            sign = 1.
-        elif data_i == 0:
+        elif Y_i == 0:
            sign = -1
        else:
            raise ValueError("bad value for Bernoulli observation (0, 1)")
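
For the probit link, the matched moments have a closed form (Rasmussen & Williams, Gaussian Processes for Machine Learning, 2006, eq. 3.58). A self-contained sketch in the cavity's natural parameterization (tau_i, v_i), matching the signature above; this is the textbook computation, not a copy of GPy's implementation:

    import numpy as np
    from scipy.stats import norm

    def probit_moments(sign, tau_i, v_i):
        # Cavity distribution in mean/variance form.
        sigma2_cav = 1. / tau_i
        mu_cav = v_i / tau_i
        z = sign * mu_cav / np.sqrt(1. + sigma2_cav)
        Z_hat = norm.cdf(z)             # zeroth moment of cavity * likelihood
        ratio = norm.pdf(z) / Z_hat
        mu_hat = mu_cav + sign * sigma2_cav * ratio / np.sqrt(1. + sigma2_cav)
        sigma2_hat = sigma2_cav - (sigma2_cav**2 / (1. + sigma2_cav)) * ratio * (z + ratio)
        return Z_hat, mu_hat, sigma2_hat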

@@ -76,7 +77,7 @@ class Bernoulli(Likelihood):

        return Z_hat, mu_hat, sigma2_hat

-    def predictive_mean(self, mu, variance):
+    def predictive_mean(self, mu, variance, Y_metadata=None):

        if isinstance(self.gp_link, link_functions.Probit):
            return stats.norm.cdf(mu/np.sqrt(1+variance))
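
The Probit branch of predictive_mean relies on the standard Gaussian-probit integral: for f ~ N(mu, v),

    \int \Phi(f)\, \mathcal{N}(f \mid \mu, v)\, df = \Phi\!\left( \frac{\mu}{\sqrt{1 + v}} \right)

which is exactly the stats.norm.cdf(mu/np.sqrt(1+variance)) returned here, and is why the scipy import added at the top of this file is needed.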

@@ -87,7 +88,7 @@ class Bernoulli(Likelihood):
        else:
            raise NotImplementedError

-    def predictive_variance(self, mu, variance, pred_mean):
+    def predictive_variance(self, mu, variance, pred_mean, Y_metadata=None):

        if isinstance(self.gp_link, link_functions.Heaviside):
            return 0.

@@ -23,7 +23,7 @@ class GPClassification(GP):

    def __init__(self, X, Y, kernel=None):
        if kernel is None:
-            kernel = kern.rbf(X.shape[1])
+            kernel = kern.RBF(X.shape[1])

        likelihood = likelihoods.Bernoulli()
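
A minimal use of the classification model after the rename (the data is invented for illustration, and this assumes the model is exposed as GPy.models.GPClassification, as in released GPy):

    import numpy as np
    import GPy

    X = np.random.randn(50, 1)
    Y = (X > 0).astype(float)              # binary labels in {0, 1}
    m = GPy.models.GPClassification(X, Y)  # kernel=None -> kern.RBF(1), Bernoulli likelihood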