From 9680a139d43ac208063a309afc9bae36b4a46978 Mon Sep 17 00:00:00 2001
From: James Hensman
Date: Tue, 18 Mar 2014 12:28:46 +0000
Subject: [PATCH] changed the way the Gaussian likelihood interfaces, to
 enable mixed_noise things

---
 GPy/core/gp.py                               |  6 ++--
 .../latent_function_inference/dtc.py         | 32 ++++++++-----------
 .../exact_gaussian_inference.py              |  5 ++-
 .../latent_function_inference/var_dtc.py     |  4 +--
 GPy/likelihoods/gaussian.py                  |  4 +--
 5 files changed, 23 insertions(+), 28 deletions(-)

diff --git a/GPy/core/gp.py b/GPy/core/gp.py
index 70b7d695..e052ff35 100644
--- a/GPy/core/gp.py
+++ b/GPy/core/gp.py
@@ -42,10 +42,8 @@ class GP(Model):
         assert Y.shape[0] == self.num_data
         _, self.output_dim = self.Y.shape
 
-        if Y_metadata is None:
-            Y_metadata = {}
-        else:
-            self.Y_metadata = Y_metadata
+        #TODO: check the type of this is okay?
+        self.Y_metadata = Y_metadata
 
         assert isinstance(kernel, kern.Kern)
         #assert self.input_dim == kernel.input_dim
diff --git a/GPy/inference/latent_function_inference/dtc.py b/GPy/inference/latent_function_inference/dtc.py
index 5ebc5e53..89140ce2 100644
--- a/GPy/inference/latent_function_inference/dtc.py
+++ b/GPy/inference/latent_function_inference/dtc.py
@@ -91,12 +91,8 @@ class vDTC(object):
     def __init__(self):
         self.const_jitter = 1e-6
 
-    def inference(self, kern, X, X_variance, Z, likelihood, Y):
-        assert X_variance is None, "cannot use X_variance with DTC. Try varDTC."
-
-        #TODO: MAX! fix this!
-        from ...util.misc import param_to_array
-        Y = param_to_array(Y)
+    def inference(self, kern, X, Z, likelihood, Y):
+        #assert X_variance is None, "cannot use X_variance with DTC. Try varDTC."
 
         num_inducing, _ = Z.shape
         num_data, output_dim = Y.shape
@@ -109,15 +105,14 @@ class vDTC(object):
         Kmm = kern.K(Z)
         Knn = kern.Kdiag(X)
         Knm = kern.K(X, Z)
-        U = Knm
-        Uy = np.dot(U.T,Y)
+        KnmY = np.dot(Knm.T,Y)
 
-        #factor Kmm 
+        #factor Kmm
         Kmmi, L, Li, _ = pdinv(Kmm)
 
         # Compute A
-        LiUTbeta = np.dot(Li, U.T)*np.sqrt(beta)
-        A_ = tdot(LiUTbeta)
+        LiKmnbeta = np.dot(Li, Knm.T)*np.sqrt(beta)
+        A_ = tdot(LiKmnbeta)
         trace_term = -0.5*(np.sum(Knn)*beta - np.trace(A_))
         A = A_ + np.eye(num_inducing)
 
@@ -125,7 +120,7 @@ class vDTC(object):
         LA = jitchol(A)
 
         # back substutue to get b, P, v
-        tmp, _ = dtrtrs(L, Uy, lower=1)
+        tmp, _ = dtrtrs(L, KnmY, lower=1)
         b, _ = dtrtrs(LA, tmp*beta, lower=1)
         tmp, _ = dtrtrs(LA, b, lower=1, trans=1)
         v, _ = dtrtrs(L, tmp, lower=1, trans=1)
@@ -145,19 +140,18 @@ class vDTC(object):
         LAL = Li.T.dot(A).dot(Li)
         dL_dK = Kmmi - 0.5*(vvT_P + LAL)
 
-        # Compute dL_dU
+        # Compute dL_dKnm
         vY = np.dot(v.reshape(-1,1),Y.T)
-        #dL_dU = vY - np.dot(vvT_P, U.T)
-        dL_dU = vY - np.dot(vvT_P - Kmmi, U.T)
-        dL_dU *= beta
+        dL_dKmn = vY - np.dot(vvT_P - Kmmi, Knm.T)
+        dL_dKmn *= beta
 
         #compute dL_dR
-        Uv = np.dot(U, v)
-        dL_dR = 0.5*(np.sum(U*np.dot(U,P), 1) - 1./beta + np.sum(np.square(Y), 1) - 2.*np.sum(Uv*Y, 1) + np.sum(np.square(Uv), 1) )*beta**2
+        Knmv = np.dot(Knm, v)
+        dL_dR = 0.5*(np.sum(Knm*np.dot(Knm,P), 1) - 1./beta + np.sum(np.square(Y), 1) - 2.*np.sum(Knmv*Y, 1) + np.sum(np.square(Knmv), 1) )*beta**2
         dL_dR -=beta*trace_term/num_data
 
         dL_dthetaL = likelihood.exact_inference_gradients(dL_dR)
 
-        grad_dict = {'dL_dKmm': dL_dK, 'dL_dKdiag':np.zeros_like(Knn) + -0.5*beta, 'dL_dKnm':dL_dU.T, 'dL_dthetaL':dL_dthetaL}
+        grad_dict = {'dL_dKmm': dL_dK, 'dL_dKdiag':np.zeros_like(Knn) + -0.5*beta, 'dL_dKnm':dL_dKmn.T, 'dL_dthetaL':dL_dthetaL}
         #construct a posterior object
         post = Posterior(woodbury_inv=Kmmi-P, woodbury_vector=v, K=Kmm, mean=None, cov=None, K_chol=L)
diff --git a/GPy/inference/latent_function_inference/exact_gaussian_inference.py b/GPy/inference/latent_function_inference/exact_gaussian_inference.py
index e76575c6..ca1b92c6 100644
--- a/GPy/inference/latent_function_inference/exact_gaussian_inference.py
+++ b/GPy/inference/latent_function_inference/exact_gaussian_inference.py
@@ -3,6 +3,7 @@
 from posterior import Posterior
 from ...util.linalg import pdinv, dpotrs, tdot
+from ...util import diag
 import numpy as np
 
 log_2_pi = np.log(2*np.pi)
 
@@ -41,7 +42,9 @@ class ExactGaussianInference(object):
 
         K = kern.K(X)
 
-        Wi, LW, LWi, W_logdet = pdinv(K + likelihood.covariance_matrix(Y, Y_metadata))
+        Ky = K.copy()
+        diag.add(Ky, likelihood.gaussian_variance(Y, Y_metadata))
+        Wi, LW, LWi, W_logdet = pdinv(Ky)
 
         alpha, _ = dpotrs(LW, YYT_factor, lower=1)
 
diff --git a/GPy/inference/latent_function_inference/var_dtc.py b/GPy/inference/latent_function_inference/var_dtc.py
index 9b6e26c0..97d54624 100644
--- a/GPy/inference/latent_function_inference/var_dtc.py
+++ b/GPy/inference/latent_function_inference/var_dtc.py
@@ -48,7 +48,7 @@ class VarDTC(object):
     def get_VVTfactor(self, Y, prec):
         return Y * prec # TODO chache this, and make it effective
 
-    def inference(self, kern, X, Z, likelihood, Y):
+    def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None):
         if isinstance(X, VariationalPosterior):
             uncertain_inputs = True
             psi0 = kern.psi0(Z, X)
@@ -65,7 +65,7 @@ class VarDTC(object):
         _, output_dim = Y.shape
 
         #see whether we've got a different noise variance for each datum
-        beta = 1./np.fmax(likelihood.gaussian_variance(Y_metadata), 1e-6)
+        beta = 1./np.fmax(likelihood.gaussian_variance(Y, Y_metadata), 1e-6)
         # VVT_factor is a matrix such that tdot(VVT_factor) = VVT...this is for efficiency!
         #self.YYTfactor = self.get_YYTfactor(Y)
         #VVT_factor = self.get_VVTfactor(self.YYTfactor, beta)
diff --git a/GPy/likelihoods/gaussian.py b/GPy/likelihoods/gaussian.py
index 101aac4b..79d62bb7 100644
--- a/GPy/likelihoods/gaussian.py
+++ b/GPy/likelihoods/gaussian.py
@@ -50,8 +50,8 @@ class Gaussian(Likelihood):
         if isinstance(gp_link, link_functions.Identity):
             self.log_concave = True
 
-    def covariance_matrix(self, Y, Y_metadata=None):
-        return np.eye(Y.shape[0]) * self.variance
+    def gaussian_variance(self, Y, Y_metadata=None):
+        return self.variance
 
     def update_gradients(self, grad):
         self.variance.gradient = grad
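
A note on the payoff of this interface change: gaussian_variance returns
the noise variance itself (a scalar, or one value per datum) rather than
the dense N x N matrix that covariance_matrix built, so diag.add(Ky, ...)
and np.fmax(..., 1e-6) above can consume it directly, and a heteroscedastic
or mixed-noise likelihood only needs to override this one method. Below is
a minimal, standalone sketch of such a likelihood -- the class name, its
constructor, and the 'output_index' metadata key are illustrative
assumptions, not part of this patch:

    import numpy as np

    class MixedNoiseSketch(object):
        """Illustrative stand-in for a Likelihood subclass: one noise
        variance per output/task, selected through Y_metadata."""

        def __init__(self, variances):
            # one variance per task, e.g. [0.1, 0.5] for two tasks
            self.variances = np.asarray(variances, dtype=float)

        def gaussian_variance(self, Y, Y_metadata=None):
            # The new interface: return a variance per datum (or a
            # scalar), never an N x N covariance matrix. The (assumed)
            # key Y_metadata['output_index'] maps each row of Y to a task.
            index = np.asarray(Y_metadata['output_index']).flatten()
            return self.variances[index]

    # Usage: rows 0 and 2 come from task 0, row 1 from task 1.
    lik = MixedNoiseSketch([0.1, 0.5])
    Y = np.random.randn(3, 1)
    md = {'output_index': np.array([0, 1, 0])}
    print(lik.gaussian_variance(Y, md))   # -> [ 0.1  0.5  0.1]

Such a per-datum vector broadcasts unchanged through
beta = 1./np.fmax(likelihood.gaussian_variance(Y, Y_metadata), 1e-6)
in var_dtc.py, which is what makes the dL_dR expression in dtc.py a
per-datum gradient rather than a single scalar.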