diff --git a/GPy/core/gp.py b/GPy/core/gp.py
index 5b356744..6f02d7df 100644
--- a/GPy/core/gp.py
+++ b/GPy/core/gp.py
@@ -60,10 +60,10 @@ class GP(Model):
         self.parameters_changed()
 
     def parameters_changed(self):
-        self.posterior = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y)
+        self.posterior, self._log_marginal_likelihood, self._dL_dK = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y)
 
     def log_likelihood(self):
-        return self.posterior.log_marginal
+        return self._log_marginal_likelihood
 
     def dL_dtheta_K(self):
-        return self.kern.dK_dtheta(self.posterior.dL_dK, self.X)
+        return self.kern.dK_dtheta(self._dL_dK, self.X)
diff --git a/GPy/core/sparse_gp.py b/GPy/core/sparse_gp.py
index 5a1dc595..d91c2ca1 100644
--- a/GPy/core/sparse_gp.py
+++ b/GPy/core/sparse_gp.py
@@ -6,7 +6,6 @@ import pylab as pb
 from gp import GP
 from parameterization.param import Param
 from ..inference.latent_function_inference import varDTC
-from posterior import Posterior
 
 class SparseGP(GP):
     """
diff --git a/GPy/inference/latent_function_inference/exact_gaussian_inference.py b/GPy/inference/latent_function_inference/exact_gaussian_inference.py
index 3c874a06..9313316c 100644
--- a/GPy/inference/latent_function_inference/exact_gaussian_inference.py
+++ b/GPy/inference/latent_function_inference/exact_gaussian_inference.py
@@ -53,6 +53,6 @@ class ExactGaussianInference(object):
 
         likelihood.update_gradients(np.diag(dL_dK))
 
-        return Posterior(log_marginal, dL_dK, LW, alpha, K)
+        return Posterior(LW, alpha, K), log_marginal, dL_dK
diff --git a/GPy/inference/latent_function_inference/posterior.py b/GPy/inference/latent_function_inference/posterior.py
index ae5bff37..c4b0ec62 100644
--- a/GPy/inference/latent_function_inference/posterior.py
+++ b/GPy/inference/latent_function_inference/posterior.py
@@ -14,10 +14,8 @@ class Posterior(object):
     schemes and the model classes.
     """
 
-    def __init__(self, log_marginal, dL_dK, woodbury_chol=None, woodbury_vector=None, K=None, mean=None, cov=None, K_chol=None):
+    def __init__(self, woodbury_chol=None, woodbury_vector=None, K=None, mean=None, cov=None, K_chol=None):
         """
-        log_marginal: log p(Y|X)
-        dL_dK: d/dK log p(Y|X)
         woodbury_chol : a lower triangular matrix L that satisfies posterior_covariance = K - K L^{-T} L^{-1} K
         woodbury_vector : a matrix (or vector, as Nx1 matrix) M which satisfies posterior_mean = K M
         K : the prior covariance (required for lazy computation of various quantities)
@@ -26,8 +24,6 @@
         Not all of the above need to be supplied!
 
         You *must* supply:
-        log_marginal
-        dL_dK
         K (for lazy computation)
 
         You may supply either:
@@ -46,8 +42,6 @@
         From the supplied quantities, all of the others will be computed on demand (lazy computation)
         """
         #obligatory
-        self.log_marginal = log_marginal
-        self.dL_dK = dL_dK
         self._K = K
 
         if ((woodbury_chol is not None) and (woodbury_vector is not None) and (K is not None)) or ((mean is not None) and (cov is not None) and (K is not None)):
diff --git a/GPy/inference/latent_function_inference/dtcvar.py b/GPy/inference/latent_function_inference/varDTC.py
similarity index 98%
rename from GPy/inference/latent_function_inference/dtcvar.py
rename to GPy/inference/latent_function_inference/varDTC.py
index af1dc87c..bd4a59e1 100644
--- a/GPy/inference/latent_function_inference/dtcvar.py
+++ b/GPy/inference/latent_function_inference/varDTC.py
@@ -2,10 +2,11 @@
 # Licensed under the BSD 3-clause license (see LICENSE.txt)
 
 from posterior import Posterior
-from .../util.linalg import pdinv, dpotrs, tdot
+from ...util.linalg import pdinv, dpotrs, tdot
+import numpy as np
 log_2_pi = np.log(2*np.pi)
 
-class DTCVar(object):
+class VarDTC(object):
     """
     An object for inference when the likelihood is Gaussian, but we want to do sparse inference.
@@ -19,7 +20,7 @@
         self._YYTfactor_cache = caching.cache()
         self.const_jitter = 1e-6
 
-    def get_YYTfactor(self, Y): 
+    def get_YYTfactor(self, Y):
        """
        find a matrix L which satisfies LL^T = YY^T.
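
Note on the new contract: inference methods now return the triple (posterior, log_marginal, dL_dK) rather than packing the scalar likelihood and its kernel gradient into Posterior, which is left holding only the Woodbury quantities that prediction needs. Below is a minimal, self-contained sketch of that pattern; MockInference and MockPosterior are illustrative stand-ins, not GPy classes, and the placeholder maths is not the real exact-Gaussian computation:

    import numpy as np

    class MockPosterior(object):
        # stands in for Posterior(woodbury_chol=..., woodbury_vector=..., K=...)
        def __init__(self, woodbury_chol=None, woodbury_vector=None, K=None):
            self.woodbury_chol = woodbury_chol
            self.woodbury_vector = woodbury_vector
            self._K = K

    class MockInference(object):
        def inference(self, kern, X, likelihood, Y):
            n = X.shape[0]
            K = np.eye(n)                                   # placeholder prior covariance
            LW = np.linalg.cholesky(K + np.eye(n))          # placeholder Woodbury Cholesky
            alpha = np.linalg.solve(K + np.eye(n), Y)       # placeholder Woodbury vector
            log_marginal = -0.5 * float((Y * alpha).sum())  # placeholder log p(Y|X)
            dL_dK = -0.5 * np.eye(n)                        # placeholder d/dK log p(Y|X)
            # the scalar and the gradient travel alongside the Posterior, mirroring
            # the new `return Posterior(LW, alpha, K), log_marginal, dL_dK` above
            return MockPosterior(LW, alpha, K), log_marginal, dL_dK

    # callers unpack all three, exactly as GP.parameters_changed does above
    X, Y = np.random.randn(5, 2), np.random.randn(5, 1)
    posterior, log_marginal, dL_dK = MockInference().inference(None, X, None, Y)

One design observation: Posterior.__init__ now begins with keyword arguments, so the positional call Posterior(LW, alpha, K) in exact_gaussian_inference.py silently depends on argument order; Posterior(woodbury_chol=LW, woodbury_vector=alpha, K=K) would survive further signature changes.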