From c1a416debc5c374c337e0bce61aff3f4f31ddde7 Mon Sep 17 00:00:00 2001
From: James Hensman
Date: Fri, 24 Jan 2014 14:15:32 +0000
Subject: [PATCH] more gradient based tomfoolery

---
 GPy/core/model.py                                          | 3 ++-
 .../latent_function_inference/exact_gaussian_inference.py  | 2 +-
 GPy/inference/latent_function_inference/posterior.py       | 3 ---
 GPy/likelihoods/gaussian.py                                | 2 +-
 4 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/GPy/core/model.py b/GPy/core/model.py
index ae265b0d..96fe8362 100644
--- a/GPy/core/model.py
+++ b/GPy/core/model.py
@@ -33,7 +33,8 @@ class Model(Parameterized):
         #def dK_d(self, param, dL_dK, X, X2)
         g = np.zeros(self.size)
         try:
-            [g.__setitem__(s, self.gradient_mapping[p]().flat) for p, s in itertools.izip(self._parameters_, self._param_slices_) if not p.is_fixed]
+            #[g.__setitem__(s, self.gradient_mapping[p]().flat) for p, s in itertools.izip(self._parameters_, self._param_slices_) if not p.is_fixed]
+            [g.__setitem__(s, p.gradient.flat) for p, s in itertools.izip(self._parameters_, self._param_slices_) if not p.is_fixed]
         except KeyError:
             raise KeyError, 'Gradient for {} not defined, please specify gradients for parameters to optimize'.format(p.name)
         return g
diff --git a/GPy/inference/latent_function_inference/exact_gaussian_inference.py b/GPy/inference/latent_function_inference/exact_gaussian_inference.py
index d9d344ab..3c874a06 100644
--- a/GPy/inference/latent_function_inference/exact_gaussian_inference.py
+++ b/GPy/inference/latent_function_inference/exact_gaussian_inference.py
@@ -49,7 +49,7 @@ class ExactGaussianInference(object):
 
         dL_dK = 0.5 * (tdot(alpha) - Y.shape[1] * Wi)
 
-        kern.update_gradients_full(dL_dK)
+        kern.update_gradients_full(dL_dK, X)
 
         likelihood.update_gradients(np.diag(dL_dK))
 
diff --git a/GPy/inference/latent_function_inference/posterior.py b/GPy/inference/latent_function_inference/posterior.py
index 060c5352..609ba3bc 100644
--- a/GPy/inference/latent_function_inference/posterior.py
+++ b/GPy/inference/latent_function_inference/posterior.py
@@ -18,7 +18,6 @@ class Posterior(object):
     """
     log_marginal: log p(Y|X)
     dL_dK: d/dK log p(Y|X)
-    dL_dtheta_lik : d/dtheta log p(Y|X) (where theta are the parameters of the likelihood)
     woodbury_chol : a lower triangular matrix L that satisfies posterior_covariance = K - K L^{-T} L^{-1} K
     woodbury_vector : a matrix (or vector, as Nx1 matrix) M which satisfies posterior_mean = K M
     K : the prior covariance (required for lazy computation of various quantities)
@@ -29,7 +28,6 @@
 
     log_marginal
     dL_dK
-    dL_dtheta_lik
     K (for lazy computation)
 
     You may supply either:
@@ -50,7 +48,6 @@
         #obligatory
         self.log_marginal = log_marginal
         self.dL_dK = dL_dK
-        self.dL_dtheta_lik = dL_dtheta_lik
         self._K = K
 
         if ((woodbury_chol is not None) and (woodbury_vector is not None) and (K is not None)) or ((mean is not None) and (cov is not None) and (K is not None)):
diff --git a/GPy/likelihoods/gaussian.py b/GPy/likelihoods/gaussian.py
index b32d353a..c047e573 100644
--- a/GPy/likelihoods/gaussian.py
+++ b/GPy/likelihoods/gaussian.py
@@ -52,7 +52,7 @@ class Gaussian(Likelihood):
     def covariance_matrix(self, Y, Y_metadata=None):
         return np.eye(Y.shape[0]) * self.variance
 
-    def set_gradients(self, partial):
+    def update_gradients(self, partial):
         self.variance.gradient = np.sum(partial)
 
     def _preprocess_values(self, Y):
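
Note on the GPy/core/model.py change: after this patch, the flat gradient vector handed to the optimizer is stitched together from each parameter's own .gradient attribute (populated by calls such as kern.update_gradients_full(dL_dK, X) and likelihood.update_gradients(...) above), rather than through the old gradient_mapping callbacks. The sketch below is a minimal, self-contained illustration of that pattern under stated assumptions; Param and stitch_gradients are hypothetical names for this note, not GPy's actual API.

    import numpy as np

    class Param(object):
        """Hypothetical stand-in for a GPy parameter: a value plus a
        gradient array of the same shape, optionally fixed."""
        def __init__(self, value, fixed=False):
            self.value = np.atleast_1d(np.asarray(value, dtype=float))
            self.gradient = np.zeros_like(self.value)  # filled in by inference
            self.is_fixed = fixed
            self.size = self.value.size

    def stitch_gradients(params):
        """Copy each unfixed parameter's .gradient into the matching
        slice of one flat vector, mirroring the list comprehension in
        Model above."""
        sizes = [p.size for p in params]
        offsets = np.cumsum([0] + sizes)
        g = np.zeros(offsets[-1])
        for p, start, stop in zip(params, offsets[:-1], offsets[1:]):
            if not p.is_fixed:
                g[start:stop] = p.gradient.flat
        return g

    variance = Param(1.0)
    lengthscale = Param([2.0, 0.5])
    variance.gradient[:] = 0.3             # as likelihood.update_gradients would set
    lengthscale.gradient[:] = [-0.1, 4.0]  # as kern.update_gradients_full would set
    print(stitch_gradients([variance, lengthscale]))   # [ 0.3 -0.1  4. ]

The design point of the patch is visible in the sketch: once every parameter object carries its own gradient, the model no longer needs a registry mapping parameters to gradient functions, so the gradient_mapping lookup (and the separate dL_dtheta_lik slot on Posterior) can go away.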