From f2d5ee42eb05d5aaf517656b46f9dd6a59dd378a Mon Sep 17 00:00:00 2001
From: Zhenwen Dai
Date: Mon, 17 Mar 2014 16:09:25 +0000
Subject: [PATCH 1/2] fix the bug regarding the renaming of dL_dthetaL

---
 GPy/models/bayesian_gplvm.py | 2 +-
 GPy/models/ss_gplvm.py       | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/GPy/models/bayesian_gplvm.py b/GPy/models/bayesian_gplvm.py
index 3617e260..e0818a2f 100644
--- a/GPy/models/bayesian_gplvm.py
+++ b/GPy/models/bayesian_gplvm.py
@@ -66,7 +66,7 @@ class BayesianGPLVM(SparseGP):
 
         super(BayesianGPLVM, self).parameters_changed()
         self._log_marginal_likelihood -= self.variational_prior.KL_divergence(self.X)
-        self.X.mean.gradient, self.X.variance.gradient = self.kern.gradients_qX_expectations(variational_posterior=self.X, Z=self.Z, **self.grad_dict)
+        self.X.mean.gradient, self.X.variance.gradient = self.kern.gradients_qX_expectations(variational_posterior=self.X, Z=self.Z, dL_dpsi0=self.grad_dict['dL_dpsi0'], dL_dpsi1=self.grad_dict['dL_dpsi1'], dL_dpsi2=self.grad_dict['dL_dpsi2'])
 
         # update for the KL divergence
         self.variational_prior.update_gradients_KL(self.X)
diff --git a/GPy/models/ss_gplvm.py b/GPy/models/ss_gplvm.py
index 5994814b..1c2ecf4c 100644
--- a/GPy/models/ss_gplvm.py
+++ b/GPy/models/ss_gplvm.py
@@ -61,7 +61,7 @@ class SSGPLVM(SparseGP):
 
         super(SSGPLVM, self).parameters_changed()
         self._log_marginal_likelihood -= self.variational_prior.KL_divergence(self.X)
-        self.X.mean.gradient, self.X.variance.gradient, self.X.binary_prob.gradient = self.kern.gradients_qX_expectations(variational_posterior=self.X, Z=self.Z, **self.grad_dict)
+        self.X.mean.gradient, self.X.variance.gradient, self.X.binary_prob.gradient = self.kern.gradients_qX_expectations(variational_posterior=self.X, Z=self.Z, dL_dpsi0=self.grad_dict['dL_dpsi0'], dL_dpsi1=self.grad_dict['dL_dpsi1'], dL_dpsi2=self.grad_dict['dL_dpsi2'])
 
         # update for the KL divergence
         self.variational_prior.update_gradients_KL(self.X)
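Why PATCH 1/2 is needed: gradients_qX_expectations accepts only the three psi-statistic gradients, but the shared grad_dict can now also carry a dL_dthetaL entry (see PATCH 2/2), which **self.grad_dict would forward as an unexpected keyword argument. A minimal sketch of that failure mode, using a stand-in function rather than GPy's real kernel method:

def gradients_qX_expectations(variational_posterior, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2):
    # Stand-in with a fixed keyword signature, like the kernel method in the patch.
    return dL_dpsi0, dL_dpsi1, dL_dpsi2

grad_dict = {'dL_dpsi0': 0.1, 'dL_dpsi1': 0.2, 'dL_dpsi2': 0.3,
             'dL_dthetaL': 0.4}  # extra key introduced elsewhere in the codebase

try:
    gradients_qX_expectations(variational_posterior=None, Z=None, **grad_dict)
except TypeError as err:
    print(err)  # ... got an unexpected keyword argument 'dL_dthetaL'

# The patched calls select only the keys the method actually takes:
gradients_qX_expectations(variational_posterior=None, Z=None,
                          dL_dpsi0=grad_dict['dL_dpsi0'],
                          dL_dpsi1=grad_dict['dL_dpsi1'],
                          dL_dpsi2=grad_dict['dL_dpsi2'])

Forwarding the keys explicitly keeps both call sites stable even if further entries are added to grad_dict later.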
From caf1dc2609259d48af32d46009d09e8906961427 Mon Sep 17 00:00:00 2001
From: James Hensman
Date: Mon, 17 Mar 2014 16:38:04 +0000
Subject: [PATCH 2/2] dL_dthetaL in missing data vardtc

---
 GPy/inference/latent_function_inference/var_dtc.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/GPy/inference/latent_function_inference/var_dtc.py b/GPy/inference/latent_function_inference/var_dtc.py
index 7a0c14e8..9b6e26c0 100644
--- a/GPy/inference/latent_function_inference/var_dtc.py
+++ b/GPy/inference/latent_function_inference/var_dtc.py
@@ -65,7 +65,7 @@ class VarDTC(object):
         _, output_dim = Y.shape
 
         #see whether we've got a different noise variance for each datum
-        beta = 1./np.fmax(likelihood.variance, 1e-6)
+        beta = 1./np.fmax(likelihood.gaussian_variance(Y_metadata), 1e-6)
 
         # VVT_factor is a matrix such that tdot(VVT_factor) = VVT...this is for efficiency!
         #self.YYTfactor = self.get_YYTfactor(Y)
         #VVT_factor = self.get_VVTfactor(self.YYTfactor, beta)
@@ -221,7 +221,7 @@ class VarDTCMissingData(object):
             psi2_all = None
 
         Ys, traces = self._Y(Y)
-        beta_all = 1./np.fmax(likelihood.variance, 1e-6)
+        beta_all = 1./np.fmax(likelihood.gaussian_variance(Y_metadata), 1e-6)
         het_noise = beta_all.size != 1
 
         import itertools
@@ -328,18 +328,20 @@ class VarDTCMissingData(object):
             diag.add(Bi, 1)
             woodbury_inv_all[:, :, ind] = backsub_both_sides(Lm, Bi)[:,:,None]
 
+        dL_dthetaL = likelihood.exact_inference_gradients(dL_dR)
+
         # gradients:
         if uncertain_inputs:
             grad_dict = {'dL_dKmm': dL_dKmm,
                          'dL_dpsi0':dL_dpsi0_all,
                          'dL_dpsi1':dL_dpsi1_all,
                          'dL_dpsi2':dL_dpsi2_all,
-                         'dL_dR':dL_dR}
+                         'dL_dthetaL':dL_dthetaL}
         else:
             grad_dict = {'dL_dKmm': dL_dKmm,
                          'dL_dKdiag':dL_dpsi0_all,
                          'dL_dKnm':dL_dpsi1_all,
-                         'dL_dR':dL_dR}
+                         'dL_dthetaL':dL_dthetaL}
 
         #get sufficient things for posterior prediction
         #TODO: do we really want to do this in the loop?
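PATCH 2/2 brings the missing-data VarDTC in line with the rest of the inference code: rather than exposing the raw gradient dL_dR (with respect to the noise covariance), it asks the likelihood to map that gradient onto its own parameters via exact_inference_gradients and stores the result under the uniform key dL_dthetaL; it also reads the noise through gaussian_variance(Y_metadata), which may return one variance per datum. A hedged sketch of the idea with a toy likelihood (illustrative names and signatures, not GPy's internals):

import numpy as np

class ToyGaussianLikelihood:
    # Toy homoscedastic Gaussian likelihood; its only parameter is the variance.
    def __init__(self, variance):
        self.variance = np.asarray(variance, dtype=float)

    def gaussian_variance(self, Y_metadata=None):
        # Returns a scalar, or one variance per datum for heteroscedastic noise.
        return self.variance

    def exact_inference_gradients(self, dL_dR, Y_metadata=None):
        # Chain rule onto the likelihood's parameters: for a single shared
        # variance, dL/dvariance is the sum of the per-datum gradients.
        return np.sum(dL_dR)

lik = ToyGaussianLikelihood(variance=0.1)
beta = 1. / np.fmax(lik.gaussian_variance(), 1e-6)  # noise precision, as in the patch
dL_dR = np.array([0.5, -0.2, 0.1])                  # per-datum gradient from inference
grad_dict = {'dL_dthetaL': lik.exact_inference_gradients(dL_dR)}
print(beta, grad_dict)

With everything funnelled through dL_dthetaL, the model code no longer needs to know how each likelihood parameterises its noise.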