From 79dd8214241c29a70ccb7240353d99c87fdab1aa Mon Sep 17 00:00:00 2001
From: Max Zwiessele
Date: Fri, 4 Sep 2015 11:07:39 +0100
Subject: [PATCH] [mrd] fixes for updates on psi2

---
 GPy/models/bayesian_gplvm_minibatch.py | 46 ++++++++++++--------------
 GPy/models/mrd.py                      | 10 +++---
 2 files changed, 27 insertions(+), 29 deletions(-)

diff --git a/GPy/models/bayesian_gplvm_minibatch.py b/GPy/models/bayesian_gplvm_minibatch.py
index 1d06f07f..fcbc166d 100644
--- a/GPy/models/bayesian_gplvm_minibatch.py
+++ b/GPy/models/bayesian_gplvm_minibatch.py
@@ -100,22 +100,8 @@ class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
                                dL_dpsi2=full_values['dL_dpsi2'],
                                psi0=self.psi0, psi1=self.psi1, psi2=self.psi2)
-            kl_fctr = self.kl_factr
-
-            self.X.mean.gradient[:] = 0
-            self.X.variance.gradient[:] = 0
-            self.variational_prior.update_gradients_KL(self.X)
-
-            if self.missing_data or not self.stochastics:
-                self.X.mean.gradient = kl_fctr*self.X.mean.gradient
-                self.X.variance.gradient = kl_fctr*self.X.variance.gradient
-            else:
-                d = self.output_dim
-                self.X.mean.gradient = kl_fctr*self.X.mean.gradient*self.stochastics.batchsize/d
-                self.X.variance.gradient = kl_fctr*self.X.variance.gradient*self.stochastics.batchsize/d
-            self.X.mean.gradient += meangrad_tmp
-            self.X.variance.gradient += vargrad_tmp
-
+            self.X.mean.gradient = meangrad_tmp
+            self.X.variance.gradient = vargrad_tmp
         else:
             self.X.gradient = self.kern.gradients_X(full_values['dL_dKnm'], self.X, self.Z)
             self.X.gradient += self.kern.gradients_X_diag(full_values['dL_dKdiag'], self.X)
@@ -126,15 +112,27 @@ class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
 
     def parameters_changed(self):
         super(BayesianGPLVMMiniBatch,self).parameters_changed()
-        kl_fctr = self.kl_factr
-        if self.missing_data or not self.stochastics:
-            self._log_marginal_likelihood -= kl_fctr*self.variational_prior.KL_divergence(self.X)
-        elif self.stochastics:
-            d = self.output_dim
-            self._log_marginal_likelihood -= kl_fctr*self.variational_prior.KL_divergence(self.X)*self.stochastics.batchsize/d
-
         if isinstance(self.inference_method, VarDTC_minibatch):
             return
 
+        kl_fctr = self.kl_factr
+        if kl_fctr > 0:
+            Xgrad = self.X.gradient.copy()
+            self.X.gradient[:] = 0
+            self.variational_prior.update_gradients_KL(self.X)
+
+            if self.missing_data or not self.stochastics:
+                self.X.mean.gradient = kl_fctr*self.X.mean.gradient
+                self.X.variance.gradient = kl_fctr*self.X.variance.gradient
+            else:
+                d = self.output_dim
+                self.X.mean.gradient = kl_fctr*self.X.mean.gradient*self.stochastics.batchsize/d
+                self.X.variance.gradient = kl_fctr*self.X.variance.gradient*self.stochastics.batchsize/d
+            self.X.gradient += Xgrad
+
+            if self.missing_data or not self.stochastics:
+                self._log_marginal_likelihood -= kl_fctr*self.variational_prior.KL_divergence(self.X)
+            elif self.stochastics:
+                d = self.output_dim
+                self._log_marginal_likelihood -= kl_fctr*self.variational_prior.KL_divergence(self.X)*self.stochastics.batchsize/d
 
     def plot_latent(self, labels=None, which_indices=None,
                     resolution=50, ax=None, marker='o', s=40,
diff --git a/GPy/models/mrd.py b/GPy/models/mrd.py
index be01b769..ebb5a960 100644
--- a/GPy/models/mrd.py
+++ b/GPy/models/mrd.py
@@ -170,14 +170,14 @@ class MRD(BayesianGPLVMMiniBatch):
             self._log_marginal_likelihood += b._log_marginal_likelihood
 
             self.logger.info('working on im <{}>'.format(hex(id(i))))
-            self.Z.gradient[:] += b.full_values['Zgrad']
-            grad_dict = b.full_values
+            self.Z.gradient[:] += b.Z.gradient  # full_values['Zgrad']
+            # grad_dict = b.full_values
 
             if self.has_uncertain_inputs():
-                self.X.mean.gradient += grad_dict['meangrad']
-                self.X.variance.gradient += grad_dict['vargrad']
+                self.X.mean.gradient += b.X.mean.gradient
+                self.X.variance.gradient += b.X.variance.gradient
             else:
-                self.X.gradient += grad_dict['Xgrad']
+                self.X.gradient += b.X.gradient
 
         if self.has_uncertain_inputs():
             # update for the KL divergence
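
Note on the change above: the patch moves the KL-divergence contribution out of
_outer_values_update and into parameters_changed, where it is applied exactly once.
There, the likelihood gradient already in self.X.gradient is saved, the KL gradient is
computed into a zeroed buffer, scaled by kl_factr (and additionally by
batchsize/output_dim when stochastic minibatching over output dimensions is active, so
each minibatch carries its proportional share of the KL term), and the saved likelihood
gradient is added back. The following is a minimal standalone sketch of that scaling
rule only; the function name and its arguments are illustrative and do not exist in GPy:

    import numpy as np

    def scaled_kl_term(kl_value, kl_grad, kl_factr, batchsize=None, output_dim=None):
        # Scale the KL divergence and its gradient by kl_factr. Under
        # stochastic minibatching, rescale by batchsize/output_dim so that
        # summing the contributions over minibatches recovers the full-data
        # KL contribution in expectation.
        scale = kl_factr
        if batchsize is not None:
            scale = scale * batchsize / float(output_dim)
        return scale * kl_value, scale * kl_grad

    # usage sketch: full-batch vs. a minibatch covering 10 of 50 output dims
    kl, grad = 2.5, np.ones(3)
    print(scaled_kl_term(kl, grad, kl_factr=1.0))                           # unscaled
    print(scaled_kl_term(kl, grad, kl_factr=1.0, batchsize=10, output_dim=50))

Guarding the whole block with "if kl_fctr > 0" also lets a model disable the KL term
entirely by setting the factor to zero, which the pre-patch code could not do cheaply.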