From 87ee4ae5480ca48b9bd0728121ab0d3142a4e95b Mon Sep 17 00:00:00 2001
From: Max Zwiessele
Date: Fri, 4 Sep 2015 16:43:26 +0100
Subject: [PATCH] [MRD] fixed mrd for new structure

---
 GPy/models/bayesian_gplvm_minibatch.py |  5 +++--
 GPy/models/mrd.py                      | 15 +++++++--------
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/GPy/models/bayesian_gplvm_minibatch.py b/GPy/models/bayesian_gplvm_minibatch.py
index fcbc166d..0394ff7e 100644
--- a/GPy/models/bayesian_gplvm_minibatch.py
+++ b/GPy/models/bayesian_gplvm_minibatch.py
@@ -107,8 +107,7 @@ class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
             self.X.gradient += self.kern.gradients_X_diag(full_values['dL_dKdiag'], self.X)
 
     def _outer_init_full_values(self):
-        full_values = super(BayesianGPLVMMiniBatch, self)._outer_init_full_values()
-        return full_values
+        return super(BayesianGPLVMMiniBatch, self)._outer_init_full_values()
 
     def parameters_changed(self):
         super(BayesianGPLVMMiniBatch,self).parameters_changed()
@@ -134,6 +133,8 @@ class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
             d = self.output_dim
             self._log_marginal_likelihood -= kl_fctr*self.variational_prior.KL_divergence(self.X)*self.stochastics.batchsize/d
 
+        self._Xgrad = self.X.gradient.copy()
+
     def plot_latent(self, labels=None, which_indices=None,
                 resolution=50, ax=None, marker='o', s=40,
                 fignum=None, plot_inducing=True, legend=True,
diff --git a/GPy/models/mrd.py b/GPy/models/mrd.py
index ebb5a960..cb98e1a8 100644
--- a/GPy/models/mrd.py
+++ b/GPy/models/mrd.py
@@ -174,16 +174,15 @@ class MRD(BayesianGPLVMMiniBatch):
             #grad_dict = b.full_values
 
             if self.has_uncertain_inputs():
-                self.X.mean.gradient += b.X.mean.gradient
-                self.X.variance.gradient += b.X.variance.gradient
+                self.X.gradient += b._Xgrad
             else:
-                self.X.gradient += b.X.gradient
+                self.X.gradient += b._Xgrad
 
-        if self.has_uncertain_inputs():
-            # update for the KL divergence
-            self.variational_prior.update_gradients_KL(self.X)
-            self._log_marginal_likelihood -= self.variational_prior.KL_divergence(self.X)
-            pass
+        #if self.has_uncertain_inputs():
+        #    # update for the KL divergence
+        #    self.variational_prior.update_gradients_KL(self.X)
+        #    self._log_marginal_likelihood -= self.variational_prior.KL_divergence(self.X)
+        #    pass
 
     def log_likelihood(self):
         return self._log_marginal_likelihood
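
The substance of the patch: BayesianGPLVMMiniBatch.parameters_changed now ends by
caching a copy of its latent gradient in self._Xgrad, and MRD.parameters_changed
accumulates those cached snapshots over its per-view models b instead of reading
b.X.mean.gradient / b.X.variance.gradient directly. Because the snapshot already
holds the full gradient with respect to X, both branches of the if/else reduce to
the same accumulation, and the wrapper-level KL update is commented out. Below is
a minimal sketch of that caching pattern with plain numpy stand-ins; View,
SharedLatent, and the placeholder gradient are hypothetical names used for
illustration, not GPy's API.

import numpy as np

class View:
    """Stand-in for one BayesianGPLVMMiniBatch view of the shared latent space."""
    def __init__(self, X):
        self.X = X                           # handle on the shared latent positions
        self.Xgradient = np.zeros_like(X)    # this view's working gradient buffer
        self._Xgrad = None                   # cached snapshot, as in the patch

    def parameters_changed(self):
        # ... the real model computes its objective and dL/dX here ...
        self.Xgradient = np.ones_like(self.X)   # placeholder gradient
        # Snapshot the finished gradient so a wrapping model can read it later,
        # even if this working buffer is reused or zeroed in the meantime.
        self._Xgrad = self.Xgradient.copy()

class SharedLatent:
    """Stand-in for MRD: several views sharing one latent space."""
    def __init__(self, X, views):
        self.X = X
        self.views = views
        self.Xgradient = np.zeros_like(X)

    def parameters_changed(self):
        self.Xgradient[:] = 0.
        for b in self.views:
            b.parameters_changed()
            # Accumulate each view's cached snapshot; after the patch the same
            # line serves both the uncertain- and certain-input branches.
            self.Xgradient += b._Xgrad

X = np.zeros((10, 2))
model = SharedLatent(X, [View(X), View(X)])
model.parameters_changed()
print(model.Xgradient)   # elementwise sum of the two views' gradients

The copy matters: the wrapper may reset or reuse the shared gradient buffer
between views, so the snapshot decouples what the wrapper reads from what each
view later overwrites.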