Fixed MRD inducing point gradients

This commit is contained in:
Alan Saul 2015-09-24 13:44:28 +01:00
parent 76220cd4d2
commit 7b1c35f83f
5 changed files with 8 additions and 7 deletions

View file

@@ -97,7 +97,7 @@ class BayesianGPLVM(SparseGP_MPI):
dL_dpsi2=self.grad_dict['dL_dpsi2'])
self.variational_prior.update_gradients_KL(self.X)
self._Xgrad = self.X.gradient.copy()
#super(BayesianGPLVM, self).parameters_changed()
#self._log_marginal_likelihood -= self.variational_prior.KL_divergence(self.X)

View file

@@ -15,6 +15,7 @@ from ..util.initialization import initialize_latent
from ..core.sparse_gp import SparseGP, GP
from GPy.core.parameterization.variational import VariationalPosterior
from GPy.models.bayesian_gplvm_minibatch import BayesianGPLVMMiniBatch
from GPy.models.bayesian_gplvm import BayesianGPLVM
from GPy.models.sparse_gp_minibatch import SparseGPMiniBatch
class MRD(BayesianGPLVMMiniBatch):
@@ -170,7 +171,8 @@ class MRD(BayesianGPLVMMiniBatch):
self._log_marginal_likelihood += b._log_marginal_likelihood
self.logger.info('working on im <{}>'.format(hex(id(i))))
self.Z.gradient[:] += b.Z.gradient#full_values['Zgrad']
self.Z.gradient[:] += b._Zgrad # b.Z.gradient # full_values['Zgrad']
#grad_dict = b.full_values
if self.has_uncertain_inputs():

View file

@@ -321,3 +321,4 @@ class SparseGPMiniBatch(SparseGP):
else:
self.posterior, self._log_marginal_likelihood, self.grad_dict = self._inner_parameters_changed(self.kern, self.X, self.Z, self.likelihood, self.Y_normalized, self.Y_metadata)
self._outer_values_update(self.grad_dict)
self._Zgrad = self.Z.gradient.copy()