Mirror of https://github.com/SheffieldML/GPy.git
Synced 2026-05-09 20:12:38 +02:00
[MRD] fixed mrd for new structure

commit 87ee4ae548
parent 41cc9c62f5

2 changed files with 10 additions and 10 deletions
@@ -107,8 +107,7 @@ class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
             self.X.gradient += self.kern.gradients_X_diag(full_values['dL_dKdiag'], self.X)

     def _outer_init_full_values(self):
-        full_values = super(BayesianGPLVMMiniBatch, self)._outer_init_full_values()
-        return full_values
+        return super(BayesianGPLVMMiniBatch, self)._outer_init_full_values()

     def parameters_changed(self):
         super(BayesianGPLVMMiniBatch,self).parameters_changed()
@@ -134,6 +133,8 @@ class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
             d = self.output_dim
             self._log_marginal_likelihood -= kl_fctr*self.variational_prior.KL_divergence(self.X)*self.stochastics.batchsize/d

+        self._Xgrad = self.X.gradient.copy()
+
     def plot_latent(self, labels=None, which_indices=None,
                     resolution=50, ax=None, marker='o', s=40,
                     fignum=None, plot_inducing=True, legend=True,
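Note on the added line above: self._Xgrad = self.X.gradient.copy() snapshots the full latent-space gradient at the end of parameters_changed, so that a model wrapping several of these sub-models can read it afterwards (the MRD hunk below does exactly that). A minimal sketch of the caching pattern, using an illustrative SubModel class and plain numpy arrays rather than the real GPy API:

import numpy as np

class SubModel(object):
    """Illustrative stand-in for one BayesianGPLVMMiniBatch view (not the GPy API)."""
    def __init__(self, n, q, seed=0):
        self._rng = np.random.default_rng(seed)
        self.X_gradient = np.zeros((n, q))
        self._Xgrad = None  # snapshot taken at the end of every update

    def parameters_changed(self):
        # placeholder for the real likelihood/KL gradient computation
        self.X_gradient[:] = self._rng.normal(size=self.X_gradient.shape)
        # cache an independent copy so a wrapping model can read it later
        self._Xgrad = self.X_gradient.copy()

m = SubModel(5, 2)
m.parameters_changed()
assert m._Xgrad is not m.X_gradient            # a copy, not a view
assert np.array_equal(m._Xgrad, m.X_gradient)  # but with the same values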
@@ -174,16 +174,15 @@ class MRD(BayesianGPLVMMiniBatch):
             #grad_dict = b.full_values

             if self.has_uncertain_inputs():
-                self.X.mean.gradient += b.X.mean.gradient
-                self.X.variance.gradient += b.X.variance.gradient
+                self.X.gradient += b._Xgrad
             else:
-                self.X.gradient += b.X.gradient
+                self.X.gradient += b._Xgrad

-        if self.has_uncertain_inputs():
-            # update for the KL divergence
-            self.variational_prior.update_gradients_KL(self.X)
-            self._log_marginal_likelihood -= self.variational_prior.KL_divergence(self.X)
-            pass
+        #if self.has_uncertain_inputs():
+        #    # update for the KL divergence
+        #    self.variational_prior.update_gradients_KL(self.X)
+        #    self._log_marginal_likelihood -= self.variational_prior.KL_divergence(self.X)
+        #    pass

     def log_likelihood(self):
         return self._log_marginal_likelihood
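With each sub-model caching its own _Xgrad, the MRD hunk above can accumulate the shared latent-space gradient with a single sum per view, the same line in both the uncertain- and certain-input branches. A self-contained sketch of that accumulation, with an illustrative View class standing in for the GPy sub-models (names are placeholders, not the GPy API):

import numpy as np

class View(object):
    """Illustrative sub-model: only the cached gradient matters here."""
    def __init__(self, grad):
        self._Xgrad = grad

    def parameters_changed(self):
        pass  # the gradient is precomputed in this sketch

n, q = 5, 2
views = [View(np.full((n, q), fill)) for fill in (1.0, 2.0, 3.0)]

# MRD-style accumulation: one shared latent gradient, summed over all views.
X_gradient = np.zeros((n, q))
for b in views:
    b.parameters_changed()
    X_gradient += b._Xgrad  # one rule regardless of input uncertainty

print(X_gradient[0])  # -> [6. 6.]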