changed to 'update_gradients_q_variational'

This commit is contained in:
Zhenwen Dai 2014-02-24 11:33:58 +00:00
parent c2750ae719
commit f311bfdf17
3 changed files with 7 additions and 8 deletions

View file

@@ -63,7 +63,7 @@ class NormalPosterior(VariationalPosterior):
         from ...plotting.matplot_dep import variational_plots
         return variational_plots.plot(self,*args)

-class SpikeAndSlab(VariationalPosterior):
+class SpikeAndSlabPosterior(VariationalPosterior):
     '''
     The SpikeAndSlab distribution for variational approximations.
     '''
@@ -71,7 +71,7 @@ class SpikeAndSlab(VariationalPosterior):
         """
         binary_prob : the probability of the distribution on the slab part.
         """
-        super(SpikeAndSlab, self).__init__(means, variances, name)
+        super(SpikeAndSlabPosterior, self).__init__(means, variances, name)
         self.gamma = Param("binary_prob",binary_prob,)
         self.add_parameter(self.gamma)

View file

@@ -182,7 +182,7 @@ class RBF(Kern):
         return grad

-    def gradients_q_variational(self, dL_dKmm, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, posterior_variational):
+    def update_gradients_q_variational(self, dL_dKmm, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, posterior_variational):
         mu = posterior_variational.mean
         S = posterior_variational.variance
         self._psi_computations(Z, mu, S)
@@ -195,7 +195,8 @@ class RBF(Kern):
         grad_mu += -2.*(dL_dpsi2[:, :, :, None] * tmp * self._psi2_mudist).sum(1).sum(1)
         grad_S += (dL_dpsi2[:, :, :, None] * tmp * (2.*self._psi2_mudist_sq - 1)).sum(1).sum(1)
-        return grad_mu, grad_S
+        posterior_variational.mean.gradient = grad_mu
+        posterior_variational.variance.gradient = grad_S

     def gradients_X(self, dL_dK, X, X2=None):
         #if self._X is None or X.base is not self._X.base or X2 is not None:

View file

@@ -63,9 +63,7 @@ class BayesianGPLVM(SparseGP, GPLVM):
         super(BayesianGPLVM, self).parameters_changed()
         self._log_marginal_likelihood -= self.variational_prior.KL_divergence(self.q)

-        # TODO: This has to go into kern
-        # maybe a update_gradients_q_variational?
-        self.q.mean.gradient, self.q.variance.gradient = self.kern.gradients_q_variational(posterior_variational=self.q, Z=self.Z, **self.grad_dict)
+        self.kern.update_gradients_q_variational(posterior_variational=self.q, Z=self.Z, **self.grad_dict)

         # update for the KL divergence
         self.variational_prior.update_gradients_KL(self.q)