[SSGPLVM] Learn prior parameters

This commit is contained in:
Zhenwen Dai 2014-03-04 10:39:56 +00:00
parent 0f6004034e
commit 0258abf5c4
2 changed files with 8 additions and 3 deletions

View file

@@ -34,12 +34,12 @@ class NormalPrior(VariationalPrior):
variational_posterior.variance.gradient -= (1. - (1. / (variational_posterior.variance))) * 0.5 variational_posterior.variance.gradient -= (1. - (1. / (variational_posterior.variance))) * 0.5
class SpikeAndSlabPrior(VariationalPrior): class SpikeAndSlabPrior(VariationalPrior):
def __init__(self, variance = 1.0, pi = 0.5, name='SpikeAndSlabPrior', **kw): def __init__(self, pi, variance = 1.0, name='SpikeAndSlabPrior', **kw):
super(VariationalPrior, self).__init__(name=name, **kw) super(VariationalPrior, self).__init__(name=name, **kw)
assert variance==1.0, "Not Implemented!" assert variance==1.0, "Not Implemented!"
self.pi = Param('pi', pi) self.pi = Param('pi', pi)
self.variance = Param('variance',variance) self.variance = Param('variance',variance)
self.add_parameters(self.pi, self.variance) self.add_parameters(self.pi)
def KL_divergence(self, variational_posterior): def KL_divergence(self, variational_posterior):
mu = variational_posterior.mean mu = variational_posterior.mean
@@ -58,6 +58,8 @@ class SpikeAndSlabPrior(VariationalPrior):
gamma.gradient -= np.log((1-self.pi)/self.pi*gamma/(1.-gamma))+(np.square(mu)+S-np.log(S)-1.)/2. gamma.gradient -= np.log((1-self.pi)/self.pi*gamma/(1.-gamma))+(np.square(mu)+S-np.log(S)-1.)/2.
mu.gradient -= gamma*mu mu.gradient -= gamma*mu
S.gradient -= (1. - (1. / (S))) * gamma /2. S.gradient -= (1. - (1. / (S))) * gamma /2.
self.pi.gradient = (gamma/self.pi - (1.-gamma)/(1.-self.pi)).sum(axis=0)
class VariationalPosterior(Parameterized): class VariationalPosterior(Parameterized):

View file

@@ -48,11 +48,14 @@ class SSGPLVM(SparseGP):
if kernel is None: if kernel is None:
kernel = kern.SSRBF(input_dim) kernel = kern.SSRBF(input_dim)
self.variational_prior = SpikeAndSlabPrior(pi=0.5) # the prior probability of the latent binary variable b pi = np.empty((input_dim))
pi[:] = 0.5
self.variational_prior = SpikeAndSlabPrior(pi=pi) # the prior probability of the latent binary variable b
X = SpikeAndSlabPosterior(X, X_variance, gamma) X = SpikeAndSlabPosterior(X, X_variance, gamma)
SparseGP.__init__(self, X, Y, Z, kernel, likelihood, inference_method, name, **kwargs) SparseGP.__init__(self, X, Y, Z, kernel, likelihood, inference_method, name, **kwargs)
self.add_parameter(self.X, index=0) self.add_parameter(self.X, index=0)
self.add_parameter(self.variational_prior)
def parameters_changed(self): def parameters_changed(self):
super(SSGPLVM, self).parameters_changed() super(SSGPLVM, self).parameters_changed()