From 0258abf5c4cc3f242c258d4e732abb49edaeafb3 Mon Sep 17 00:00:00 2001
From: Zhenwen Dai
Date: Tue, 4 Mar 2014 10:39:56 +0000
Subject: [PATCH] [SSGPLVM] Learn prior parameters

---
 GPy/core/parameterization/variational.py | 6 ++++--
 GPy/models/ss_gplvm.py                   | 5 ++++-
 2 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/GPy/core/parameterization/variational.py b/GPy/core/parameterization/variational.py
index c51a8021..d204b166 100644
--- a/GPy/core/parameterization/variational.py
+++ b/GPy/core/parameterization/variational.py
@@ -34,12 +34,12 @@ class NormalPrior(VariationalPrior):
         variational_posterior.variance.gradient -= (1. - (1. / (variational_posterior.variance))) * 0.5
 
 class SpikeAndSlabPrior(VariationalPrior):
-    def __init__(self, variance = 1.0, pi = 0.5, name='SpikeAndSlabPrior', **kw):
+    def __init__(self, pi, variance = 1.0, name='SpikeAndSlabPrior', **kw):
         super(VariationalPrior, self).__init__(name=name, **kw)
         assert variance==1.0, "Not Implemented!"
         self.pi = Param('pi', pi)
         self.variance = Param('variance',variance)
-        self.add_parameters(self.pi, self.variance)
+        self.add_parameters(self.pi)
 
     def KL_divergence(self, variational_posterior):
         mu = variational_posterior.mean
@@ -58,6 +58,8 @@ class SpikeAndSlabPrior(VariationalPrior):
         gamma.gradient -= np.log((1-self.pi)/self.pi*gamma/(1.-gamma))+(np.square(mu)+S-np.log(S)-1.)/2.
         mu.gradient -= gamma*mu
         S.gradient -= (1. - (1. / (S))) * gamma /2.
+        self.pi.gradient = (gamma/self.pi - (1.-gamma)/(1.-self.pi)).sum(axis=0)
+
 
 
 class VariationalPosterior(Parameterized):
diff --git a/GPy/models/ss_gplvm.py b/GPy/models/ss_gplvm.py
index 94682c74..0c276fe3 100644
--- a/GPy/models/ss_gplvm.py
+++ b/GPy/models/ss_gplvm.py
@@ -48,11 +48,14 @@ class SSGPLVM(SparseGP):
         if kernel is None:
             kernel = kern.SSRBF(input_dim)
 
-        self.variational_prior = SpikeAndSlabPrior(pi=0.5) # the prior probability of the latent binary variable b
+        pi = np.empty((input_dim))
+        pi[:] = 0.5
+        self.variational_prior = SpikeAndSlabPrior(pi=pi) # the prior probability of the latent binary variable b
         X = SpikeAndSlabPosterior(X, X_variance, gamma)
 
         SparseGP.__init__(self, X, Y, Z, kernel, likelihood, inference_method, name, **kwargs)
         self.add_parameter(self.X, index=0)
+        self.add_parameter(self.variational_prior)
 
     def parameters_changed(self):
         super(SSGPLVM, self).parameters_changed()