Fixed MRD inducing point gradients

This commit is contained in:
Alan Saul 2015-09-24 13:44:28 +01:00
parent 76220cd4d2
commit 7b1c35f83f
5 changed files with 8 additions and 7 deletions

View file

@@ -112,6 +112,7 @@ class SparseGP(GP):
        #gradients wrt Z
        self.Z.gradient = self.kern.gradients_X(self.grad_dict['dL_dKmm'], self.Z)
        self.Z.gradient += self.kern.gradients_X(self.grad_dict['dL_dKnm'].T, self.Z, self.X)
+       self._Zgrad = self.Z.gradient.copy()

    def _raw_predict(self, Xnew, full_cov=False, kern=None):

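Note: a minimal toy (not GPy code) of why the cache needs to be a .copy() rather than a
reference -- the shared gradient array is reset and re-summed later (e.g. by MRD), so only
a copy preserves this model's own contribution to dL/dZ:

    import numpy as np

    grad = np.ones(3)        # stands in for self.Z.gradient after the two updates above
    alias = grad             # a bare reference would be clobbered ...
    cached = grad.copy()     # ... while a copy survives, like self._Zgrad
    grad[:] = 0.             # the shared gradient array is later zeroed and re-accumulated
    print(alias)             # [0. 0. 0.] -- the reference lost this model's values
    print(cached)            # [1. 1. 1.] -- the cached copy still holds them
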
View file

@@ -406,9 +406,7 @@ def mrd_simulation(optimize=True, verbose=True, plot=True, plot_sim=True, **kw):
    D1, D2, D3, N, num_inducing, Q = 60, 20, 36, 60, 6, 5
    _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, plot_sim)
    # Ylist = [Ylist[0]]
-   k = kern.Linear(Q, ARD=True)
+   k = kern.Linear(Q) + kern.White(Q, variance=1e-4)
    m = MRD(Ylist, input_dim=Q, num_inducing=num_inducing, kernel=k, initx="PCA_concat", initz='permute', **kw)
    m['.*noise'] = [Y.var() / 40. for Y in Ylist]
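
Note: reading the two adjacent assignments to k as the removed and added lines, the example
swaps the ARD linear kernel for a plain linear kernel plus a small white-noise (jitter) term.
A hedged sketch of constructing that sum kernel with GPy's kernel API (only the latent
dimensionality Q=5 is taken from the example; the rest is illustrative):

    import GPy

    Q = 5
    k = GPy.kern.Linear(Q) + GPy.kern.White(Q, variance=1e-4)
    print(k)    # prints the resulting 'sum' kernel and its parameters
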
@@ -428,8 +426,7 @@ def mrd_simulation_missing_data(optimize=True, verbose=True, plot=True, plot_sim=True, **kw):
    D1, D2, D3, N, num_inducing, Q = 60, 20, 36, 60, 6, 5
    _, _, Ylist = _simulate_matern(D1, D2, D3, N, num_inducing, plot_sim)
    # Ylist = [Ylist[0]]
-   k = kern.Linear(Q, ARD=True)
+   k = kern.Linear(Q) + kern.White(Q, variance=1e-4)
    inanlist = []
    for Y in Ylist:

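Note: the missing-data variant gets the same kernel change; the inanlist loop (its body is cut
off by the hunk boundary) collects per-view masks of entries to treat as missing. A purely
hypothetical sketch of what such masks could look like -- the mask construction and the missing
rate are assumptions, not taken from this diff:

    import numpy as np

    D1, D2, D3, N = 60, 20, 36, 60                 # sizes as in the example above
    Ylist = [np.random.randn(N, D) for D in (D1, D2, D3)]

    inanlist = []
    for Y in Ylist:
        inan = np.random.rand(*Y.shape) < 0.1      # assumed 10% missing rate
        inanlist.append(inan)
        Y[inan] = np.nan                           # entries the model should treat as missing
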
View file

@@ -97,7 +97,7 @@ class BayesianGPLVM(SparseGP_MPI):
                dL_dpsi2=self.grad_dict['dL_dpsi2'])
        self.variational_prior.update_gradients_KL(self.X)
        self._Xgrad = self.X.gradient.copy()
        #super(BayesianGPLVM, self).parameters_changed()
        #self._log_marginal_likelihood -= self.variational_prior.KL_divergence(self.X)

View file

@@ -15,6 +15,7 @@ from ..util.initialization import initialize_latent
from ..core.sparse_gp import SparseGP, GP
from GPy.core.parameterization.variational import VariationalPosterior
from GPy.models.bayesian_gplvm_minibatch import BayesianGPLVMMiniBatch
from GPy.models.bayesian_gplvm import BayesianGPLVM
from GPy.models.sparse_gp_minibatch import SparseGPMiniBatch
class MRD(BayesianGPLVMMiniBatch):
@@ -170,7 +171,8 @@ class MRD(BayesianGPLVMMiniBatch):
            self._log_marginal_likelihood += b._log_marginal_likelihood
            self.logger.info('working on im <{}>'.format(hex(id(i))))
-           self.Z.gradient[:] += b.Z.gradient#full_values['Zgrad']
+           self.Z.gradient[:] += b._Zgrad # b.Z.gradient # full_values['Zgrad']
            #grad_dict = b.full_values
            if self.has_uncertain_inputs():

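Note: this hunk is the fix proper -- MRD now sums each view's cached _Zgrad instead of
b.Z.gradient. Since the views share the inducing inputs, the live gradient array no longer
reflects a single view's contribution by the time the sum is taken, whereas the copy cached in
parameters_changed does. A minimal sketch of the accumulation (the View class and the list name
bgplvms are illustrative stand-ins, not the GPy implementation):

    import numpy as np

    class View(object):
        def __init__(self, dL_dZ):
            self._Zgrad = dL_dZ                    # cached by each view when its gradients are computed

    num_inducing, Q = 6, 5                         # as in the examples above
    bgplvms = [View(np.random.randn(num_inducing, Q)) for _ in range(3)]

    Z_gradient = np.zeros((num_inducing, Q))       # gradient of the shared inducing inputs
    for b in bgplvms:
        Z_gradient += b._Zgrad                     # sum each view's cached contribution
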
View file

@@ -321,3 +321,4 @@ class SparseGPMiniBatch(SparseGP):
        else:
            self.posterior, self._log_marginal_likelihood, self.grad_dict = self._inner_parameters_changed(self.kern, self.X, self.Z, self.likelihood, self.Y_normalized, self.Y_metadata)
            self._outer_values_update(self.grad_dict)
+       self._Zgrad = self.Z.gradient.copy()