mirror of https://github.com/SheffieldML/GPy.git (synced 2026-05-15 06:52:39 +02:00)

beginning of bgplvm with missing data

parent 20f749ff0d
commit 825d3c2154

2 changed files with 43 additions and 8 deletions
@@ -54,19 +54,21 @@ class SparseGP(GP):
         self.add_parameter(self.Z, index=0)
         self.parameters_changed()
 
-    def parameters_changed(self):
-        self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.X_variance, self.Z, self.likelihood, self.Y)
-
-        #The derivative of the bound wrt the inducing inputs Z (unless they're all fixed)
+    def _update_gradients_Z(self, add=False):
+        #The derivative of the bound wrt the inducing inputs Z (unless they're all fixed)
         if not self.Z.is_fixed:
-            self.Z.gradient = self.kern.gradients_X(self.grad_dict['dL_dKmm'], self.Z)
+            if add: self.Z.gradient += self.kern.gradients_X(self.grad_dict['dL_dKmm'], self.Z)
+            else: self.Z.gradient = self.kern.gradients_X(self.grad_dict['dL_dKmm'], self.Z)
             if self.X_variance is None:
                 self.Z.gradient += self.kern.gradients_X(self.grad_dict['dL_dKnm'].T, self.Z, self.X)
             else:
                 self.Z.gradient += self.kern.dpsi1_dZ(self.grad_dict['dL_dpsi1'], self.Z, self.X, self.X_variance)
                 self.Z.gradient += self.kern.dpsi2_dZ(self.grad_dict['dL_dpsi2'], self.Z, self.X, self.X_variance)
 
+    def parameters_changed(self):
+        self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.X_variance, self.Z, self.likelihood, self.Y)
+        self._update_gradients_Z(add=False)
+
     def _raw_predict(self, Xnew, X_variance_new=None, which_parts='all', full_cov=False):
         """
         Make a prediction for the latent function values
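The new `add` flag only controls whether the dL_dKmm term overwrites or accumulates into `Z.gradient`. A standalone sketch of the accumulation pattern this enables (plain numpy; the function and variable names here are hypothetical, not GPy API): a model that runs one partial inference pass per block of data, as the missing-data variant introduced below must, can sum the per-block contributions to the inducing-input gradient.

    import numpy as np

    def update_gradient(grad, contribution, add=False):
        """Overwrite the buffer on the first pass, accumulate on later ones."""
        if add:
            grad += contribution       # later passes: sum the contributions
        else:
            grad[...] = contribution   # first pass: reset the buffer
        return grad

    # e.g. one contribution per block of rows sharing a missingness pattern
    Z_grad = np.empty(3)
    for i, c in enumerate([np.ones(3), 2 * np.ones(3)]):
        update_gradient(Z_grad, c, add=(i > 0))
    print(Z_grad)  # [3. 3. 3.]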
@@ -72,9 +72,10 @@ class BayesianGPLVM(SparseGP, GPLVM):
         return 0.5 * (var_mean + var_S) - 0.5 * self.input_dim * self.num_data
 
     def parameters_changed(self):
-        super(BayesianGPLVM, self).parameters_changed()
-        self._log_marginal_likelihood -= self.KL_divergence()
-
+        self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.X_variance, self.Z, self.likelihood, self.Y)
+        self._update_gradients_Z(add=False)
+
+        self._log_marginal_likelihood -= self.KL_divergence()
         dL_dmu, dL_dS = self.dL_dmuS()
 
         # dL:
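For reference (a standard Gaussian identity, not part of this diff): with the factorised variational posterior q(X) = N(mu, S) and a unit-Gaussian prior over the latent points, the KL_divergence() term subtracted here, together with the gradients it contributes under the `# dKL:` comments in the next hunk, is

    \mathrm{KL}(q \,\|\, p) = \frac{1}{2} \sum_{n=1}^{N} \sum_{q=1}^{Q} \left( \mu_{nq}^2 + S_{nq} - \ln S_{nq} \right) - \frac{1}{2} N Q,
    \qquad
    \frac{\partial\,\mathrm{KL}}{\partial \mu_{nq}} = \mu_{nq},
    \qquad
    \frac{\partial\,\mathrm{KL}}{\partial S_{nq}} = \frac{1}{2} \left( 1 - \frac{1}{S_{nq}} \right)

which is exactly the `0.5 * (var_mean + var_S) - 0.5 * self.input_dim * self.num_data` return value above (var_mean being the summed squared means, var_S the summed S - ln S), and exactly the `-= self.X` and `-= (1. - (1. / self.X_variance)) * 0.5` gradient updates below.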
@@ -161,6 +162,38 @@ class BayesianGPLVM(SparseGP, GPLVM):
         return dim_reduction_plots.plot_steepest_gradient_map(self,*args,**kwargs)
 
+class BayesianGPLVMWithMissingData(BayesianGPLVM):
+    def __init__(self, Y, input_dim, X=None, X_variance=None, init='PCA', num_inducing=10,
+                 Z=None, kernel=None, inference_method=None, likelihood=None, name='bayesian gplvm', **kwargs):
+        from ..util.subarray_and_sorting import common_subarrays
+        self.subarrays = common_subarrays(Y)
+        import ipdb; ipdb.set_trace()
+        BayesianGPLVM.__init__(self, Y, input_dim, X=X, X_variance=X_variance, init=init, num_inducing=num_inducing, Z=Z, kernel=kernel, inference_method=inference_method, likelihood=likelihood, name=name, **kwargs)
+
+    def parameters_changed(self):
+        super(BayesianGPLVM, self).parameters_changed()
+        self._log_marginal_likelihood -= self.KL_divergence()
+
+        dL_dmu, dL_dS = self.dL_dmuS()
+
+        # dL:
+        self.q.mean.gradient = dL_dmu
+        self.q.variance.gradient = dL_dS
+
+        # dKL:
+        self.q.mean.gradient -= self.X
+        self.q.variance.gradient -= (1. - (1. / (self.X_variance))) * 0.5
+
+if __name__ == '__main__':
+    import numpy as np
+    X = np.random.randn(20,2)
+    W = np.linspace(0,1,10)[None,:]
+    Y = (X*W).sum(1)
+    missing = np.random.binomial(1,.1,size=Y.shape)
+
+    pass
+
 def latent_cost_and_grad(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2):
     """
     objective function for fitting the latent variables for test points
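Note the `__main__` block added here is still a stub: `X*W` with shapes (20,2) and (1,10) does not broadcast, and the `missing` mask is generated but never applied to `Y`. A minimal sketch of what the toy setup appears to be aiming for, assuming missing entries are marked as NaN and rows are grouped by their observed-dimension pattern (the grouping loop imitates what `common_subarrays` presumably computes; that is an assumption, not its documented behaviour):

    import numpy as np

    np.random.seed(0)
    X = np.random.randn(20, 2)          # 20 latent points in 2 dimensions
    W = np.random.randn(2, 10)          # linear map to 10 output dimensions
    Y = X.dot(W)                        # (20, 10) fully observed outputs

    # knock out roughly 10% of the entries; NaN marks a missing value
    missing = np.random.binomial(1, .1, size=Y.shape).astype(bool)
    Y[missing] = np.nan

    # rows sharing one missingness pattern can share one inference pass
    patterns = {}
    for i, observed in enumerate(~np.isnan(Y)):
        patterns.setdefault(tuple(observed), []).append(i)
    print(len(patterns), "distinct observed-dimension patterns")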