everything is broken

This commit is contained in:
James Hensman 2014-02-20 14:04:16 +00:00
parent de51ad638a
commit d636c8c30c
13 changed files with 325 additions and 323 deletions

View file

@@ -53,20 +53,19 @@ class SparseGP(GP):
self.add_parameter(self.Z, index=0)
self.parameters_changed()
def _update_gradients_Z(self, add=False):
#The derivative of the bound wrt the inducing inputs Z ( unless they're all fixed)
def _gradients_Z(self):
#The derivative of the bound wrt the inducing inputs Z ( unless they're all fixed)
if not self.Z.is_fixed:
if add: self.Z.gradient += self.kern.gradients_X(self.grad_dict['dL_dKmm'], self.Z)
else: self.Z.gradient = self.kern.gradients_X(self.grad_dict['dL_dKmm'], self.Z)
if self.X_variance is None:
self.Z.gradient += self.kern.gradients_X(self.grad_dict['dL_dKnm'].T, self.Z, self.X)
self.Z.gradient = self.kern.gradients_Z_sparse(X=self.X, Z=self.Z, **self.grad_dict)
else:
self.Z.gradient += self.kern.dpsi1_dZ(self.grad_dict['dL_dpsi1'], self.Z, self.X, self.X_variance)
self.Z.gradient += self.kern.dpsi2_dZ(self.grad_dict['dL_dpsi2'], self.Z, self.X, self.X_variance)
self.Z.gradient = self.kern.gradients_Z_variational(mu=self.X, S=self.X_variance, Z=self.Z, **self.grad_dict)
print self.Z.gradient
print id(self.Z)
def parameters_changed(self):
self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.X_variance, self.Z, self.likelihood, self.Y)
self._update_gradients_Z(add=False)
self.Z.gradient = self._gradients_Z()
def _raw_predict(self, Xnew, X_variance_new=None, full_cov=False):
"""