Partially working version of sympy covariance with new params version.

This commit is contained in:
Neil Lawrence 2014-02-24 21:16:26 +00:00
parent 3968d48ba5
commit 339b5caa76
6 changed files with 182 additions and 479 deletions

View file

@ -36,7 +36,7 @@ class Kern(Parameterized):
raise NotImplementedError
def gradients_X_diag(self, dL_dK, X):
raise NotImplementedError
def update_gradients_full(self, dL_dK, X):
def update_gradients_full(self, dL_dK, X, X2):
"""Set the gradients of all parameters when doing full (N) inference."""
raise NotImplementedError
def update_gradients_sparse(self, dL_dKmm, dL_dKnm, dL_dKdiag, X, Z):
@ -125,7 +125,7 @@ class Kern(Parameterized):
from GPy.core.model import Model
class Kern_check_model(Model):
"""This is a dummy model class used as a base class for checking that the gradients of a given kernel are implemented correctly. It enables checkgradient() to be called independently on a kernel."""
"""This is a dummy model class used as a base class for checking that the gradients of a given kernel are implemented correctly. It enables checkgrad() to be called independently on a kernel."""
def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
Model.__init__(self, 'kernel_test_model')
num_samples = 20
@ -165,9 +165,10 @@ class Kern_check_dK_dtheta(Kern_check_model):
Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=X2)
def _log_likelihood_gradients(self):
return self.kernel._param_grad_helper(self.dL_dK, self.X, self.X2)
target = np.zeros_like(self._get_params())
self.kernel._param_grad_helper(self.dL_dK, self.X, self.X2, target)
return target