From 65702c1448ff5782b374f10d02ba965ac9891c74 Mon Sep 17 00:00:00 2001
From: Neil Lawrence
Date: Tue, 20 Aug 2013 19:38:12 +0200
Subject: [PATCH] Added models for testing kernel gradients in unit tests.

---
 GPy/kern/kern.py            |  32 ------------
 GPy/notes.txt               |   4 +-
 GPy/testing/kernel_tests.py | 100 +++++++++++++++++++++++++++++++++++-
 3 files changed, 102 insertions(+), 34 deletions(-)

diff --git a/GPy/kern/kern.py b/GPy/kern/kern.py
index d78297d0..04c63773 100644
--- a/GPy/kern/kern.py
+++ b/GPy/kern/kern.py
@@ -536,35 +536,3 @@ class kern(Parameterized):
         else:
             raise NotImplementedError, "Cannot plot a kernel with more than two input dimensions"
 
-    def objective_and_gradients_dK_dtheta(self, param, X, X2=None):
-        self._set_param(param)
-        K = self.K(X, X2)
-        f = K.sum()
-        dL_dK = np.ones_like(K)
-        g = self.dK_dtheta(param, dL_dK, X, X2)
-        return f, g
-
-    def objective_and_gradients_dK_dX(self, param, X, X2=None):
-        self._set_param(param)
-        K = self.K(X, X2)
-        f = K.sum()
-        dL_dK = np.ones_like(K)
-        g = self.dK_dX(param, dL_dK, X, X2)
-        return f, g
-
-    def objective_and_gradients_dKdiag_dtheta(self, param, X, X2=None):
-        self._set_param(param)
-        Kdiag = self.Kdiag(X)
-        f = Kdiag.sum()
-        dL_dK = np.ones_like(Kdiag)
-        g = self.dKdiag_dtheta(param, dL_dK, X)
-        return f, g
-
-    def objective_and_gradients_dKdiag_dX(self, param, X, X2=None):
-        self._set_param(param)
-        Kdiag = self.Kdiag(X)
-        f = Kdiag.sum()
-        dL_dK = np.ones_like(Kdiag)
-        g = self.dK_dX(param, dL_dK, X)
-        return f, g
-
diff --git a/GPy/notes.txt b/GPy/notes.txt
index cae1f337..c9c29ace 100644
--- a/GPy/notes.txt
+++ b/GPy/notes.txt
@@ -24,6 +24,8 @@
 Need to tidy up classification.py, many examples include help that doesn't
 apply (it is suggested that you can try different approximation types)
 
-Shall we overload the ** operator to have tensor products?
+Shall we overload the ** operator to have tensor products? (I've done this now, so we can see if we like it.)
 
 People aren't filling the doc strings in as they go *everyone* needs to get in the habit of this (and modifying them as they edit, or correcting them when there is a problem).
+
+Need some nice way of explaining how to compile the documentation and run the unit tests. Could this be in a readme or FAQ somewhere? Maybe it's there already and I've missed it.
diff --git a/GPy/testing/kernel_tests.py b/GPy/testing/kernel_tests.py
index cd58a7e0..4e264ca3 100644
--- a/GPy/testing/kernel_tests.py
+++ b/GPy/testing/kernel_tests.py
@@ -1,20 +1,118 @@
-# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
+# Copyright (c) 2012, 2013 GPy authors (see AUTHORS.txt).
 # Licensed under the BSD 3-clause license (see LICENSE.txt)
 
 import unittest
 import numpy as np
 import GPy
+from GPy.core.model import Model
+
+class Kern_check_model(Model):
+    """This is a dummy model class used as a base class for checking that the gradients of a given kernel are implemented correctly.
+    It enables checkgrad() to be called independently on a kernel."""
+    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
+        num_samples = 20
+        num_samples2 = 10
+        if kernel==None:
+            kernel = GPy.kern.rbf(1)
+        if X==None:
+            X = np.random.randn(num_samples, kernel.input_dim)
+        if X2==None:
+            X2 = np.random.randn(num_samples2, kernel.input_dim)
+        if dL_dK==None:
+            dL_dK = np.ones((X.shape[0], X2.shape[0]))
+
+        self.kernel=kernel
+        self.X = X
+        self.X2 = X2
+        self.dL_dK = dL_dK
+        #self.constrained_indices=[]
+        #self.constraints=[]
+        Model.__init__(self)
+
+    def is_positive_definite(self):
+        v = np.linalg.eig(self.kernel.K(self.X))[0]
+        if any(v<0):
+            return False
+        else:
+            return True
+
+    def _get_params(self):
+        return self.kernel._get_params()
+
+    def _get_param_names(self):
+        return self.kernel._get_param_names()
+
+    def _set_params(self, x):
+        self.kernel._set_params(x)
+
+    def log_likelihood(self):
+        return (self.dL_dK*self.kernel.K(self.X, self.X2)).sum()
+
+    def _log_likelihood_gradients(self):
+        raise NotImplementedError, "This needs to be implemented to use the Kern_check_model class."
+
+class Kern_check_dK_dtheta(Kern_check_model):
+    """This class allows gradient checks for the gradient of a kernel with respect to the parameters."""
+    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
+        Kern_check_model.__init__(self, kernel=kernel, dL_dK=dL_dK, X=X, X2=X2)
+
+    def _log_likelihood_gradients(self):
+        return self.kernel.dK_dtheta(self.dL_dK, self.X, self.X2)
+
+class Kern_check_dKdiag_dtheta(Kern_check_model):
+    """This class allows gradient checks of the gradient of the diagonal of a kernel with respect to the parameters."""
+    def __init__(self, kernel=None, dL_dK=None, X=None):
+        Kern_check_model.__init__(self, kernel=kernel, dL_dK=dL_dK, X=X, X2=None)
+        if dL_dK==None:
+            self.dL_dK = np.ones((self.X.shape[0]))
+
+    def log_likelihood(self):
+        return (self.dL_dK*self.kernel.Kdiag(self.X)).sum()
+
+    def _log_likelihood_gradients(self):
+        return self.kernel.dKdiag_dtheta(self.dL_dK, self.X)
+
+class Kern_check_dK_dX(Kern_check_model):
+    """This class allows gradient checks for the gradient of a kernel with respect to X."""
+    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
+        Kern_check_model.__init__(self, kernel=kernel, dL_dK=dL_dK, X=X, X2=X2)
+
+    def _log_likelihood_gradients(self):
+        return self.kernel.dK_dX(self.dL_dK, self.X, self.X2).flatten()
+
+    def _get_param_names(self):
+        names = []
+        for i in range(self.X.shape[0]):
+            for j in range(self.X.shape[1]):
+                names.append('X_' + str(i) + ',' + str(j))
+        return names
+
+    def _get_params(self):
+        return self.X.flatten()
+
+    def _set_params(self, x):
+        self.X = x.reshape(self.X.shape)
+
+
 class KernelTests(unittest.TestCase):
     def test_kerneltie(self):
         K = GPy.kern.rbf(5, ARD=True)
         K.tie_params('.*[01]')
         K.constrain_fixed('2')
+        X = np.random.rand(5,5)
         Y = np.ones((5,1))
         m = GPy.models.GPRegression(X,Y,K)
         self.assertTrue(m.checkgrad())
 
+    def test_rbfkernel(self):
+        verbose = False
+        kern = GPy.kern.rbf(5)
+        self.assertTrue(Kern_check_model(kern).is_positive_definite())
+        self.assertTrue(Kern_check_dK_dtheta(kern).checkgrad(verbose=verbose))
+        self.assertTrue(Kern_check_dKdiag_dtheta(kern).checkgrad(verbose=verbose))
+        self.assertTrue(Kern_check_dK_dX(kern).checkgrad(verbose=verbose))
+
     def test_fixedkernel(self):
         """
         Fixed effect kernel test