mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-03 08:42:39 +02:00
removing testing code from kern.py (it's now in kern_tests.py)
This commit is contained in:
parent
3a0a192362
commit
6a667e749f
1 changed files with 0 additions and 207 deletions
|
|
@ -124,210 +124,3 @@ class Kern(Parameterized):
|
||||||
assert isinstance(other, Kern), "only kernels can be added to kernels..."
|
assert isinstance(other, Kern), "only kernels can be added to kernels..."
|
||||||
from prod import Prod
|
from prod import Prod
|
||||||
return Prod(self, other, tensor)
|
return Prod(self, other, tensor)
|
||||||
|
|
||||||
|
|
||||||
from GPy.core.model import Model
|
|
||||||
|
|
||||||
class Kern_check_model(Model):
    """Dummy model used as a base class for checking that the gradients of a
    given kernel are implemented correctly.

    Wrapping the kernel in a model enables ``checkgrad()`` to be called
    independently on the kernel, without a likelihood or data model.

    :param kernel: the kernel to test; defaults to ``RBF(1)``.
    :param dL_dK: gradient of the surrogate objective w.r.t. K; defaults to a
        matrix of ones matching the covariance shape.
    :param X: input values; defaults to 20 random samples.
    :param X2: optional second input set for cross-covariances K(X, X2).
    """
    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
        from GPy.kern import RBF
        Model.__init__(self, 'kernel_test_model')
        num_samples = 20
        # Use `is None` rather than `== None`: comparing an ndarray with `==`
        # yields an elementwise array, which is ambiguous in a boolean context.
        if kernel is None:
            kernel = RBF(1)
        if X is None:
            X = np.random.randn(num_samples, kernel.input_dim)
        if dL_dK is None:
            if X2 is None:
                dL_dK = np.ones((X.shape[0], X.shape[0]))
            else:
                dL_dK = np.ones((X.shape[0], X2.shape[0]))

        self.kernel = kernel
        self.add_parameter(kernel)
        self.X = X
        self.X2 = X2
        self.dL_dK = dL_dK

    def is_positive_definite(self):
        """Return True if K(X, X) has no significantly negative eigenvalue."""
        v = np.linalg.eig(self.kernel.K(self.X))[0]
        # Tolerate tiny negative eigenvalues caused by floating-point round-off.
        return not any(v < -10 * sys.float_info.epsilon)

    def log_likelihood(self):
        """Surrogate objective: sum of dL_dK * K(X, X2)."""
        return (self.dL_dK * self.kernel.K(self.X, self.X2)).sum()

    def _log_likelihood_gradients(self):
        # Python 3-compatible raise syntax (the original used the removed
        # `raise Exc, "msg"` form, which is a SyntaxError on Python 3).
        raise NotImplementedError("This needs to be implemented to use the kern_check_model class.")
|
|
||||||
|
|
||||||
class Kern_check_dK_dtheta(Kern_check_model):
    """Gradient-check helper for dK/dtheta: the derivative of the full
    covariance K(X, X2) with respect to the kernel parameters."""

    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
        Kern_check_model.__init__(self, kernel=kernel, dL_dK=dL_dK, X=X, X2=X2)

    def _log_likelihood_gradients(self):
        # Accumulate the parameter gradients into a zeroed buffer shaped
        # like the current parameter vector.
        grad = np.zeros_like(self._get_params())
        self.kernel._param_grad_helper(self.dL_dK, self.X, self.X2, grad)
        return grad
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class Kern_check_dKdiag_dtheta(Kern_check_model):
    """Gradient-check helper for the derivative of the covariance diagonal
    Kdiag(X) with respect to the kernel parameters."""

    def __init__(self, kernel=None, dL_dK=None, X=None):
        Kern_check_model.__init__(self, kernel=kernel, dL_dK=dL_dK, X=X, X2=None)
        # `is None` rather than `== None`: an ndarray compared with `==`
        # yields an elementwise array, not a boolean.
        if dL_dK is None:
            # The diagonal objective only needs one weight per data point.
            self.dL_dK = np.ones((self.X.shape[0]))

    def parameters_changed(self):
        # NOTE(review): this pushes the 1-D diagonal weights through
        # update_gradients_full, while the gradients below come from
        # dKdiag_dtheta — confirm the kernel accepts this shape here.
        self.kernel.update_gradients_full(self.dL_dK, self.X)

    def log_likelihood(self):
        """Surrogate objective: sum of dL_dK * Kdiag(X)."""
        return (self.dL_dK * self.kernel.Kdiag(self.X)).sum()

    def _log_likelihood_gradients(self):
        return self.kernel.dKdiag_dtheta(self.dL_dK, self.X)
|
|
||||||
|
|
||||||
class Kern_check_dK_dX(Kern_check_model):
    """Gradient-check helper for dK/dX: the derivative of the full
    covariance with respect to the inputs X."""

    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
        Kern_check_model.__init__(self, kernel=kernel, dL_dK=dL_dK, X=X, X2=X2)
        # Fix the kernel parameters and expose X itself as the model's
        # (only) parameter, so that checkgrad perturbs the inputs instead.
        self.remove_parameter(kernel)
        self.X = Param('X', self.X)
        self.add_parameter(self.X)

    def _log_likelihood_gradients(self):
        dL_dX = self.kernel.gradients_X(self.dL_dK, self.X, self.X2)
        return dL_dX.flatten()
|
|
||||||
|
|
||||||
class Kern_check_dKdiag_dX(Kern_check_dK_dX):
    """Gradient-check helper for the derivative of the covariance diagonal
    Kdiag(X) with respect to the inputs X.

    ``X2`` is accepted for signature compatibility with the parent class but
    ignored: the diagonal involves a single set of inputs.
    """

    def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
        Kern_check_dK_dX.__init__(self, kernel=kernel, dL_dK=dL_dK, X=X, X2=None)
        # `is None` rather than `== None`: an ndarray compared with `==`
        # yields an elementwise array, not a boolean.
        if dL_dK is None:
            # The diagonal objective only needs one weight per data point.
            self.dL_dK = np.ones((self.X.shape[0]))

    def log_likelihood(self):
        """Surrogate objective: sum of dL_dK * Kdiag(X)."""
        return (self.dL_dK * self.kernel.Kdiag(self.X)).sum()

    def _log_likelihood_gradients(self):
        return self.kernel.dKdiag_dX(self.dL_dK, self.X).flatten()
|
|
||||||
|
|
||||||
def kern_test(kern, X=None, X2=None, output_ind=None, verbose=False):
    """
    This function runs on kernels to check the correctness of their
    implementation. It checks that the covariance function is positive
    definite for a randomly generated data set, then gradient-checks the
    derivatives of K and Kdiag with respect to the parameters and to X.

    :param kern: the kernel to be tested.
    :type kern: GPy.kern.Kernpart
    :param X: X input values to test the covariance function.
    :type X: ndarray
    :param X2: X2 input values to test the covariance function.
    :type X2: ndarray
    :param output_ind: optional column index of X/X2 holding output indices
        (multi-output kernels); filled with random draws from
        [0, kern.output_dim) when X/X2 are generated here.
    :param verbose: print progress and per-check results.
    :returns: True if all checks pass, False at the first failure.
    """
    def _check(factory, description, catch_not_implemented):
        # Run one gradient check; on failure, re-run verbosely so the
        # offending gradient values are printed, then report failure.
        if verbose:
            print("Checking gradients of " + description + ".")
        try:
            result = factory().checkgrad(verbose=verbose)
        except NotImplementedError:
            # Kernels without X-gradients are not considered failures.
            if not catch_not_implemented:
                raise
            if verbose:
                print("gradients_X not implemented for " + kern.name)
            return True
        if result:
            if verbose:
                print("Check passed.")
            return True
        print("Gradient of " + description + " failed for " + kern.name + " covariance function. Gradient values as follows:")
        factory().checkgrad(verbose=True)
        return False

    # `is None` rather than `== None`: a caller-supplied ndarray compared
    # with `==` yields an elementwise array, which raises in `if`.
    if X is None:
        X = np.random.randn(10, kern.input_dim)
        if output_ind is not None:
            # Fill the output-index column with valid output labels in
            # [0, kern.output_dim). The original `randint(low, high)` call
            # drew a single scalar in the wrong range; `size=` draws one
            # index per row.
            X[:, output_ind] = np.random.randint(kern.output_dim, size=X.shape[0])
    if X2 is None:
        X2 = np.random.randn(20, kern.input_dim)
        if output_ind is not None:
            X2[:, output_ind] = np.random.randint(kern.output_dim, size=X2.shape[0])

    if verbose:
        print("Checking covariance function is positive definite.")
    if Kern_check_model(kern, X=X).is_positive_definite():
        if verbose:
            print("Check passed.")
    else:
        print("Positive definite check failed for " + kern.name + " covariance function.")
        return False

    # (factory, description, tolerate-NotImplementedError) for each check;
    # descriptions name the actual check (the original copy-pasted
    # "K(X, X)" into the K(X, X2) failure messages).
    checks = [
        (lambda: Kern_check_dK_dtheta(kern, X=X, X2=None), "K(X, X) wrt theta", False),
        (lambda: Kern_check_dK_dtheta(kern, X=X, X2=X2), "K(X, X2) wrt theta", False),
        (lambda: Kern_check_dKdiag_dtheta(kern, X=X), "Kdiag(X) wrt theta", False),
        (lambda: Kern_check_dK_dX(kern, X=X, X2=None), "K(X, X) wrt X", True),
        (lambda: Kern_check_dK_dX(kern, X=X, X2=X2), "K(X, X2) wrt X", True),
        (lambda: Kern_check_dKdiag_dX(kern, X=X), "Kdiag(X) wrt X", True),
    ]
    for factory, description, catch in checks:
        if not _check(factory, description, catch):
            return False
    return True
|
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue