testing cleaned up a bit; periodic is turned off because it needs different tests, discontinuous tests still needed

Max Zwiessele 2014-03-13 13:13:15 +00:00
parent 0d343cf0ca
commit 1f9509d979
9 changed files with 71 additions and 65 deletions

View file

@@ -253,7 +253,7 @@ class Model(Parameterized):
         sgd.run()
         self.optimization_runs.append(sgd)

-    def _checkgrad(self, target_param=None, verbose=False, step=1e-6, tolerance=1e-3, _debug=False):
+    def _checkgrad(self, target_param=None, verbose=False, step=1e-6, tolerance=1e-3):
         """
         Check the gradient of the model by comparing to a numerical
         estimate. If the verbose flag is passed, individual
@@ -349,13 +349,6 @@ class Model(Parameterized):
             xx[xind] -= 2.*step
             f2 = self.objective_function(xx)
             numerical_gradient = (f1 - f2) / (2 * step)
-            if _debug:
-                for p in self.kern.flattened_parameters:
-                    p._parent_._debug=True
-                self.gradient[xind] = numerical_gradient
-                self._set_params_transformed(x)
-                for p in self.kern.flattened_parameters:
-                    p._parent_._debug=False
             if np.all(gradient[xind]==0): ratio = (f1-f2) == gradient[xind]
             else: ratio = (f1 - f2) / (2 * step * gradient[xind])
             difference = np.abs((f1 - f2) / 2 / step - gradient[xind])
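
The quantity computed here is a standard central-difference estimate. A minimal standalone sketch of the same arithmetic (names are illustrative, not GPy's API):

    import numpy as np

    def numerical_gradient(f, x, step=1e-6):
        # central differences: perturb each coordinate by +/- step
        grad = np.zeros_like(x)
        for i in range(x.size):
            xx = x.copy(); xx[i] += step
            f1 = f(xx)
            xx[i] -= 2. * step
            f2 = f(xx)
            grad[i] = (f1 - f2) / (2 * step)
        return grad

    # checkgrad then reports either the ratio against the analytic gradient
    # (should be ~1) or the absolute difference (should be ~0).
    # e.g. numerical_gradient(lambda v: (v**2).sum(), np.ones(3)) -> approx [2., 2., 2.]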

View file

@@ -446,8 +446,8 @@ class ParamConcatenation(object):
     def untie(self, *ties):
         [param.untie(*ties) for param in self.params]

-    def checkgrad(self, verbose=0, step=1e-6, tolerance=1e-3, _debug=False):
-        return self.params[0]._highest_parent_._checkgrad(self, verbose, step, tolerance, _debug=_debug)
+    def checkgrad(self, verbose=0, step=1e-6, tolerance=1e-3):
+        return self.params[0]._highest_parent_._checkgrad(self, verbose, step, tolerance)
     #checkgrad.__doc__ = Gradcheckable.checkgrad.__doc__

     __lt__ = lambda self, val: self._vals() < val

View file

@@ -206,7 +206,7 @@ class Gradcheckable(Parentable):
     def __init__(self, *a, **kw):
         super(Gradcheckable, self).__init__(*a, **kw)

-    def checkgrad(self, verbose=0, step=1e-6, tolerance=1e-3, _debug=False):
+    def checkgrad(self, verbose=0, step=1e-6, tolerance=1e-3):
         """
         Check the gradient of this parameter with respect to the highest parent's
         objective function.
@@ -220,10 +220,10 @@ class Gradcheckable(Parentable):
         :param float tolerance: the tolerance for the gradient ratio or difference.
         """
         if self.has_parent():
-            return self._highest_parent_._checkgrad(self, verbose=verbose, step=step, tolerance=tolerance, _debug=_debug)
-        return self._checkgrad(self[''], verbose=verbose, step=step, tolerance=tolerance, _debug=_debug)
+            return self._highest_parent_._checkgrad(self, verbose=verbose, step=step, tolerance=tolerance)
+        return self._checkgrad(self[''], verbose=verbose, step=step, tolerance=tolerance)

-    def _checkgrad(self, param, verbose=0, step=1e-6, tolerance=1e-3, _debug=False):
+    def _checkgrad(self, param, verbose=0, step=1e-6, tolerance=1e-3):
         """
         Perform the checkgrad on the model.
         TODO: this can be done more efficiently, when doing it inside here
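
Both entry points funnel into the highest parent's _checkgrad. A hedged usage sketch (assuming a fitted GPy model m; the attribute path is illustrative):

    m.checkgrad(verbose=True)          # check every parameter of the model
    m.rbf.lengthscale.checkgrad()      # same machinery, restricted to one parameter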
@@ -694,6 +694,10 @@ class Parameterizable(OptimizationHandlable):
         elif pname not in dir(self):
             self.__dict__[pname] = param
             self._added_names_.add(pname)
+        else:
+            print "WARNING: added a parameter with formatted name {}, which is already a member of {} object. Trying to change the parameter name to\n {}".format(pname, self.__class__, param.name+"_")
+            param.name += "_"
+            self._add_parameter_name(param, ignore_added_names)

     def _remove_parameter_name(self, param=None, pname=None):
         assert param is None or pname is None, "can only delete either param by name, or the name of a param"
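
The new else branch resolves a name clash by appending an underscore and recursing until the formatted name is free. The same idea in isolation (a hypothetical helper, not GPy code):

    def unique_name(name, taken):
        # keep appending underscores until the name no longer collides
        while name in taken:
            name += "_"
        return name

    print(unique_name("lengthscale", {"lengthscale", "lengthscale_"}))  # -> lengthscale__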

View file

@@ -156,7 +156,7 @@ class Kern(Parameterized):
         other.active_dims += self.input_dim
         return self.prod(other)

-    def prod(self, other, name='prod'):
+    def prod(self, other, name='mul'):
         """
         Multiply two kernels (either on the same space, or on the tensor
         product of the input space).
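
Only the default name of the combined kernel changes here. A sketch of the effect (the printed name is an assumption based on this diff):

    k = GPy.kern.RBF(1) * GPy.kern.Matern32(1)   # __mul__ dispatches to prod()
    print(k.name)                                # previously 'prod', now 'mul'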

View file

@@ -17,7 +17,7 @@ class Prod(CombinationKernel):
    :rtype: kernel object
    """
-    def __init__(self, kernels, name='prod'):
+    def __init__(self, kernels, name='mul'):
        assert len(kernels) == 2, 'only implemented for two kernels as of yet'
        super(Prod, self).__init__(kernels, name)
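
Since Prod asserts exactly two kernels, products of three or more arise by nesting (assuming the operator overloading shown above):

    k = k1 * k2 * k3   # evaluates as (k1 * k2) * k3: a Prod whose left factor is itself a Prod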

View file

@@ -15,13 +15,13 @@ from ..likelihoods import Gaussian
 class MRD(Model):
     """
-    Apply MRD to all given datasets Y in Ylist.
+    Apply MRD to all given datasets Y in Ylist.
     Y_i in [n x p_i]
-    The samples n in the datasets need
+    The samples n in the datasets need
     to match up, whereas the dimensionality p_d can differ.

     :param [array-like] Ylist: List of datasets to apply MRD on
     :param input_dim: latent dimensionality
     :type input_dim: int
@@ -45,13 +45,12 @@ class MRD(Model):
     :param str name: the name of this model
     :param [str] Ynames: the names for the datasets given, must have the same length as Ylist, or None
     """
-    def __init__(self, Ylist, input_dim, X=None, X_variance=None,
+    def __init__(self, Ylist, input_dim, X=None, X_variance=None,
                  initx = 'PCA', initz = 'permute',
-                 num_inducing=10, Z=None, kernel=None,
+                 num_inducing=10, Z=None, kernel=None,
                  inference_method=None, likelihood=None, name='mrd', Ynames=None):
         super(MRD, self).__init__(name)

         # sort out the kernels
         if kernel is None:
             from ..kern import RBF
@@ -64,23 +63,23 @@ class MRD(Model):
         self.kern = kernel

         self.input_dim = input_dim
         self.num_inducing = num_inducing
         self.Ylist = Ylist
         self._in_init_ = True
         X = self._init_X(initx, Ylist)
         self.Z = Param('inducing inputs', self._init_Z(initz, X))
         self.num_inducing = self.Z.shape[0] # ensure M==N if M>N

         if X_variance is None:
             X_variance = np.random.uniform(0, .2, X.shape)

         self.variational_prior = NormalPrior()
         self.X = NormalPosterior(X, X_variance)

         if likelihood is None:
             self.likelihood = [Gaussian(name='Gaussian_noise_{}'.format(i)) for i in range(len(Ylist))]
         else: self.likelihood = likelihood

         if inference_method is None:
             self.inference_method = []
             for y in Ylist:
@@ -91,12 +90,12 @@ class MRD(Model):
         else:
             self.inference_method = inference_method
         self.inference_method.set_limit(len(Ylist))

         self.add_parameters(self.X, self.Z)

         if Ynames is None:
             Ynames = ['Y{}'.format(i) for i in range(len(Ylist))]

         for i, n, k, l in itertools.izip(itertools.count(), Ynames, self.kern, self.likelihood):
             p = Parameterized(name=n)
             p.add_parameter(k)
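
For orientation, a hedged construction sketch for the signature in this diff (shapes and keyword values are illustrative; only the argument names are taken from the source):

    import numpy as np
    import GPy

    # two views sharing n=40 samples but with different dimensionalities p_i
    Y1, Y2 = np.random.randn(40, 5), np.random.randn(40, 8)
    m = GPy.models.MRD([Y1, Y2], input_dim=3, num_inducing=10, Ynames=['view0', 'view1'])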

View file

@@ -227,6 +227,16 @@ class KernelGradientTestsContinuous(unittest.TestCase):
         k.randomize()
         self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

+    def test_Prod(self):
+        k = GPy.kern.Matern32([2,3]) * GPy.kern.RBF([0,4]) + GPy.kern.Linear(self.D)
+        k.randomize()
+        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
+
+    def test_Add(self):
+        k = GPy.kern.Matern32([2,3]) + GPy.kern.RBF([0,4]) + GPy.kern.Linear(self.D)
+        k.randomize()
+        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
+
     def test_Matern52(self):
         k = GPy.kern.Matern52(self.D)
         k.randomize()
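
A note on the constructor arguments in these tests: a list selects the input columns the kernel acts on, while an int gives the input dimensionality (my reading of the tests above):

    k1 = GPy.kern.Matern32([2, 3])   # covariance computed from columns 2 and 3 of X only
    k2 = GPy.kern.Linear(10)         # columns 0..9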
@@ -242,31 +252,30 @@
         k.randomize()
         self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

-class KernelGradientTestsContinuous1D(unittest.TestCase):
-    def setUp(self):
-        self.N, self.D = 100, 1
-        self.X = np.random.randn(self.N,self.D)
-        self.X2 = np.random.randn(self.N+10,self.D)
-
-        continuous_kerns = ['RBF', 'Linear']
-        self.kernclasses = [getattr(GPy.kern, s) for s in continuous_kerns]
-
-    def test_PeriodicExponential(self):
-        k = GPy.kern.PeriodicExponential(self.D)
-        k.randomize()
-        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
-
-    def test_PeriodicMatern32(self):
-        k = GPy.kern.PeriodicMatern32(self.D)
-        k.randomize()
-        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
-
-    def test_PeriodicMatern52(self):
-        k = GPy.kern.PeriodicMatern52(self.D)
-        k.randomize()
-        self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
-
-#TODO: turn off grad checking wrt X for indexed kernels like coregionalize
+#TODO: turn off grad checking wrt X for indexed kernels like coregionalize
+# class KernelGradientTestsContinuous1D(unittest.TestCase):
+#     def setUp(self):
+#         self.N, self.D = 100, 1
+#         self.X = np.random.randn(self.N,self.D)
+#         self.X2 = np.random.randn(self.N+10,self.D)
+#
+#         continuous_kerns = ['RBF', 'Linear']
+#         self.kernclasses = [getattr(GPy.kern, s) for s in continuous_kerns]
+#
+#     def test_PeriodicExponential(self):
+#         k = GPy.kern.PeriodicExponential(self.D)
+#         k.randomize()
+#         self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
+#
+#     def test_PeriodicMatern32(self):
+#         k = GPy.kern.PeriodicMatern32(self.D)
+#         k.randomize()
+#         self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
+#
+#     def test_PeriodicMatern52(self):
+#         k = GPy.kern.PeriodicMatern52(self.D)
+#         k.randomize()
+#         self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
class KernelTestsMiscellaneous(unittest.TestCase):
@@ -275,7 +284,7 @@ class KernelTestsMiscellaneous(unittest.TestCase):
         N, D = 100, 10
         self.X = np.linspace(-np.pi, +np.pi, N)[:,None] * np.ones(D)
         self.rbf = GPy.kern.RBF(range(2))
-        self.linear = GPy.kern.Linear((3,5,6))
+        self.linear = GPy.kern.Linear((3,6))
         self.matern = GPy.kern.Matern32(np.array([2,4,7]))
         self.sumkern = self.rbf + self.linear
         self.sumkern += self.matern

View file

@@ -541,8 +541,8 @@ class TestNoiseModels(object):
         #import ipdb; ipdb.set_trace()
         #NOTE this test appears to be stochastic for some likelihoods (student t?)
         # appears to all be working in test mode right now...
-        if isinstance(model, GPy.likelihoods.StudentT):
-            import ipdb;ipdb.set_trace()
+        #if isinstance(model, GPy.likelihoods.StudentT):
+        #    import ipdb;ipdb.set_trace()
         assert m.checkgrad(step=step)

     ###########

View file

@@ -63,10 +63,11 @@ class GradientTests(unittest.TestCase):
         mlp = GPy.kern.MLP(1)
         self.check_model(mlp, model_type='GPRegression', dimension=1)

-    def test_GPRegression_poly_1d(self):
-        ''' Testing the GP regression with polynomial kernel with white kernel on 1d data '''
-        mlp = GPy.kern.Poly(1, degree=5)
-        self.check_model(mlp, model_type='GPRegression', dimension=1)
+    #TODO:
+    #def test_GPRegression_poly_1d(self):
+    #    ''' Testing the GP regression with polynomial kernel with white kernel on 1d data '''
+    #    mlp = GPy.kern.Poly(1, degree=5)
+    #    self.check_model(mlp, model_type='GPRegression', dimension=1)

     def test_GPRegression_matern52_1D(self):
         ''' Testing the GP regression with matern52 kernel on 1d data '''