Mirror of https://github.com/SheffieldML/GPy.git (synced 2026-05-12 05:22:38 +02:00)

Commit ea787fd376: manual merging
130 changed files with 982 additions and 787 deletions

@@ -46,20 +46,20 @@ def test_models():
     for loader, module_name, is_pkg in pkgutil.iter_modules([examples_path]):
         # Load examples
         module_examples = loader.find_module(module_name).load_module(module_name)
-        print "MODULE", module_examples
-        print "Before"
-        print inspect.getmembers(module_examples, predicate=inspect.isfunction)
+        print("MODULE", module_examples)
+        print("Before")
+        print(inspect.getmembers(module_examples, predicate=inspect.isfunction))
         functions = [ func for func in inspect.getmembers(module_examples, predicate=inspect.isfunction) if func[0].startswith('_') is False ][::-1]
-        print "After"
-        print functions
+        print("After")
+        print(functions)
         for example in functions:
             if example[0] in ['epomeo_gpx']:
                 #These are the edge cases that we might want to handle specially
                 if example[0] == 'epomeo_gpx' and not GPy.util.datasets.gpxpy_available:
-                    print "Skipping as gpxpy is not available to parse GPS"
+                    print("Skipping as gpxpy is not available to parse GPS")
                     continue

-            print "Testing example: ", example[0]
+            print("Testing example: ", example[0])
             # Generate model
             try:
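Note: every removal/addition pair in this hunk is the same mechanical change, Python 2's print statement becoming the print() function that Python 3 requires. A minimal sketch of the portable idiom, assuming the code must keep running under Python 2 as well (the module name below is a hypothetical value, for illustration only):

    from __future__ import print_function    # gives Python 2 the py3 print()

    module_name = "regression"                # hypothetical value
    print("Testing example: ", module_name)   # one call on both interpreters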
@@ -69,7 +69,7 @@ def test_models():
             except Exception as e:
                 failing_models[example[0]] = "Cannot make model: \n{e}".format(e=e)
             else:
-                print models
+                print(models)
                 model_checkgrads.description = 'test_checkgrads_%s' % example[0]
                 try:
                     for model in models:
@@ -89,17 +89,17 @@ def test_models():
                         #yield model_checkgrads, model
                         #yield model_instance, model

-        print "Finished checking module {m}".format(m=module_name)
+        print("Finished checking module {m}".format(m=module_name))
         if len(failing_models.keys()) > 0:
-            print "Failing models: "
-            print failing_models
+            print("Failing models: ")
+            print(failing_models)

     if len(failing_models.keys()) > 0:
-        print failing_models
+        print(failing_models)
         raise Exception(failing_models)


 if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
+    print("Running unit tests, please be (very) patient...")
     # unittest.main()
     test_models()
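Note: the unchanged context line loader.find_module(module_name).load_module(module_name) keeps the old loader protocol, which still runs here but is deprecated on modern Python 3. A hedged sketch of the importlib-based equivalent, assuming GPy.examples is an importable package (this is not what the commit does):

    import importlib
    import pkgutil

    import GPy.examples

    # walk the example modules and import each one by dotted name
    for _, module_name, _ in pkgutil.iter_modules(GPy.examples.__path__):
        module_examples = importlib.import_module("GPy.examples." + module_name)
        print("MODULE", module_examples)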
@@ -121,14 +121,16 @@ class Test(unittest.TestCase):
         self.assertListEqual(removed.tolist(), [0, 2])

     def test_misc(self):
-        for k,v in self.param_index.copy()._properties.iteritems():
+        #py3 fix
+        #for k,v in self.param_index.copy()._properties.iteritems():
+        for k,v in self.param_index.copy()._properties.items():
             self.assertListEqual(self.param_index[k].tolist(), v.tolist())
         self.assertEqual(self.param_index.size, 8)
         self.assertEqual(self.view.size, 5)

     def test_print(self):
-        print self.param_index
-        print self.view
+        print(self.param_index)
+        print(self.view)

 if __name__ == "__main__":
     #import sys;sys.argv = ['', 'Test.test_index_view']
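Note: the iteritems edit above is another recurring pattern in this commit. dict.iteritems() does not exist on Python 3, while dict.items() exists on both, returning a list on 2 and a view on 3; either is fine for plain iteration. A minimal sketch (the dictionary is a hypothetical stand-in):

    properties = {"test1": [0, 1], "test2": [2, 3]}
    for k, v in properties.items():   # portable; iteritems() is Python 2 only
        print(k, v)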
@@ -64,7 +64,7 @@ class InferenceXTestCase(unittest.TestCase):
         m.optimize(max_iters=10000)
         x, mi = m.infer_newX(m.Y)

-        print m.X.mean - mi.X.mean
+        print(m.X.mean - mi.X.mean)
         self.assertTrue(np.allclose(m.X.mean, mi.X.mean, rtol=1e-4, atol=1e-4))
         self.assertTrue(np.allclose(m.X.variance, mi.X.variance, rtol=1e-4, atol=1e-4))

@@ -37,7 +37,7 @@ class Kern_check_model(GPy.core.Model):
     def is_positive_semi_definite(self):
         v = np.linalg.eig(self.kernel.K(self.X))[0]
         if any(v.real<=-1e-10):
-            print v.real.min()
+            print(v.real.min())
             return False
         else:
             return True
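Note: is_positive_semi_definite declares K(X, X) not PSD when some eigenvalue falls below -1e-10. Since a kernel matrix is symmetric, np.linalg.eigvalsh would be a cheaper and numerically steadier choice than np.linalg.eig; a small sketch of that variant (an illustration, not the repository's code):

    import numpy as np

    def is_psd(K, tol=1e-10):
        # eigvalsh assumes a symmetric matrix and returns real eigenvalues
        # in ascending order, so entry 0 is the smallest one
        return np.linalg.eigvalsh(K)[0] > -tol

    K = np.array([[2.0, 1.0], [1.0, 2.0]])
    print(is_psd(K))   # True: the eigenvalues are 1 and 3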
@@ -126,7 +126,7 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Positive definite check failed for " + kern.name + " covariance function.")
+        print(("Positive definite check failed for " + kern.name + " covariance function."))
         pass_checks = False
         assert(result)
         return False
@@ -137,7 +137,7 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:")
+        print(("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:"))
         Kern_check_dK_dtheta(kern, X=X, X2=None).checkgrad(verbose=True)
         pass_checks = False
         assert(result)
@@ -149,7 +149,7 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:")
+        print(("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:"))
         Kern_check_dK_dtheta(kern, X=X, X2=X2).checkgrad(verbose=True)
         pass_checks = False
         assert(result)
@@ -162,11 +162,11 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     except NotImplementedError:
         result=True
         if verbose:
-            print("update_gradients_diag not implemented for " + kern.name)
+            print(("update_gradients_diag not implemented for " + kern.name))
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Gradient of Kdiag(X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:")
+        print(("Gradient of Kdiag(X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:"))
         Kern_check_dKdiag_dtheta(kern, X=X).checkgrad(verbose=True)
         pass_checks = False
         assert(result)
@@ -182,13 +182,12 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     except NotImplementedError:
         result=True
         if verbose:
-            print("gradients_X not implemented for " + kern.name)
+            print(("gradients_X not implemented for " + kern.name))
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Gradient of K(X, X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
+        print(("Gradient of K(X, X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:"))
         testmodel.checkgrad(verbose=True)
-        import ipdb;ipdb.set_trace()
         assert(result)
         pass_checks = False
         return False
@@ -203,11 +202,11 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     except NotImplementedError:
         result=True
         if verbose:
-            print("gradients_X not implemented for " + kern.name)
+            print(("gradients_X not implemented for " + kern.name))
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Gradient of K(X, X2) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
+        print(("Gradient of K(X, X2) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:"))
         testmodel.checkgrad(verbose=True)
         assert(result)
         pass_checks = False
@@ -223,11 +222,11 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     except NotImplementedError:
         result=True
         if verbose:
-            print("gradients_X not implemented for " + kern.name)
+            print(("gradients_X not implemented for " + kern.name))
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Gradient of Kdiag(X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
+        print(("Gradient of Kdiag(X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:"))
         Kern_check_dKdiag_dX(kern, X=X).checkgrad(verbose=True)
         pass_checks = False
         assert(result)
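Note: the print(("...")) double parentheses in these kernel-test hunks look like the footprint of an automated 2to3 pass, which wraps the whole argument of each print statement in parentheses; where the argument was already parenthesized, a redundant pair appears. The result is valid and equivalent, just noisier than hand-written code:

    print(("Check passed."))   # what 2to3 emits for: print ("Check passed.")
    print("Check passed.")     # equivalent hand-written form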
@@ -292,7 +291,7 @@ class KernelGradientTestsContinuous(unittest.TestCase):
         try:
             k.K(self.X)
         except AssertionError:
-            raise AssertionError, "k.K(X) should run on self.D-1 dimension"
+            raise AssertionError("k.K(X) should run on self.D-1 dimension")

     def test_Matern52(self):
         k = GPy.kern.Matern52(self.D)
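Note: raise AssertionError, "message" is Python 2-only syntax and a SyntaxError on Python 3; the call form used by the new line parses on both. A minimal sketch:

    try:
        raise AssertionError("k.K(X) should run on self.D-1 dimension")
    except AssertionError as e:   # "except X, e" is likewise Python 2 only
        print(e)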
@@ -429,7 +428,7 @@ class KernelTestsProductWithZeroValues(unittest.TestCase):


 if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
+    print("Running unit tests, please be (very) patient...")
     unittest.main()

 # np.random.seed(0)
@@ -27,9 +27,9 @@ def dparam_partial(inst_func, *args):
     param
     """
     def param_func(param_val, param_name, inst_func, args):
-        #inst_func.im_self._set_params(param)
-        #inst_func.im_self.add_parameter(Param(param_name, param_val))
-        inst_func.im_self[param_name] = param_val
+        #inst_func.__self__._set_params(param)
+        #inst_func.__self__.add_parameter(Param(param_name, param_val))
+        inst_func.__self__[param_name] = param_val
         return inst_func(*args)
     return functools.partial(param_func, inst_func=inst_func, args=args)
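Note: im_self was Python 2's attribute for the instance behind a bound method; Python 3 renamed it to __self__, which Python 2.6+ also understands, so the new spelling is the portable one. A small sketch (the class is hypothetical, for illustration only):

    class Likelihood(object):
        def logpdf(self):
            return 0.0

    lik = Likelihood()
    bound = lik.logpdf
    assert bound.__self__ is lik   # py2.6+ and py3; bound.im_self is py2 only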
@@ -44,8 +44,8 @@ def dparam_checkgrad(func, dfunc, params, params_names, args, constraints=None,
     The number of parameters and N is the number of data
     Need to take a slice out from f and a slice out of df
     """
-    print "\n{} likelihood: {} vs {}".format(func.im_self.__class__.__name__,
-                                             func.__name__, dfunc.__name__)
+    print("\n{} likelihood: {} vs {}".format(func.__self__.__class__.__name__,
+                                             func.__name__, dfunc.__name__))
     partial_f = dparam_partial(func, *args)
     partial_df = dparam_partial(dfunc, *args)
     gradchecking = True
@@ -66,7 +66,7 @@ def dparam_checkgrad(func, dfunc, params, params_names, args, constraints=None,
     for fixed_val in range(dfnum):
         #dlik and dlik_dvar gives back 1 value for each
         f_ind = min(fnum, fixed_val+1) - 1
-        print "fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(fnum, dfnum, f_ind, fixed_val)
+        print("fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(fnum, dfnum, f_ind, fixed_val))
         #Make grad checker with this param moving, note that set_params is NOT being called
         #The parameter is being set directly with __setattr__
         #Check only the parameter and function value we wish to check at a time
@@ -83,12 +83,12 @@ def dparam_checkgrad(func, dfunc, params, params_names, args, constraints=None,
                 if grad.grep_param_names(constrain_param):
                     constraint(constrain_param, grad)
                 else:
-                    print "parameter didn't exist"
-                    print constrain_param, " ", constraint
+                    print("parameter didn't exist")
+                    print(constrain_param, " ", constraint)
         if randomize:
             grad.randomize()
         if verbose:
-            print grad
+            print(grad)
         grad.checkgrad(verbose=1)
         if not grad.checkgrad(verbose=True):
             gradchecking = False
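Note: dparam_checkgrad drives a GradientChecker over each likelihood parameter in turn; the underlying idea is a finite-difference comparison against the analytic gradient. A minimal, self-contained sketch of that check (an illustration, not GPy's checkgrad implementation):

    import numpy as np

    def numeric_grad(f, x, eps=1e-6):
        # central differences, one coordinate at a time
        g = np.zeros_like(x)
        for i in range(x.size):
            e = np.zeros_like(x)
            e[i] = eps
            g[i] = (f(x + e) - f(x - e)) / (2 * eps)
        return g

    f = lambda x: np.sum(x ** 2)   # toy objective
    df = lambda x: 2 * x           # its analytic gradient
    x0 = np.random.randn(5)
    print(np.allclose(df(x0), numeric_grad(f, x0), rtol=1e-4))   # True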
@@ -297,7 +297,7 @@ class TestNoiseModels(object):
     def test_scale2_models(self):
         self.setUp()

-        for name, attributes in self.noise_models.iteritems():
+        for name, attributes in self.noise_models.items():
             model = attributes["model"]
             if "grad_params" in attributes:
                 params = attributes["grad_params"]
@@ -373,8 +373,8 @@ class TestNoiseModels(object):
     #############
     @with_setup(setUp, tearDown)
     def t_logpdf(self, model, Y, f, Y_metadata):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         #print model._get_params()
         np.testing.assert_almost_equal(
             model.pdf(f.copy(), Y.copy(), Y_metadata=Y_metadata).prod(),
@@ -383,33 +383,33 @@ class TestNoiseModels(object):

     @with_setup(setUp, tearDown)
     def t_dlogpdf_df(self, model, Y, f, Y_metadata):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         self.description = "\n{}".format(inspect.stack()[0][3])
         logpdf = functools.partial(np.sum(model.logpdf), y=Y, Y_metadata=Y_metadata)
         dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y, Y_metadata=Y_metadata)
         grad = GradientChecker(logpdf, dlogpdf_df, f.copy(), 'g')
         grad.randomize()
-        print model
+        print(model)
         assert grad.checkgrad(verbose=1)

     @with_setup(setUp, tearDown)
     def t_d2logpdf_df2(self, model, Y, f, Y_metadata):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y, Y_metadata=Y_metadata)
         d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y, Y_metadata=Y_metadata)
         grad = GradientChecker(dlogpdf_df, d2logpdf_df2, f.copy(), 'g')
         grad.randomize()
-        print model
+        print(model)
         assert grad.checkgrad(verbose=1)

     @with_setup(setUp, tearDown)
     def t_d3logpdf_df3(self, model, Y, f, Y_metadata):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y, Y_metadata=Y_metadata)
         d3logpdf_df3 = functools.partial(model.d3logpdf_df3, y=Y, Y_metadata=Y_metadata)
         grad = GradientChecker(d2logpdf_df2, d3logpdf_df3, f.copy(), 'g')
         grad.randomize()
-        print model
+        print(model)
         assert grad.checkgrad(verbose=1)

     ##############
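Note: each of these tests uses functools.partial to pin y and Y_metadata so that GradientChecker sees a function of f alone. A sketch of the idiom with a toy function (the density below is hypothetical, for illustration only):

    import functools

    def logpdf(f, y, scale=1.0):
        return -((f - y) ** 2) / (2 * scale)

    logpdf_of_f = functools.partial(logpdf, y=0.5)   # y fixed, f left free
    print(logpdf_of_f(1.0))                          # -0.125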
@@ -417,8 +417,8 @@ class TestNoiseModels(object):
     ##############
     @with_setup(setUp, tearDown)
     def t_dlogpdf_dparams(self, model, Y, f, Y_metadata, params, params_names, param_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         assert (
             dparam_checkgrad(model.logpdf, model.dlogpdf_dtheta,
                 params, params_names, args=(f, Y, Y_metadata), constraints=param_constraints,
@@ -427,8 +427,8 @@ class TestNoiseModels(object):

     @with_setup(setUp, tearDown)
     def t_dlogpdf_df_dparams(self, model, Y, f, Y_metadata, params, params_names, param_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         assert (
             dparam_checkgrad(model.dlogpdf_df, model.dlogpdf_df_dtheta,
                 params, params_names, args=(f, Y, Y_metadata), constraints=param_constraints,
@@ -437,8 +437,8 @@ class TestNoiseModels(object):

     @with_setup(setUp, tearDown)
     def t_d2logpdf2_df2_dparams(self, model, Y, f, Y_metadata, params, params_names, param_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         assert (
             dparam_checkgrad(model.d2logpdf_df2, model.d2logpdf_df2_dtheta,
                 params, params_names, args=(f, Y, Y_metadata), constraints=param_constraints,
@@ -450,7 +450,7 @@ class TestNoiseModels(object):
     ################
     @with_setup(setUp, tearDown)
     def t_dlogpdf_dlink(self, model, Y, f, Y_metadata, link_f_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         logpdf = functools.partial(model.logpdf_link, y=Y, Y_metadata=Y_metadata)
         dlogpdf_dlink = functools.partial(model.dlogpdf_dlink, y=Y, Y_metadata=Y_metadata)
         grad = GradientChecker(logpdf, dlogpdf_dlink, f.copy(), 'g')
@@ -460,13 +460,13 @@ class TestNoiseModels(object):
             constraint('g', grad)

         grad.randomize()
-        print grad
-        print model
+        print(grad)
+        print(model)
         assert grad.checkgrad(verbose=1)

     @with_setup(setUp, tearDown)
     def t_d2logpdf_dlink2(self, model, Y, f, Y_metadata, link_f_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         dlogpdf_dlink = functools.partial(model.dlogpdf_dlink, y=Y, Y_metadata=Y_metadata)
         d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y, Y_metadata=Y_metadata)
         grad = GradientChecker(dlogpdf_dlink, d2logpdf_dlink2, f.copy(), 'g')
@@ -476,13 +476,13 @@ class TestNoiseModels(object):
             constraint('g', grad)

         grad.randomize()
-        print grad
-        print model
+        print(grad)
+        print(model)
         assert grad.checkgrad(verbose=1)

     @with_setup(setUp, tearDown)
     def t_d3logpdf_dlink3(self, model, Y, f, Y_metadata, link_f_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y, Y_metadata=Y_metadata)
         d3logpdf_dlink3 = functools.partial(model.d3logpdf_dlink3, y=Y, Y_metadata=Y_metadata)
         grad = GradientChecker(d2logpdf_dlink2, d3logpdf_dlink3, f.copy(), 'g')
@@ -492,8 +492,8 @@ class TestNoiseModels(object):
             constraint('g', grad)

         grad.randomize()
-        print grad
-        print model
+        print(grad)
+        print(model)
         assert grad.checkgrad(verbose=1)

     #################
@@ -501,8 +501,8 @@ class TestNoiseModels(object):
     #################
     @with_setup(setUp, tearDown)
     def t_dlogpdf_link_dparams(self, model, Y, f, Y_metadata, params, param_names, param_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         assert (
             dparam_checkgrad(model.logpdf_link, model.dlogpdf_link_dtheta,
                 params, param_names, args=(f, Y, Y_metadata), constraints=param_constraints,
@@ -511,8 +511,8 @@ class TestNoiseModels(object):

     @with_setup(setUp, tearDown)
     def t_dlogpdf_dlink_dparams(self, model, Y, f, Y_metadata, params, param_names, param_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         assert (
             dparam_checkgrad(model.dlogpdf_dlink, model.dlogpdf_dlink_dtheta,
                 params, param_names, args=(f, Y, Y_metadata), constraints=param_constraints,
@@ -521,8 +521,8 @@ class TestNoiseModels(object):

     @with_setup(setUp, tearDown)
     def t_d2logpdf2_dlink2_dparams(self, model, Y, f, Y_metadata, params, param_names, param_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         assert (
             dparam_checkgrad(model.d2logpdf_dlink2, model.d2logpdf_dlink2_dtheta,
                 params, param_names, args=(f, Y, Y_metadata), constraints=param_constraints,
@@ -534,7 +534,7 @@ class TestNoiseModels(object):
     ################
     @with_setup(setUp, tearDown)
     def t_laplace_fit_rbf_white(self, model, X, Y, f, Y_metadata, step, param_vals, param_names, constraints):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         #Normalize
         Y = Y/Y.max()
         white_var = 1e-5
@@ -548,7 +548,7 @@ class TestNoiseModels(object):
         for constrain_param, constraint in constraints:
             constraint(constrain_param, m)

-        print m
+        print(m)
         m.randomize()
         m.randomize()

@@ -558,7 +558,7 @@ class TestNoiseModels(object):
         m[name] = param_vals[param_num]

         #m.optimize(max_iters=8)
-        print m
+        print(m)
         #if not m.checkgrad(step=step):
         #m.checkgrad(verbose=1, step=step)
         #NOTE this test appears to be stochastic for some likelihoods (student t?)
@@ -571,7 +571,7 @@ class TestNoiseModels(object):
     ###########
     @with_setup(setUp, tearDown)
     def t_ep_fit_rbf_white(self, model, X, Y, f, Y_metadata, step, param_vals, param_names, constraints):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         #Normalize
         Y = Y/Y.max()
         white_var = 1e-6
@@ -587,7 +587,7 @@ class TestNoiseModels(object):
             constraints[param_num](name, m)

         m.randomize()
-        print m
+        print(m)
         assert m.checkgrad(verbose=1, step=step)


@@ -624,7 +624,7 @@ class LaplaceTests(unittest.TestCase):
         self.X = None

     def test_gaussian_d2logpdf_df2_2(self):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         self.Y = None

         self.N = 2
@@ -673,17 +673,17 @@ class LaplaceTests(unittest.TestCase):
         m2.randomize()

         if debug:
-            print m1
-            print m2
+            print(m1)
+            print(m2)

         optimizer = 'scg'
-        print "Gaussian"
+        print("Gaussian")
         m1.optimize(optimizer, messages=debug, ipython_notebook=False)
-        print "Laplace Gaussian"
+        print("Laplace Gaussian")
         m2.optimize(optimizer, messages=debug, ipython_notebook=False)
         if debug:
-            print m1
-            print m2
+            print(m1)
+            print(m2)

         m2[:] = m1[:]

@@ -730,5 +730,5 @@ class LaplaceTests(unittest.TestCase):
         self.assertTrue(m2.checkgrad(verbose=True))

 if __name__ == "__main__":
-    print "Running unit tests"
+    print("Running unit tests")
     unittest.main()
@@ -26,11 +26,6 @@ class MappingGradChecker(GPy.core.Model):
         self.mapping.update_gradients(self.dL_dY, self.X)

-
-
-
-
-

 class MappingTests(unittest.TestCase):

     def test_kernelmapping(self):
@@ -68,5 +63,5 @@ class MappingTests(unittest.TestCase):


 if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
+    print("Running unit tests, please be (very) patient...")
     unittest.main()
@@ -153,19 +153,19 @@ class MiscTests(unittest.TestCase):
     def test_big_model(self):
         m = GPy.examples.dimensionality_reduction.mrd_simulation(optimize=0, plot=0, plot_sim=0)
         m.X.fix()
-        print m
+        print(m)
         m.unfix()
         m.checkgrad()
-        print m
+        print(m)
         m.fix()
-        print m
+        print(m)
         m.inducing_inputs.unfix()
-        print m
+        print(m)
         m.checkgrad()
         m.unfix()
         m.checkgrad()
         m.checkgrad()
-        print m
+        print(m)

     def test_model_set_params(self):
         m = GPy.models.GPRegression(self.X, self.Y)
@@ -176,7 +176,7 @@ class MiscTests(unittest.TestCase):
         m['.*var'] -= .1
         np.testing.assert_equal(m.kern.lengthscale, lengthscale)
         m.optimize()
-        print m
+        print(m)

     def test_model_updates(self):
         Y1 = np.random.normal(0, 1, (40, 13))
@@ -201,7 +201,7 @@ class MiscTests(unittest.TestCase):
         Y = np.sin(X) + np.random.randn(20, 1) * 0.05
         m = GPy.models.GPRegression(X, Y)
         m.optimize()
-        print m
+        print(m)

 class GradientTests(np.testing.TestCase):
     def setUp(self):
@@ -523,5 +523,5 @@ class GradientTests(np.testing.TestCase):


 if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
+    print("Running unit tests, please be (very) patient...")
     unittest.main()
@@ -84,7 +84,7 @@ except:


 if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
+    print("Running unit tests, please be (very) patient...")
     try:
         import mpi4py
         unittest.main()
@@ -12,6 +12,7 @@ from GPy.core.parameterization.transformations import NegativeLogexp, Logistic
 from GPy.core.parameterization.parameterized import Parameterized
 from GPy.core.parameterization.param import Param
 from GPy.core.parameterization.index_operations import ParameterIndexOperations
+from functools import reduce

 class ArrayCoreTest(unittest.TestCase):
     def setUp(self):
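Note: the added from functools import reduce covers Python 3 removing reduce from the builtins; on Python 2 the same function already lives in functools too, so the import is harmless there. Sketch:

    from functools import reduce

    print(reduce(lambda a, b: a + b, [1, 2, 3, 4]))   # 10 on both 2 and 3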
@@ -107,7 +108,7 @@ class ParameterizedTest(unittest.TestCase):
         self.assertListEqual(self.white._fixes_.tolist(), [FIXED])
         self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
         self.assertIs(self.test1.constraints, self.param.constraints._param_index_ops)
-        self.assertListEqual(self.test1.constraints[Logexp()].tolist(), range(self.param.size, self.param.size+self.rbf.size))
+        self.assertListEqual(self.test1.constraints[Logexp()].tolist(), list(range(self.param.size, self.param.size+self.rbf.size)))

     def test_remove_parameter_param_array_grad_array(self):
         val = self.test1.kern.param_array.copy()
@@ -120,15 +121,15 @@ class ParameterizedTest(unittest.TestCase):
     def test_default_constraints(self):
         self.assertIs(self.rbf.variance.constraints._param_index_ops, self.rbf.constraints._param_index_ops)
         self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
-        self.assertListEqual(self.rbf.constraints.indices()[0].tolist(), range(2))
+        self.assertListEqual(self.rbf.constraints.indices()[0].tolist(), list(range(2)))
         from GPy.core.parameterization.transformations import Logexp
         kern = self.test1.kern
         self.test1.unlink_parameter(kern)
-        self.assertListEqual(kern.constraints[Logexp()].tolist(), range(3))
+        self.assertListEqual(kern.constraints[Logexp()].tolist(), list(range(3)))

     def test_constraints(self):
         self.rbf.constrain(GPy.transformations.Square(), False)
-        self.assertListEqual(self.test1.constraints[GPy.transformations.Square()].tolist(), range(self.param.size, self.param.size+self.rbf.size))
+        self.assertListEqual(self.test1.constraints[GPy.transformations.Square()].tolist(), list(range(self.param.size, self.param.size+self.rbf.size)))
         self.assertListEqual(self.test1.constraints[GPy.transformations.Logexp()].tolist(), [self.param.size+self.rbf.size])

         self.test1.kern.unlink_parameter(self.rbf)
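Note: the list(range(...)) wrappers in this hunk exist because Python 3's range is a lazy sequence object, so assertListEqual against a bare range fails there even when the values match; materializing with list() is portable. A minimal sketch:

    import unittest

    class RangeExample(unittest.TestCase):
        def test_range_vs_list(self):
            self.assertListEqual([0, 1, 2], list(range(3)))   # passes on 2 and 3
            # self.assertListEqual([0, 1, 2], range(3))       # fails on Python 3

    if __name__ == "__main__":
        unittest.main()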
@@ -181,8 +182,8 @@ class ParameterizedTest(unittest.TestCase):

     def test_add_parameter_in_hierarchy(self):
         self.test1.kern.rbf.link_parameter(Param("NEW", np.random.rand(2), NegativeLogexp()), 1)
-        self.assertListEqual(self.test1.constraints[NegativeLogexp()].tolist(), range(self.param.size+1, self.param.size+1 + 2))
-        self.assertListEqual(self.test1.constraints[GPy.transformations.Logistic(0,1)].tolist(), range(self.param.size))
+        self.assertListEqual(self.test1.constraints[NegativeLogexp()].tolist(), list(range(self.param.size+1, self.param.size+1 + 2)))
+        self.assertListEqual(self.test1.constraints[GPy.transformations.Logistic(0,1)].tolist(), list(range(self.param.size)))
         self.assertListEqual(self.test1.constraints[GPy.transformations.Logexp(0,1)].tolist(), np.r_[50, 53:55].tolist())

     def test_regular_expression_misc(self):
@@ -240,7 +241,7 @@ class ParameterizedTest(unittest.TestCase):
         self.p2.constrain_positive()

         m = TestLikelihood()
-        print m
+        print(m)
         val = m.p1.values.copy()
         self.assert_(m.p1.is_fixed)
         self.assert_(m.constraints[GPy.constraints.Logexp()].tolist(), [1])
@@ -248,9 +249,9 @@ class ParameterizedTest(unittest.TestCase):
         self.assertEqual(m.p1, val)

     def test_printing(self):
-        print self.test1
-        print self.param
-        print self.test1['']
+        print(self.test1)
+        print(self.param)
+        print(self.test1[''])

 if __name__ == "__main__":
     #import sys;sys.argv = ['', 'Test.test_add_parameter']
@@ -19,6 +19,7 @@ from GPy.kern._src.static import Bias, White
 from GPy.examples.dimensionality_reduction import mrd_simulation
 from GPy.core.parameterization.variational import NormalPosterior
 from GPy.models.gp_regression import GPRegression
+from functools import reduce

 def toy_model():
     X = np.linspace(0,1,50)[:, None]
@@ -28,18 +29,25 @@ def toy_model():

 class ListDictTestCase(unittest.TestCase):
     def assertListDictEquals(self, d1, d2, msg=None):
-        for k,v in d1.iteritems():
+        #py3 fix
+        #for k,v in d1.iteritems():
+        for k,v in d1.items():
             self.assertListEqual(list(v), list(d2[k]), msg)
     def assertArrayListEquals(self, l1, l2):
-        for a1, a2 in itertools.izip(l1,l2):
+        for a1, a2 in zip(l1,l2):
             np.testing.assert_array_equal(a1, a2)

 class Test(ListDictTestCase):
     def test_parameter_index_operations(self):
         pio = ParameterIndexOperations(dict(test1=np.array([4,3,1,6,4]), test2=np.r_[2:130]))
         piov = ParameterIndexOperationsView(pio, 20, 250)
-        self.assertListDictEquals(dict(piov.items()), dict(piov.copy().iteritems()))
-        self.assertListDictEquals(dict(pio.iteritems()), dict(pio.copy().items()))
+        #py3 fix
+        #self.assertListDictEquals(dict(piov.items()), dict(piov.copy().iteritems()))
+        self.assertListDictEquals(dict(piov.items()), dict(piov.copy().items()))
+
+        #py3 fix
+        #self.assertListDictEquals(dict(pio.iteritems()), dict(pio.copy().items()))
+        self.assertListDictEquals(dict(pio.items()), dict(pio.copy().items()))

         self.assertArrayListEquals(pio.copy().indices(), pio.indices())
         self.assertArrayListEquals(piov.copy().indices(), piov.indices())
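Note: itertools.izip disappeared in Python 3, where the builtin zip is already lazy; plain zip runs on both (on Python 2 it builds a list, which costs nothing for test-sized inputs). Sketch:

    for a, b in zip([1, 2], [10, 20]):   # itertools.izip exists on Python 2 only
        print(a + b)                     # 11, then 22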
@@ -54,7 +62,9 @@ class Test(ListDictTestCase):
         pickle.dump(piov, f)
         f.seek(0)
         pio2 = pickle.load(f)
-        self.assertListDictEquals(dict(piov.items()), dict(pio2.iteritems()))
+        #py3 fix
+        #self.assertListDictEquals(dict(piov.items()), dict(pio2.iteritems()))
+        self.assertListDictEquals(dict(piov.items()), dict(pio2.items()))

     def test_param(self):
         param = Param('test', np.arange(4*2).reshape(4,2))
@@ -110,5 +110,5 @@ class PriorTests(unittest.TestCase):


 if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
+    print("Running unit tests, please be (very) patient...")
     unittest.main()