Mirror of https://github.com/SheffieldML/GPy.git (synced 2026-05-12 05:22:38 +02:00)
Print fixes for Python 3

commit 09c93e62d0 (parent 4c3d68b761)
8 changed files with 90 additions and 90 deletions
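Note: the converted calls below are plain Python 3 syntax, and nothing in the diff adds `from __future__ import print_function`. On a Python 2 interpreter a multi-argument call such as `print("Testing example: ", example[0])` therefore prints a tuple. A minimal sketch of the usual compatibility guard (the literal arguments are illustrative, not from the commit):

    from __future__ import print_function

    # Without the future import, Python 2 parses print("a", "b") as the print
    # statement applied to the tuple ("a", "b") and emits ('a', 'b').
    # With it (or on Python 3), print() is a function and emits: a b
    print("Testing example: ", "rbf")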
@@ -46,20 +46,20 @@ def test_models():
     for loader, module_name, is_pkg in pkgutil.iter_modules([examples_path]):
         # Load examples
         module_examples = loader.find_module(module_name).load_module(module_name)
-        print "MODULE", module_examples
-        print "Before"
-        print inspect.getmembers(module_examples, predicate=inspect.isfunction)
+        print("MODULE", module_examples)
+        print("Before")
+        print(inspect.getmembers(module_examples, predicate=inspect.isfunction))
         functions = [ func for func in inspect.getmembers(module_examples, predicate=inspect.isfunction) if func[0].startswith('_') is False ][::-1]
-        print "After"
-        print functions
+        print("After")
+        print(functions)
         for example in functions:
             if example[0] in ['epomeo_gpx']:
                 #These are the edge cases that we might want to handle specially
                 if example[0] == 'epomeo_gpx' and not GPy.util.datasets.gpxpy_available:
-                    print "Skipping as gpxpy is not available to parse GPS"
+                    print("Skipping as gpxpy is not available to parse GPS")
                     continue

-            print "Testing example: ", example[0]
+            print("Testing example: ", example[0])
             # Generate model

             try:
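The unchanged context still uses `loader.find_module(module_name).load_module(module_name)`, which is deprecated in Python 3 (and removed in 3.12). A hedged sketch of the `importlib` equivalent, assuming the examples live in an importable `GPy.examples` package; this is not part of the commit:

    import importlib
    import pkgutil

    import GPy.examples

    # Walk the example modules exactly as the test loop does, but import
    # through importlib instead of the deprecated loader API.
    for _, module_name, _ in pkgutil.iter_modules(GPy.examples.__path__):
        module_examples = importlib.import_module('GPy.examples.' + module_name)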
@@ -69,7 +69,7 @@ def test_models():
             except Exception as e:
                 failing_models[example[0]] = "Cannot make model: \n{e}".format(e=e)
             else:
-                print models
+                print(models)
                 model_checkgrads.description = 'test_checkgrads_%s' % example[0]
                 try:
                     for model in models:
@@ -89,17 +89,17 @@ def test_models():
                 #yield model_checkgrads, model
                 #yield model_instance, model

-        print "Finished checking module {m}".format(m=module_name)
+        print("Finished checking module {m}".format(m=module_name))
         if len(failing_models.keys()) > 0:
-            print "Failing models: "
-            print failing_models
+            print("Failing models: ")
+            print(failing_models)

     if len(failing_models.keys()) > 0:
-        print failing_models
+        print(failing_models)
         raise Exception(failing_models)


 if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
+    print("Running unit tests, please be (very) patient...")
     # unittest.main()
     test_models()
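Side note on the unchanged guard: `len(failing_models.keys()) > 0` works on both Python lines, but an empty dict is already falsy, so the check can be written directly. A standalone illustration, not from the commit:

    failing_models = {}  # name reused from the test for illustration

    if failing_models:  # equivalent to len(failing_models.keys()) > 0
        print("Failing models: ")
        print(failing_models)
        raise Exception(failing_models)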
@@ -127,8 +127,8 @@ class Test(unittest.TestCase):
         self.assertEqual(self.view.size, 5)

     def test_print(self):
-        print self.param_index
-        print self.view
+        print(self.param_index)
+        print(self.view)

 if __name__ == "__main__":
     #import sys;sys.argv = ['', 'Test.test_index_view']
@@ -37,7 +37,7 @@ class Kern_check_model(GPy.core.Model):
     def is_positive_semi_definite(self):
         v = np.linalg.eig(self.kernel.K(self.X))[0]
         if any(v.real<=-1e-10):
-            print v.real.min()
+            print(v.real.min())
             return False
         else:
             return True
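The PSD check above takes eigenvalues via `np.linalg.eig` and inspects the real parts. Since a kernel matrix K(X, X) is symmetric, `np.linalg.eigvalsh` is the cheaper fit: it assumes symmetry and returns real eigenvalues in ascending order. A minimal sketch of the same check under that assumption, not the repository's implementation:

    import numpy as np

    def is_positive_semi_definite(K, tol=-1e-10):
        # eigvalsh: symmetric input, real eigenvalues, ascending order
        v = np.linalg.eigvalsh(K)
        if v[0] <= tol:
            print(v[0])  # most negative eigenvalue
            return False
        return True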
@@ -126,7 +126,7 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Positive definite check failed for " + kern.name + " covariance function.")
+        print(("Positive definite check failed for " + kern.name + " covariance function."))
         pass_checks = False
         assert(result)
         return False
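The doubled parentheses introduced here, `print((...))`, are the fingerprint of 2to3's print fixer: it wraps every print statement's argument list in a call, even when the argument was already parenthesized. The result is harmless (a parenthesized expression), just redundant. A small illustration with a hypothetical stand-in for `kern.name`:

    kern_name = "rbf"  # stand-in for kern.name, not from the commit

    # 2to3 output and the idiomatic form print the same single string:
    print(("Positive definite check failed for " + kern_name + " covariance function."))
    print("Positive definite check failed for " + kern_name + " covariance function.")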
@@ -137,7 +137,7 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:")
+        print(("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:"))
         Kern_check_dK_dtheta(kern, X=X, X2=None).checkgrad(verbose=True)
         pass_checks = False
         assert(result)
@@ -149,7 +149,7 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:")
+        print(("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:"))
         Kern_check_dK_dtheta(kern, X=X, X2=X2).checkgrad(verbose=True)
         pass_checks = False
         assert(result)
@@ -162,11 +162,11 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     except NotImplementedError:
         result=True
         if verbose:
-            print("update_gradients_diag not implemented for " + kern.name)
+            print(("update_gradients_diag not implemented for " + kern.name))
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Gradient of Kdiag(X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:")
+        print(("Gradient of Kdiag(X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:"))
         Kern_check_dKdiag_dtheta(kern, X=X).checkgrad(verbose=True)
         pass_checks = False
         assert(result)
@@ -182,11 +182,11 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     except NotImplementedError:
         result=True
         if verbose:
-            print("gradients_X not implemented for " + kern.name)
+            print(("gradients_X not implemented for " + kern.name))
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Gradient of K(X, X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
+        print(("Gradient of K(X, X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:"))
         testmodel.checkgrad(verbose=True)
         import ipdb;ipdb.set_trace()
         assert(result)
@@ -203,11 +203,11 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     except NotImplementedError:
         result=True
         if verbose:
-            print("gradients_X not implemented for " + kern.name)
+            print(("gradients_X not implemented for " + kern.name))
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Gradient of K(X, X2) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
+        print(("Gradient of K(X, X2) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:"))
         testmodel.checkgrad(verbose=True)
         assert(result)
         pass_checks = False
@@ -223,11 +223,11 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     except NotImplementedError:
         result=True
         if verbose:
-            print("gradients_X not implemented for " + kern.name)
+            print(("gradients_X not implemented for " + kern.name))
     if result and verbose:
         print("Check passed.")
     if not result:
-        print("Gradient of Kdiag(X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
+        print(("Gradient of Kdiag(X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:"))
         Kern_check_dKdiag_dX(kern, X=X).checkgrad(verbose=True)
         pass_checks = False
         assert(result)
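All of these hunks sit inside gradient checks: `checkgrad` compares an analytic gradient against finite differences of the objective. A minimal sketch of that idea, not GPy's implementation:

    import numpy as np

    def numeric_checkgrad(f, df, x, step=1e-6, tol=1e-4):
        """Compare analytic gradient df(x) with central differences of scalar f."""
        x = np.asarray(x, dtype=float)
        analytic = np.asarray(df(x), dtype=float)
        numeric = np.empty_like(analytic)
        for i in range(x.size):
            e = np.zeros_like(x)
            e.flat[i] = step
            numeric.flat[i] = (f(x + e) - f(x - e)) / (2.0 * step)
        return np.allclose(analytic, numeric, rtol=tol, atol=tol)

    # e.g. numeric_checkgrad(lambda x: (x**2).sum(), lambda x: 2*x, np.ones(3)) -> True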
@@ -404,7 +404,7 @@ class Coregionalize_weave_test(unittest.TestCase):


 if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
+    print("Running unit tests, please be (very) patient...")
     unittest.main()
     # np.random.seed(0)
     # N0 = 3
@@ -44,8 +44,8 @@ def dparam_checkgrad(func, dfunc, params, params_names, args, constraints=None,
     The number of parameters and N is the number of data
     Need to take a slice out from f and a slice out of df
     """
-    print "\n{} likelihood: {} vs {}".format(func.im_self.__class__.__name__,
-                                             func.__name__, dfunc.__name__)
+    print("\n{} likelihood: {} vs {}".format(func.im_self.__class__.__name__,
+                                             func.__name__, dfunc.__name__))
     partial_f = dparam_partial(func, *args)
     partial_df = dparam_partial(dfunc, *args)
     gradchecking = True
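This hunk fixes the print but keeps `func.im_self`, a Python 2 bound-method attribute that Python 3 dropped in favour of `__self__` (which Python 2.6+ also provides), so the line still raises AttributeError on Python 3. A portable sketch with a hypothetical helper, not part of the commit:

    def bound_owner_name(func):
        # __self__ works on Python 2.6+ and 3; im_self is Python 2 only.
        obj = getattr(func, "__self__", None) or getattr(func, "im_self", None)
        return obj.__class__.__name__

    class Gaussian(object):
        def logpdf(self):
            return 0.0

    print(bound_owner_name(Gaussian().logpdf))  # -> Gaussian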
@@ -57,7 +57,7 @@ def dparam_checkgrad(func, dfunc, params, params_names, args, constraints=None,
     for fixed_val in range(dfnum):
         #dlik and dlik_dvar gives back 1 value for each
         f_ind = min(fnum, fixed_val+1) - 1
-        print "fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(fnum, dfnum, f_ind, fixed_val)
+        print("fnum: {} dfnum: {} f_ind: {} fixed_val: {}".format(fnum, dfnum, f_ind, fixed_val))
         #Make grad checker with this param moving, note that set_params is NOT being called
         #The parameter is being set directly with __setattr__
         #Check only the parameter and function value we wish to check at a time
@@ -70,12 +70,12 @@ def dparam_checkgrad(func, dfunc, params, params_names, args, constraints=None,
                 if grad.grep_param_names(constrain_param):
                     constraint(constrain_param, grad)
                 else:
-                    print "parameter didn't exist"
-                    print constrain_param, " ", constraint
+                    print("parameter didn't exist")
+                    print(constrain_param, " ", constraint)
        if randomize:
            grad.randomize()
        if verbose:
-            print grad
+            print(grad)
        grad.checkgrad(verbose=1)
        if not grad.checkgrad(verbose=True):
            gradchecking = False
@@ -350,8 +350,8 @@ class TestNoiseModels(object):
     #############
     @with_setup(setUp, tearDown)
     def t_logpdf(self, model, Y, f):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         #print model._get_params()
         np.testing.assert_almost_equal(
             model.pdf(f.copy(), Y.copy()).prod(),
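The recurring `inspect.stack()[0][3]` retrieves the running function's name (field 3 of the first frame record). Building the whole stack just for a name is relatively expensive; the current frame alone carries it. Both spellings in a standalone sketch:

    import inspect

    def t_example():
        via_stack = inspect.stack()[0][3]                  # 't_example'
        via_frame = inspect.currentframe().f_code.co_name  # 't_example', no full stack walk
        return via_stack == via_frame

    print(t_example())  # -> True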
@@ -360,33 +360,33 @@ class TestNoiseModels(object):

     @with_setup(setUp, tearDown)
     def t_dlogpdf_df(self, model, Y, f):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         self.description = "\n{}".format(inspect.stack()[0][3])
         logpdf = functools.partial(model.logpdf, y=Y)
         dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y)
         grad = GradientChecker(logpdf, dlogpdf_df, f.copy(), 'g')
         grad.randomize()
-        print model
+        print(model)
         assert grad.checkgrad(verbose=1)

     @with_setup(setUp, tearDown)
     def t_d2logpdf_df2(self, model, Y, f):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         dlogpdf_df = functools.partial(model.dlogpdf_df, y=Y)
         d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y)
         grad = GradientChecker(dlogpdf_df, d2logpdf_df2, f.copy(), 'g')
         grad.randomize()
-        print model
+        print(model)
         assert grad.checkgrad(verbose=1)

     @with_setup(setUp, tearDown)
     def t_d3logpdf_df3(self, model, Y, f):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         d2logpdf_df2 = functools.partial(model.d2logpdf_df2, y=Y)
         d3logpdf_df3 = functools.partial(model.d3logpdf_df3, y=Y)
         grad = GradientChecker(d2logpdf_df2, d3logpdf_df3, f.copy(), 'g')
         grad.randomize()
-        print model
+        print(model)
         assert grad.checkgrad(verbose=1)

     ##############
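These three tests follow one pattern: freeze `y` with `functools.partial`, then hand GradientChecker a function of `f` alone together with its claimed derivative, one derivative order up each time (logpdf vs dlogpdf_df, dlogpdf_df vs d2logpdf_df2, and so on). A toy version of the pattern with a Gaussian-style logpdf; the functions are stand-ins, not GPy's API:

    import functools
    import numpy as np

    def logpdf(f, y):
        return float(-0.5 * np.sum((y - f) ** 2))

    def dlogpdf_df(f, y):
        return y - f

    y = np.array([0.5, -1.0, 2.0])
    func = functools.partial(logpdf, y=y)       # scalar objective of f alone
    dfunc = functools.partial(dlogpdf_df, y=y)  # its claimed gradient

    # Spot-check one coordinate by central difference:
    step = 1e-6
    e = np.zeros(3); e[0] = step
    numeric = (func(np.zeros(3) + e) - func(np.zeros(3) - e)) / (2 * step)
    print(np.isclose(numeric, dfunc(np.zeros(3))[0]))  # -> True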
@@ -394,8 +394,8 @@ class TestNoiseModels(object):
     ##############
     @with_setup(setUp, tearDown)
     def t_dlogpdf_dparams(self, model, Y, f, params, params_names, param_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         assert (
             dparam_checkgrad(model.logpdf, model.dlogpdf_dtheta,
                              params, params_names, args=(f, Y), constraints=param_constraints,
@@ -404,8 +404,8 @@ class TestNoiseModels(object):

     @with_setup(setUp, tearDown)
     def t_dlogpdf_df_dparams(self, model, Y, f, params, params_names, param_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         assert (
             dparam_checkgrad(model.dlogpdf_df, model.dlogpdf_df_dtheta,
                              params, params_names, args=(f, Y), constraints=param_constraints,
@@ -414,8 +414,8 @@ class TestNoiseModels(object):

     @with_setup(setUp, tearDown)
     def t_d2logpdf2_df2_dparams(self, model, Y, f, params, params_names, param_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         assert (
             dparam_checkgrad(model.d2logpdf_df2, model.d2logpdf_df2_dtheta,
                              params, params_names, args=(f, Y), constraints=param_constraints,
@@ -427,7 +427,7 @@ class TestNoiseModels(object):
     ################
     @with_setup(setUp, tearDown)
     def t_dlogpdf_dlink(self, model, Y, f, link_f_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         logpdf = functools.partial(model.logpdf_link, y=Y)
         dlogpdf_dlink = functools.partial(model.dlogpdf_dlink, y=Y)
         grad = GradientChecker(logpdf, dlogpdf_dlink, f.copy(), 'g')
@@ -437,13 +437,13 @@ class TestNoiseModels(object):
             constraint('g', grad)

         grad.randomize()
-        print grad
-        print model
+        print(grad)
+        print(model)
         assert grad.checkgrad(verbose=1)

     @with_setup(setUp, tearDown)
     def t_d2logpdf_dlink2(self, model, Y, f, link_f_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         dlogpdf_dlink = functools.partial(model.dlogpdf_dlink, y=Y)
         d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y)
         grad = GradientChecker(dlogpdf_dlink, d2logpdf_dlink2, f.copy(), 'g')
@@ -453,13 +453,13 @@ class TestNoiseModels(object):
             constraint('g', grad)

         grad.randomize()
-        print grad
-        print model
+        print(grad)
+        print(model)
         assert grad.checkgrad(verbose=1)

     @with_setup(setUp, tearDown)
     def t_d3logpdf_dlink3(self, model, Y, f, link_f_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         d2logpdf_dlink2 = functools.partial(model.d2logpdf_dlink2, y=Y)
         d3logpdf_dlink3 = functools.partial(model.d3logpdf_dlink3, y=Y)
         grad = GradientChecker(d2logpdf_dlink2, d3logpdf_dlink3, f.copy(), 'g')
@@ -469,8 +469,8 @@ class TestNoiseModels(object):
             constraint('g', grad)

         grad.randomize()
-        print grad
-        print model
+        print(grad)
+        print(model)
         assert grad.checkgrad(verbose=1)

     #################
@@ -478,8 +478,8 @@ class TestNoiseModels(object):
     #################
     @with_setup(setUp, tearDown)
     def t_dlogpdf_link_dparams(self, model, Y, f, params, param_names, param_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         assert (
             dparam_checkgrad(model.logpdf_link, model.dlogpdf_link_dtheta,
                              params, param_names, args=(f, Y), constraints=param_constraints,
@@ -488,8 +488,8 @@ class TestNoiseModels(object):

     @with_setup(setUp, tearDown)
     def t_dlogpdf_dlink_dparams(self, model, Y, f, params, param_names, param_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         assert (
             dparam_checkgrad(model.dlogpdf_dlink, model.dlogpdf_dlink_dtheta,
                              params, param_names, args=(f, Y), constraints=param_constraints,
@@ -498,8 +498,8 @@ class TestNoiseModels(object):

     @with_setup(setUp, tearDown)
     def t_d2logpdf2_dlink2_dparams(self, model, Y, f, params, param_names, param_constraints):
-        print "\n{}".format(inspect.stack()[0][3])
-        print model
+        print("\n{}".format(inspect.stack()[0][3]))
+        print(model)
         assert (
             dparam_checkgrad(model.d2logpdf_dlink2, model.d2logpdf_dlink2_dtheta,
                              params, param_names, args=(f, Y), constraints=param_constraints,
@@ -511,7 +511,7 @@ class TestNoiseModels(object):
     ################
     @with_setup(setUp, tearDown)
     def t_laplace_fit_rbf_white(self, model, X, Y, f, step, param_vals, param_names, constraints):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         #Normalize
         Y = Y/Y.max()
         white_var = 1e-6
@@ -524,7 +524,7 @@ class TestNoiseModels(object):
         for constrain_param, constraint in constraints:
             constraint(constrain_param, m)

-        print m
+        print(m)
         m.randomize()

         #Set params
@@ -533,7 +533,7 @@ class TestNoiseModels(object):
             m[name] = param_vals[param_num]

         #m.optimize(max_iters=8)
-        print m
+        print(m)
         #if not m.checkgrad(step=step):
             #m.checkgrad(verbose=1, step=step)
         #NOTE this test appears to be stochastic for some likelihoods (student t?)
@@ -546,7 +546,7 @@ class TestNoiseModels(object):
     ###########
     @with_setup(setUp, tearDown)
     def t_ep_fit_rbf_white(self, model, X, Y, f, step, param_vals, param_names, constraints):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         #Normalize
         Y = Y/Y.max()
         white_var = 1e-6
@@ -561,7 +561,7 @@ class TestNoiseModels(object):
             constraints[param_num](name, m)

         m.randomize()
-        print m
+        print(m)
         assert m.checkgrad(verbose=1, step=step)

@@ -598,7 +598,7 @@ class LaplaceTests(unittest.TestCase):
         self.X = None

     def test_gaussian_d2logpdf_df2_2(self):
-        print "\n{}".format(inspect.stack()[0][3])
+        print("\n{}".format(inspect.stack()[0][3]))
         self.Y = None

         self.N = 2
@@ -648,16 +648,16 @@ class LaplaceTests(unittest.TestCase):
         m2.randomize()

         if debug:
-            print m1
-            print m2
+            print(m1)
+            print(m2)
         optimizer = 'scg'
-        print "Gaussian"
+        print("Gaussian")
         m1.optimize(optimizer, messages=debug)
-        print "Laplace Gaussian"
+        print("Laplace Gaussian")
         m2.optimize(optimizer, messages=debug)
         if debug:
-            print m1
-            print m2
+            print(m1)
+            print(m2)

         m2[:] = m1[:]
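The `if debug: print(...)` pattern converted in this hunk is also commonly expressed with the logging module, where verbosity is configuration rather than a flag threaded through the test. A sketch of that alternative, not part of the commit (the summary string is a hypothetical stand-in for `str(m1)`):

    import logging

    logging.basicConfig(level=logging.DEBUG)  # raise to WARNING to silence
    log = logging.getLogger(__name__)

    model_summary = "m1 ..."      # stand-in for str(m1)
    log.debug("%s", model_summary)  # replaces: if debug: print(m1)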
@@ -706,5 +706,5 @@ class LaplaceTests(unittest.TestCase):
         self.assertTrue(m2.checkgrad(verbose=True))

 if __name__ == "__main__":
-    print "Running unit tests"
+    print("Running unit tests")
     unittest.main()
@@ -153,19 +153,19 @@ class MiscTests(unittest.TestCase):
     def test_big_model(self):
         m = GPy.examples.dimensionality_reduction.mrd_simulation(optimize=0, plot=0, plot_sim=0)
         m.X.fix()
-        print m
+        print(m)
         m.unfix()
         m.checkgrad()
-        print m
+        print(m)
         m.fix()
-        print m
+        print(m)
         m.inducing_inputs.unfix()
-        print m
+        print(m)
         m.checkgrad()
         m.unfix()
         m.checkgrad()
         m.checkgrad()
-        print m
+        print(m)

     def test_model_set_params(self):
         m = GPy.models.GPRegression(self.X, self.Y)
@@ -176,7 +176,7 @@ class MiscTests(unittest.TestCase):
         m['.*var'] -= .1
         np.testing.assert_equal(m.kern.lengthscale, lengthscale)
         m.optimize()
-        print m
+        print(m)

     def test_model_updates(self):
         Y1 = np.random.normal(0, 1, (40, 13))
@@ -201,7 +201,7 @@ class MiscTests(unittest.TestCase):
         Y = np.sin(X) + np.random.randn(20, 1) * 0.05
         m = GPy.models.GPRegression(X, Y)
         m.optimize()
-        print m
+        print(m)

 class GradientTests(np.testing.TestCase):
     def setUp(self):
@@ -523,5 +523,5 @@ class GradientTests(np.testing.TestCase):


 if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
+    print("Running unit tests, please be (very) patient...")
     unittest.main()
@@ -84,7 +84,7 @@ except:


 if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
+    print("Running unit tests, please be (very) patient...")
     try:
         import mpi4py
         unittest.main()
@@ -240,7 +240,7 @@ class ParameterizedTest(unittest.TestCase):
         self.p2.constrain_positive()

         m = TestLikelihood()
-        print m
+        print(m)
         val = m.p1.values.copy()
         self.assert_(m.p1.is_fixed)
         self.assert_(m.constraints[GPy.constraints.Logexp()].tolist(), [1])
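Adjacent context worth flagging: `self.assert_` is a deprecated alias of `assertTrue`, and `assertTrue(x, msg)` treats its second argument as a failure message, so `self.assert_(m.constraints[...].tolist(), [1])` appears never to compare against `[1]`; it only tests truthiness. If a comparison is intended, a sketch of the spelling (illustrative values, not a fix in this commit):

    import unittest

    class Example(unittest.TestCase):
        def test_constraint_indices(self):
            indices = [1]  # stand-in for m.constraints[...].tolist()
            # assert_/assertTrue(indices, [1]) would only check truthiness of indices;
            # for an equality check the supported spelling is:
            self.assertEqual(indices, [1])

    if __name__ == "__main__":
        unittest.main()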
@@ -248,9 +248,9 @@ class ParameterizedTest(unittest.TestCase):
         self.assertEqual(m.p1, val)

     def test_printing(self):
-        print self.test1
-        print self.param
-        print self.test1['']
+        print(self.test1)
+        print(self.param)
+        print(self.test1[''])

 if __name__ == "__main__":
     #import sys;sys.argv = ['', 'Test.test_add_parameter']
@@ -110,5 +110,5 @@ class PriorTests(unittest.TestCase):


 if __name__ == "__main__":
-    print "Running unit tests, please be (very) patient..."
+    print("Running unit tests, please be (very) patient...")
     unittest.main()