diff --git a/GPy/core/sparse_gp.py b/GPy/core/sparse_gp.py index e227625d..9ce5c391 100644 --- a/GPy/core/sparse_gp.py +++ b/GPy/core/sparse_gp.py @@ -49,7 +49,7 @@ class SparseGP(GP): else: #inference_method = ?? raise NotImplementedError("what to do what to do?") - print("defaulting to ", inference_method, "for latent function inference") + print("defaulting to", inference_method, "for latent function inference") self.Z = Param('inducing inputs', Z) self.num_inducing = Z.shape[0] @@ -159,7 +159,7 @@ class SparseGP(GP): mu = np.dot(psi1_star, la) # TODO: dimensions? if full_cov: - raise NotImplementedError, "Full covariance for Sparse GP predicted with uncertain inputs not implemented yet." + raise NotImplementedError("Full covariance for Sparse GP predicted with uncertain inputs not implemented yet.") var = np.empty((Xnew.shape[0], la.shape[1], la.shape[1])) di = np.diag_indices(la.shape[1]) else: diff --git a/GPy/inference/latent_function_inference/laplace.py b/GPy/inference/latent_function_inference/laplace.py index 00a2c2b0..2f089141 100644 --- a/GPy/inference/latent_function_inference/laplace.py +++ b/GPy/inference/latent_function_inference/laplace.py @@ -171,7 +171,7 @@ class Laplace(LatentFunctionInference): #define the objective function (to be maximised) def obj(Ki_f, f): ll = -0.5*np.sum(np.dot(Ki_f.T, f)) + np.sum(likelihood.logpdf(f, Y, Y_metadata=Y_metadata)) - print ll + print(ll) if np.isnan(ll): import ipdb; ipdb.set_trace() # XXX BREAKPOINT return -np.inf diff --git a/GPy/likelihoods/gaussian.py b/GPy/likelihoods/gaussian.py index 424a7f5a..e1299f73 100644 --- a/GPy/likelihoods/gaussian.py +++ b/GPy/likelihoods/gaussian.py @@ -48,7 +48,7 @@ class Gaussian(Likelihood): def betaY(self,Y,Y_metadata=None): #TODO: ~Ricardo this does not live here - raise RuntimeError("Please notify the GPy developers, this should not happen") return Y/self.gaussian_variance(Y_metadata) 
def gaussian_variance(self, Y_metadata=None):