From 6cbf810856b9a26d0922a962277bee8f0c0cd93d Mon Sep 17 00:00:00 2001
From: Alan Saul
Date: Mon, 10 Feb 2014 15:40:06 +0000
Subject: [PATCH] Laplace now appears to be grad checking again

---
 GPy/examples/non_gaussian.py                  | 34 +++++++++++--------
 GPy/examples/regression.py                    |  7 ++--
 .../latent_function_inference/laplace.py      | 13 +------
 GPy/likelihoods/poisson.py                    |  7 ++--
 GPy/likelihoods/student_t.py                  |  4 +--
 GPy/testing/likelihood_tests.py               | 15 ++++++--
 6 files changed, 43 insertions(+), 37 deletions(-)

diff --git a/GPy/examples/non_gaussian.py b/GPy/examples/non_gaussian.py
index bda80137..23122691 100644
--- a/GPy/examples/non_gaussian.py
+++ b/GPy/examples/non_gaussian.py
@@ -37,39 +37,43 @@ def student_t_approx(optimize=True, plot=True):

     # Kernel object
     kernel1 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
-    kernel2 = kernel1.copy()
-    kernel3 = kernel1.copy()
-    kernel4 = kernel1.copy()
+    kernel2 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
+    kernel3 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
+    kernel4 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])

     #Gaussian GP model on clean data
     m1 = GPy.models.GPRegression(X, Y.copy(), kernel=kernel1)
     # optimize
     m1.ensure_default_constraints()
-    m1.constrain_fixed('white', 1e-5)
+    m1['white'] = 1e-5
+    m1['white'].constrain_fixed()
     m1.randomize()

     #Gaussian GP model on corrupt data
     m2 = GPy.models.GPRegression(X, Yc.copy(), kernel=kernel2)
     m2.ensure_default_constraints()
-    m2.constrain_fixed('white', 1e-5)
+    m2['white'] = 1e-5
+    m2['white'].constrain_fixed()
     m2.randomize()

     #Student t GP model on clean data
-    t_distribution = GPy.likelihoods.noise_model_constructors.student_t(deg_free=deg_free, sigma2=edited_real_sd)
-    stu_t_likelihood = GPy.likelihoods.Laplace(Y.copy(), t_distribution)
-    m3 = GPy.models.GPRegression(X, Y.copy(), kernel3, likelihood=stu_t_likelihood)
+    t_distribution = GPy.likelihoods.StudentT(deg_free=deg_free, sigma2=edited_real_sd)
+    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
+    m3 = GPy.core.GP(X, Y.copy(), kernel3, likelihood=t_distribution, inference_method=laplace_inf)
     m3.ensure_default_constraints()
-    m3.constrain_bounded('t_noise', 1e-6, 10.)
-    m3.constrain_fixed('white', 1e-5)
+    m3['t_noise'].constrain_bounded(1e-6, 10.)
+    m3['white'] = 1e-5
+    m3['white'].constrain_fixed()
     m3.randomize()

     #Student t GP model on corrupt data
-    t_distribution = GPy.likelihoods.noise_model_constructors.student_t(deg_free=deg_free, sigma2=edited_real_sd)
-    corrupt_stu_t_likelihood = GPy.likelihoods.Laplace(Yc.copy(), t_distribution)
-    m4 = GPy.models.GPRegression(X, Yc.copy(), kernel4, likelihood=corrupt_stu_t_likelihood)
+    t_distribution = GPy.likelihoods.StudentT(deg_free=deg_free, sigma2=edited_real_sd)
+    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
+    m4 = GPy.core.GP(X, Yc.copy(), kernel4, likelihood=t_distribution, inference_method=laplace_inf)
     m4.ensure_default_constraints()
-    m4.constrain_bounded('t_noise', 1e-6, 10.)
-    m4.constrain_fixed('white', 1e-5)
+    m4['t_noise'].constrain_bounded(1e-6, 10.)
+    m4['white'] = 1e-5
+    m4['white'].constrain_fixed()
     m4.randomize()

     if optimize:
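Note: the non_gaussian.py hunk above is the heart of the API change in this patch. The old GPy.likelihoods.Laplace(Y, noise_model) wrapper is gone; the likelihood object and the inference method are now passed to GPy.core.GP separately. A minimal sketch of the new construction pattern, using only names that appear in this patch (the toy data and hyperparameter values are illustrative, not taken from the example itself):

    import numpy as np
    import GPy

    np.random.seed(0)
    X = np.linspace(0, 10, 50)[:, None]
    Y = np.sin(X) + 0.1 * np.random.randn(50, 1)   # illustrative toy data

    # Likelihood and inference method are now separate objects
    t_distribution = GPy.likelihoods.StudentT(deg_free=5, sigma2=0.1)
    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
    kernel = GPy.kern.rbf(1) + GPy.kern.white(1)

    m = GPy.core.GP(X, Y, kernel, likelihood=t_distribution,
                    inference_method=laplace_inf)
    m.ensure_default_constraints()
    m['t_noise'].constrain_bounded(1e-6, 10.)
    m['white'] = 1e-5
    m['white'].constrain_fixed()
    m.optimize()
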
diff --git a/GPy/examples/regression.py b/GPy/examples/regression.py
index 65a50f0e..4dea1342 100644
--- a/GPy/examples/regression.py
+++ b/GPy/examples/regression.py
@@ -281,11 +281,12 @@ def toy_poisson_rbf_1d_laplace(optimize=True, plot=True):
     f_true = np.random.multivariate_normal(np.zeros(x_len), GPy.kern.rbf(1).K(X))
     Y = np.array([np.random.poisson(np.exp(f)) for f in f_true])[:,None]

-    noise_model = GPy.likelihoods.poisson()
-    likelihood = GPy.likelihoods.Laplace(Y,noise_model)
+    kern = GPy.kern.rbf(1)
+    poisson_lik = GPy.likelihoods.Poisson()
+    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()

     # create simple GP Model
-    m = GPy.models.GPRegression(X, Y, likelihood=likelihood)
+    m = GPy.core.GP(X, Y, kernel=kern, likelihood=poisson_lik, inference_method=laplace_inf)

     if optimize:
         m.optimize(optimizer)
diff --git a/GPy/inference/latent_function_inference/laplace.py b/GPy/inference/latent_function_inference/laplace.py
index 82313eab..bc81a86a 100644
--- a/GPy/inference/latent_function_inference/laplace.py
+++ b/GPy/inference/latent_function_inference/laplace.py
@@ -11,9 +11,8 @@
 #http://gaussianprocess.org/gpml/code.

 import numpy as np
-from ...util.linalg import mdot, jitchol, pddet, dpotrs, dtrtrs, dpotri, symmetrify
+from ...util.linalg import mdot, jitchol, dpotrs, dtrtrs, dpotri, symmetrify
 from ...util.misc import param_to_array
-from functools import partial as partial_func
 from posterior import Posterior
 import warnings
 from scipy import optimize
@@ -85,7 +84,6 @@ class LaplaceInference(object):
         Ki_f = Ki_f_init.copy()
         f = np.dot(K, Ki_f)

-        #define the objective function (to be maximised)
         def obj(Ki_f, f):
             return -0.5*np.dot(Ki_f.flatten(), f.flatten()) + likelihood.logpdf(f, Y, extra_data=Y_metadata)

@@ -205,14 +203,6 @@ class LaplaceInference(object):

         return log_marginal, woodbury_vector, K_Wi_i, dL_dK, dL_dthetaL

-
-    #def likelihood_gradients(self, f_hat, K, Y, Ki_W_i, dL_dfhat, I_KW_i, likelihood, Y_metadata):
-        #"""
-        #Gradients with respect to likelihood parameters (dL_dthetaL)
-
-        #:rtype: array of derivatives (1 x num_likelihood_params)
-        #"""
-
     def _compute_B_statistics(self, K, W, log_concave):
         """
         Rasmussen suggests the use of a numerically stable positive definite matrix B
@@ -245,6 +235,5 @@ class LaplaceInference(object):
         #K_Wi_i_2 , _= dpotri(L2)
         #symmetrify(K_Wi_i_2)

-
         return K_Wi_i, L, LiW12

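Note: the _compute_B_statistics docstring above refers to the standard reformulation from Rasmussen and Williams (GPML, section 3.4.3). Rather than inverting K + W^{-1} directly, one factorises B = I + W^{1/2} K W^{1/2}, whose eigenvalues are bounded below by 1, so the Cholesky factorisation is numerically safe. A standalone numpy sketch of that identity, matching the K_Wi_i, L, LiW12 return values in the hunk; this is an illustration only, not the exact GPy internals (which, per the log_concave argument, also guard against negative entries of W):

    import numpy as np

    def compute_B_statistics(K, W):
        """Stable evaluation of (K + W^{-1})^{-1} without forming W^{-1}.

        K: (N, N) prior covariance; W: (N, 1) diagonal of the negative
        Hessian of the log likelihood at the mode (assumed positive here).
        """
        N = K.shape[0]
        W12 = np.sqrt(W)                               # W^{1/2} as a column
        B = np.eye(N) + W12 * K * W12.T                # I + W^{1/2} K W^{1/2}
        L = np.linalg.cholesky(B)
        LiW12 = np.linalg.solve(L, np.diagflat(W12))   # L^{-1} W^{1/2}
        # (K + W^{-1})^{-1} = W^{1/2} B^{-1} W^{1/2} = LiW12^T LiW12
        K_Wi_i = np.dot(LiW12.T, LiW12)
        return K_Wi_i, L, LiW12
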
diff --git a/GPy/likelihoods/poisson.py b/GPy/likelihoods/poisson.py
index 355516bb..ba6915b8 100644
--- a/GPy/likelihoods/poisson.py
+++ b/GPy/likelihoods/poisson.py
@@ -19,8 +19,11 @@ class Poisson(Likelihood):

     .. Note:: Y is expected to take values in {0,1,2,...}
     """
-    def __init__(self,gp_link=None,analytical_mean=False,analytical_variance=False):
-        super(Poisson, self).__init__(gp_link,analytical_mean,analytical_variance)
+    def __init__(self, gp_link=None):
+        if gp_link is None:
+            gp_link = link_functions.Log_ex_1()
+
+        super(Poisson, self).__init__(gp_link, name='Poisson')

     def _preprocess_values(self,Y):
         return Y
diff --git a/GPy/likelihoods/student_t.py b/GPy/likelihoods/student_t.py
index e815a399..ac93f204 100644
--- a/GPy/likelihoods/student_t.py
+++ b/GPy/likelihoods/student_t.py
@@ -244,7 +244,7 @@ class StudentT(Likelihood):
         d2logpdf_dlink2_dv = np.zeros_like(d2logpdf_dlink2_dvar) #FIXME: Not done yet
         return np.hstack((d2logpdf_dlink2_dvar, d2logpdf_dlink2_dv))

-    def _predictive_variance_analytical(self, mu, sigma, predictive_mean=None):
+    def predictive_variance(self, mu, sigma, predictive_mean=None):
         """
         Compute predictive variance of student_t*normal p(y*|f*)p(f*)

@@ -264,7 +264,7 @@ class StudentT(Likelihood):

         return true_var

-    def _predictive_mean_analytical(self, mu, sigma):
+    def predictive_mean(self, mu, sigma):
         """
         Compute mean of the prediction
         """
diff --git a/GPy/testing/likelihood_tests.py b/GPy/testing/likelihood_tests.py
index d344e23d..7f48ac95 100644
--- a/GPy/testing/likelihood_tests.py
+++ b/GPy/testing/likelihood_tests.py
@@ -86,7 +86,7 @@ class TestNoiseModels(object):
         Generic model checker
     """
     def setUp(self):
-        self.N = 5
+        self.N = 15
         self.D = 3
         self.X = np.random.rand(self.N, self.D)*10

@@ -104,7 +104,7 @@ class TestNoiseModels(object):
         self.var = np.random.rand(1)

         #Make a bigger step as lower bound can be quite curved
-        self.step = 1e-3
+        self.step = 1e-4

     def tearDown(self):
         self.Y = None
@@ -165,11 +165,20 @@ class TestNoiseModels(object):
                 },
                 "laplace": True
             },
+            "Student_t_small_deg_free": {
+                "model": GPy.likelihoods.StudentT(deg_free=1.5, sigma2=self.var),
+                "grad_params": {
+                    "names": ["t_noise"],
+                    "vals": [self.var],
+                    "constraints": [("t_noise", constrain_positive), ("deg_free", constrain_fixed)]
+                },
+                "laplace": True
+            },
             "Student_t_small_var": {
                 "model": GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var),
                 "grad_params": {
                     "names": ["t_noise"],
-                    "vals": [0.01],
+                    "vals": [0.0001],
                     "constraints": [("t_noise", constrain_positive), ("deg_free", constrain_fixed)]
                 },
                 "laplace": True
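Note: given the commit message, the quickest way to exercise what this patch restores is GPy's built-in finite-difference gradient check. A sketch along the lines of the Poisson example above (the data is made up, and the step value mirrors the smaller step the tests now use; assumes the model exposes GPy's usual checkgrad):

    import numpy as np
    import GPy

    X = np.linspace(0, 10, 30)[:, None]
    Y = np.random.poisson(np.exp(np.sin(X)))   # illustrative count data

    poisson_lik = GPy.likelihoods.Poisson()
    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
    m = GPy.core.GP(X, Y, kernel=GPy.kern.rbf(1),
                    likelihood=poisson_lik, inference_method=laplace_inf)
    m.ensure_default_constraints()

    # Compare analytic gradients of the Laplace-approximated marginal
    # likelihood against finite differences
    assert m.checkgrad(verbose=True, step=1e-4)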