Mirror of https://github.com/SheffieldML/GPy.git (synced 2026-05-08 03:22:38 +02:00)
Laplace now appears to be grad checking again
This commit is contained in:
parent c6d466e72d
commit 6cbf810856

6 changed files with 43 additions and 37 deletions
@@ -37,39 +37,43 @@ def student_t_approx(optimize=True, plot=True):
 
     # Kernel object
     kernel1 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
-    kernel2 = kernel1.copy()
-    kernel3 = kernel1.copy()
-    kernel4 = kernel1.copy()
+    kernel2 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
+    kernel3 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
+    kernel4 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
 
     #Gaussian GP model on clean data
     m1 = GPy.models.GPRegression(X, Y.copy(), kernel=kernel1)
     # optimize
     m1.ensure_default_constraints()
-    m1.constrain_fixed('white', 1e-5)
+    m1['white'] = 1e-5
+    m1['white'].constrain_fixed()
    m1.randomize()
 
     #Gaussian GP model on corrupt data
     m2 = GPy.models.GPRegression(X, Yc.copy(), kernel=kernel2)
     m2.ensure_default_constraints()
-    m2.constrain_fixed('white', 1e-5)
+    m2['white'] = 1e-5
+    m2['white'].constrain_fixed()
     m2.randomize()
 
     #Student t GP model on clean data
-    t_distribution = GPy.likelihoods.noise_model_constructors.student_t(deg_free=deg_free, sigma2=edited_real_sd)
-    stu_t_likelihood = GPy.likelihoods.Laplace(Y.copy(), t_distribution)
-    m3 = GPy.models.GPRegression(X, Y.copy(), kernel3, likelihood=stu_t_likelihood)
+    t_distribution = GPy.likelihoods.StudentT(deg_free=deg_free, sigma2=edited_real_sd)
+    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
+    m3 = GPy.core.GP(X, Y.copy(), kernel3, likelihood=t_distribution, inference_method=laplace_inf)
     m3.ensure_default_constraints()
-    m3.constrain_bounded('t_noise', 1e-6, 10.)
-    m3.constrain_fixed('white', 1e-5)
+    m3['t_noise'].constrain_bounded(1e-6, 10.)
+    m3['white'] = 1e-5
+    m3['white'].constrain_fixed()
     m3.randomize()
 
     #Student t GP model on corrupt data
-    t_distribution = GPy.likelihoods.noise_model_constructors.student_t(deg_free=deg_free, sigma2=edited_real_sd)
-    corrupt_stu_t_likelihood = GPy.likelihoods.Laplace(Yc.copy(), t_distribution)
-    m4 = GPy.models.GPRegression(X, Yc.copy(), kernel4, likelihood=corrupt_stu_t_likelihood)
+    t_distribution = GPy.likelihoods.StudentT(deg_free=deg_free, sigma2=edited_real_sd)
+    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
+    m4 = GPy.core.GP(X, Yc.copy(), kernel4, likelihood=t_distribution, inference_method=laplace_inf)
     m4.ensure_default_constraints()
-    m4.constrain_bounded('t_noise', 1e-6, 10.)
-    m4.constrain_fixed('white', 1e-5)
+    m4['t_noise'].constrain_bounded(1e-6, 10.)
+    m4['white'] = 1e-5
+    m4['white'].constrain_fixed()
     m4.randomize()
 
     if optimize:
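
For readers tracking the API migration, here is a minimal, self-contained sketch of the calling pattern the hunk above adopts: the noise model (StudentT) and the approximate inference scheme (LaplaceInference) are now separate objects handed to GPy.core.GP. The toy data and hyperparameter values below are illustrative, not taken from the repository.

import numpy as np
import GPy

# Illustrative 1-D toy data (not from the demo itself)
X = np.linspace(0, 10, 30)[:, None]
Y = np.sin(X) + np.random.randn(30, 1) * 0.1

# New pattern: likelihood and inference method are independent objects
kern = GPy.kern.rbf(1) + GPy.kern.white(1)
t_dist = GPy.likelihoods.StudentT(deg_free=5, sigma2=0.1)
laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
m = GPy.core.GP(X, Y, kern, likelihood=t_dist, inference_method=laplace_inf)

# Constraints now hang off parameter views instead of string-based model methods
m.ensure_default_constraints()
m['t_noise'].constrain_bounded(1e-6, 10.)
m['white'] = 1e-5
m['white'].constrain_fixed()
m.optimize()
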
@@ -281,11 +281,12 @@ def toy_poisson_rbf_1d_laplace(optimize=True, plot=True):
     f_true = np.random.multivariate_normal(np.zeros(x_len), GPy.kern.rbf(1).K(X))
     Y = np.array([np.random.poisson(np.exp(f)) for f in f_true])[:,None]
 
-    noise_model = GPy.likelihoods.poisson()
-    likelihood = GPy.likelihoods.Laplace(Y,noise_model)
+    kern = GPy.kern.rbf(1)
+    poisson_lik = GPy.likelihoods.Poisson()
+    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
 
     # create simple GP Model
-    m = GPy.models.GPRegression(X, Y, likelihood=likelihood)
+    m = GPy.core.GP(X, Y, kernel=kern, likelihood=poisson_lik, inference_method=laplace_inf)
 
     if optimize:
         m.optimize(optimizer)
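
Pieced together, the updated Poisson demo amounts to the runnable sketch below. x_len, the input grid X, and the use of the default optimizer are reconstructed or invented for illustration (the demo itself passes an optimizer variable not shown in this hunk).

import numpy as np
import GPy

x_len = 30
X = np.linspace(0, 10, x_len)[:, None]

# Draw a latent function from the RBF prior and Poisson counts from exp(f)
f_true = np.random.multivariate_normal(np.zeros(x_len), GPy.kern.rbf(1).K(X))
Y = np.array([np.random.poisson(np.exp(f)) for f in f_true])[:, None]

kern = GPy.kern.rbf(1)
poisson_lik = GPy.likelihoods.Poisson()
laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()

# The Poisson likelihood is non-Gaussian, so the posterior is approximated by Laplace
m = GPy.core.GP(X, Y, kernel=kern, likelihood=poisson_lik, inference_method=laplace_inf)
m.optimize()
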
@@ -11,9 +11,8 @@
 #http://gaussianprocess.org/gpml/code.
 
 import numpy as np
-from ...util.linalg import mdot, jitchol, pddet, dpotrs, dtrtrs, dpotri, symmetrify
+from ...util.linalg import mdot, jitchol, dpotrs, dtrtrs, dpotri, symmetrify
 from ...util.misc import param_to_array
-from functools import partial as partial_func
 from posterior import Posterior
 import warnings
 from scipy import optimize
@@ -85,7 +84,6 @@ class LaplaceInference(object):
         Ki_f = Ki_f_init.copy()
         f = np.dot(K, Ki_f)
 
-
         #define the objective function (to be maximised)
         def obj(Ki_f, f):
             return -0.5*np.dot(Ki_f.flatten(), f.flatten()) + likelihood.logpdf(f, Y, extra_data=Y_metadata)
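
For reference, obj is the standard Laplace objective: the unnormalised log posterior over the latent values (cf. Rasmussen & Williams, GPML, eq. 3.12),

    Psi(f) = log p(y|f) - (1/2) f^T K^{-1} f + const.

The code carries Ki_f = K^{-1} f alongside f, so the quadratic term is simply -0.5 * dot(Ki_f, f); the Newton iteration that follows maximises Psi to locate the posterior mode f_hat.
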
@@ -205,14 +203,6 @@ class LaplaceInference(object):
 
         return log_marginal, woodbury_vector, K_Wi_i, dL_dK, dL_dthetaL
 
-
-    #def likelihood_gradients(self, f_hat, K, Y, Ki_W_i, dL_dfhat, I_KW_i, likelihood, Y_metadata):
-        #"""
-        #Gradients with respect to likelihood parameters (dL_dthetaL)
-
-        #:rtype: array of derivatives (1 x num_likelihood_params)
-        #"""
-
     def _compute_B_statistics(self, K, W, log_concave):
         """
         Rasmussen suggests the use of a numerically stable positive definite matrix B
@@ -245,6 +235,5 @@ class LaplaceInference(object):
         #K_Wi_i_2 , _= dpotri(L2)
         #symmetrify(K_Wi_i_2)
 
-
         return K_Wi_i, L, LiW12
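
A note on the helper touched above: per GPML §3.4.3, with W the diagonal matrix of negative log-likelihood second derivatives, the numerically stable route is

    B = I + W^{1/2} K W^{1/2},    L = chol(B),
    (K + W^{-1})^{-1} = W^{1/2} B^{-1} W^{1/2},

since B's eigenvalues are bounded below by 1 and it can be Cholesky-factored safely. Mapping the return values K_Wi_i, L, LiW12 onto these quantities ((K + W^{-1})^{-1}, chol(B), and L^{-1} W^{1/2} respectively) is an inference from the variable names and should be checked against the source.
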
@@ -19,8 +19,11 @@ class Poisson(Likelihood):
     .. Note::
         Y is expected to take values in {0,1,2,...}
     """
-    def __init__(self,gp_link=None,analytical_mean=False,analytical_variance=False):
-        super(Poisson, self).__init__(gp_link,analytical_mean,analytical_variance)
+    def __init__(self, gp_link=None):
+        if gp_link is None:
+            gp_link = link_functions.Log_ex_1()
+
+        super(Poisson, self).__init__(gp_link, name='Poisson')
 
     def _preprocess_values(self,Y):
         return Y
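
One behavioural consequence of the new constructor: a Poisson likelihood built with no arguments now gets a Log_ex_1 link, which keeps the Poisson rate positive (in GPy this is a softplus-style transform, rate = log(1 + exp(f))). A minimal sketch, assuming link_functions is importable from GPy.likelihoods as in later releases:

import GPy
from GPy.likelihoods import link_functions

# Default construction now selects Log_ex_1 automatically
lik_default = GPy.likelihoods.Poisson()

# Equivalent explicit construction
lik_explicit = GPy.likelihoods.Poisson(gp_link=link_functions.Log_ex_1())
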
@@ -244,7 +244,7 @@ class StudentT(Likelihood):
         d2logpdf_dlink2_dv = np.zeros_like(d2logpdf_dlink2_dvar) #FIXME: Not done yet
         return np.hstack((d2logpdf_dlink2_dvar, d2logpdf_dlink2_dv))
 
-    def _predictive_variance_analytical(self, mu, sigma, predictive_mean=None):
+    def predictive_variance(self, mu, sigma, predictive_mean=None):
         """
         Compute predictive variance of student_t*normal p(y*|f*)p(f*)
@@ -264,7 +264,7 @@ class StudentT(Likelihood):
 
         return true_var
 
-    def _predictive_mean_analytical(self, mu, sigma):
+    def predictive_mean(self, mu, sigma):
         """
         Compute mean of the prediction
         """
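
Dropping the leading underscore and the _analytical suffix promotes these two methods to the public likelihood interface used at prediction time: given the Gaussian posterior over f*, they fold in the Student-t noise. A hypothetical, self-contained sketch; _raw_predict and the exact call sites are assumptions, not taken from this commit:

import numpy as np
import GPy

X = np.linspace(0, 10, 30)[:, None]
Y = np.sin(X) + np.random.randn(30, 1) * 0.1
Xnew = np.linspace(0, 10, 100)[:, None]

t_dist = GPy.likelihoods.StudentT(deg_free=5, sigma2=0.1)
laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
m = GPy.core.GP(X, Y, GPy.kern.rbf(1), likelihood=t_dist, inference_method=laplace_inf)

mu, var = m._raw_predict(Xnew)            # Gaussian posterior over f* (name assumed)
pmean = t_dist.predictive_mean(mu, np.sqrt(var))
pvar = t_dist.predictive_variance(mu, np.sqrt(var), predictive_mean=pmean)
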
@@ -86,7 +86,7 @@ class TestNoiseModels(object):
     Generic model checker
     """
     def setUp(self):
-        self.N = 5
+        self.N = 15
         self.D = 3
         self.X = np.random.rand(self.N, self.D)*10
@@ -104,7 +104,7 @@ class TestNoiseModels(object):
         self.var = np.random.rand(1)
 
         #Make a bigger step as lower bound can be quite curved
-        self.step = 1e-3
+        self.step = 1e-4
 
     def tearDown(self):
         self.Y = None
@@ -165,11 +165,20 @@ class TestNoiseModels(object):
                 },
                 "laplace": True
             },
+            "Student_t_small_deg_free": {
+                "model": GPy.likelihoods.StudentT(deg_free=1.5, sigma2=self.var),
+                "grad_params": {
+                    "names": ["t_noise"],
+                    "vals": [self.var],
+                    "constraints": [("t_noise", constrain_positive), ("deg_free", constrain_fixed)]
+                },
+                "laplace": True
+            },
             "Student_t_small_var": {
                 "model": GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var),
                 "grad_params": {
                     "names": ["t_noise"],
-                    "vals": [0.01],
+                    "vals": [0.0001],
                     "constraints": [("t_noise", constrain_positive), ("deg_free", constrain_fixed)]
                 },
                 "laplace": True
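
The commit message ("Laplace now appears to be grad checking again") refers to exactly this harness: each "laplace": True entry is turned into a GP with Laplace inference and its gradients are verified by finite differences. A minimal sketch of the equivalent manual check, assuming GPy's model.checkgrad; the data here is illustrative:

import numpy as np
import GPy

X = np.random.rand(15, 3) * 10           # matches the new self.N = 15, self.D = 3
Y = np.sin(X.sum(axis=1))[:, None]

lik = GPy.likelihoods.StudentT(deg_free=1.5, sigma2=0.1)
laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
m = GPy.core.GP(X, Y, GPy.kern.rbf(3), likelihood=lik, inference_method=laplace_inf)
m.ensure_default_constraints()

# Finite-difference gradient check; the suite now uses the smaller step 1e-4
assert m.checkgrad(step=1e-4)
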