Merge branch 'params' of github.com:SheffieldML/GPy into params

Conflicts:
	GPy/core/parameterization/param.py
Max Zwiessele 2014-02-10 16:02:57 +00:00
commit a13d6ca894
8 changed files with 69 additions and 61 deletions

View file

@@ -153,23 +153,6 @@ class Param(ObservableArray, Constrainable, Gradcheckable):
     def _collect_gradient(self, target):
         target[:] = self.gradient.flat
-    #===========================================================================
-    # Fixing Parameters:
-    #===========================================================================
-    def constrain_fixed(self, warning=True):
-        """
-        Constrain this parameter to be fixed to the current value it carries.
-        :param warning: print a warning for overwriting constraints.
-        """
-        self._highest_parent_._fix(self, warning)
-    fix = constrain_fixed
-
-    def unconstrain_fixed(self):
-        """
-        This parameter will no longer be fixed.
-        """
-        self._highest_parent_._unfix(self)
-    unfix = unconstrain_fixed
     #===========================================================================
     # Tying operations -> bugged, TODO
     #===========================================================================
     def tie_to(self, param):

View file

@@ -94,6 +94,25 @@ class Constrainable(Nameable):
     def __init__(self, name):
         super(Constrainable, self).__init__(name)
+    #===========================================================================
+    # Fixing Parameters:
+    #===========================================================================
+    def constrain_fixed(self, value=None, warning=True):
+        """
+        Constrain this parameter to be fixed to the current value it carries.
+        :param warning: print a warning for overwriting constraints.
+        """
+        if value is not None:
+            self[:] = value
+        self._highest_parent_._fix(self, warning)
+    fix = constrain_fixed
+
+    def unconstrain_fixed(self):
+        """
+        This parameter will no longer be fixed.
+        """
+        self._highest_parent_._unfix(self)
+    unfix = unconstrain_fixed
     #===========================================================================
     # Constrain operations -> done
     #===========================================================================
     def constrain(self, transform, warning=True, update=True):

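Together with the removal in the previous file, this moves fixing from Param up to the Constrainable base class, and constrain_fixed gains an optional value argument. A minimal sketch of the resulting call pattern on a model m with a 'white' parameter, as in the example file further down (the snippet itself is illustrative, not part of this diff):

    # Assign-and-fix in one call: sets self[:] = 1e-5, then fixes it.
    m['white'].constrain_fixed(1e-5)   # or the alias m['white'].fix(1e-5)

    # Release the parameter again.
    m['white'].unconstrain_fixed()     # or the alias m['white'].unfix()
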
View file

@@ -37,39 +37,43 @@ def student_t_approx(optimize=True, plot=True):
     # Kernel object
     kernel1 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
-    kernel2 = kernel1.copy()
-    kernel3 = kernel1.copy()
-    kernel4 = kernel1.copy()
+    kernel2 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
+    kernel3 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
+    kernel4 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])

     # Gaussian GP model on clean data
     m1 = GPy.models.GPRegression(X, Y.copy(), kernel=kernel1)
     # optimize
     m1.ensure_default_constraints()
-    m1.constrain_fixed('white', 1e-5)
+    m1['white'] = 1e-5
+    m1['white'].constrain_fixed()
     m1.randomize()

     # Gaussian GP model on corrupt data
     m2 = GPy.models.GPRegression(X, Yc.copy(), kernel=kernel2)
     m2.ensure_default_constraints()
-    m2.constrain_fixed('white', 1e-5)
+    m2['white'] = 1e-5
+    m2['white'].constrain_fixed()
     m2.randomize()

     # Student t GP model on clean data
-    t_distribution = GPy.likelihoods.noise_model_constructors.student_t(deg_free=deg_free, sigma2=edited_real_sd)
-    stu_t_likelihood = GPy.likelihoods.Laplace(Y.copy(), t_distribution)
-    m3 = GPy.models.GPRegression(X, Y.copy(), kernel3, likelihood=stu_t_likelihood)
+    t_distribution = GPy.likelihoods.StudentT(deg_free=deg_free, sigma2=edited_real_sd)
+    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
+    m3 = GPy.core.GP(X, Y.copy(), kernel3, likelihood=t_distribution, inference_method=laplace_inf)
     m3.ensure_default_constraints()
-    m3.constrain_bounded('t_noise', 1e-6, 10.)
-    m3.constrain_fixed('white', 1e-5)
+    m3['t_noise'].constrain_bounded(1e-6, 10.)
+    m3['white'] = 1e-5
+    m3['white'].constrain_fixed()
     m3.randomize()

     # Student t GP model on corrupt data
-    t_distribution = GPy.likelihoods.noise_model_constructors.student_t(deg_free=deg_free, sigma2=edited_real_sd)
-    corrupt_stu_t_likelihood = GPy.likelihoods.Laplace(Yc.copy(), t_distribution)
-    m4 = GPy.models.GPRegression(X, Yc.copy(), kernel4, likelihood=corrupt_stu_t_likelihood)
+    t_distribution = GPy.likelihoods.StudentT(deg_free=deg_free, sigma2=edited_real_sd)
+    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
+    m4 = GPy.core.GP(X, Yc.copy(), kernel4, likelihood=t_distribution, inference_method=laplace_inf)
     m4.ensure_default_constraints()
-    m4.constrain_bounded('t_noise', 1e-6, 10.)
-    m4.constrain_fixed('white', 1e-5)
+    m4['t_noise'].constrain_bounded(1e-6, 10.)
+    m4['white'] = 1e-5
+    m4['white'].constrain_fixed()
     m4.randomize()

     if optimize:

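The likelihood changes in this file follow one migration pattern, repeated by the Poisson example in the next file: the old Laplace wrapper around a noise model becomes a standalone likelihood plus an explicit inference method passed to GPy.core.GP. A self-contained sketch under the module paths this commit uses (the data setup is illustrative, not from this diff):

    import numpy as np
    import GPy

    X = np.random.rand(20, 1) * 10           # illustrative inputs
    Y = np.sin(X) + np.random.randn(20, 1)   # illustrative targets

    # Old API (removed): the likelihood wrapped the noise model and the targets.
    #   noise = GPy.likelihoods.noise_model_constructors.student_t(deg_free=5., sigma2=0.1)
    #   lik = GPy.likelihoods.Laplace(Y, noise)
    #   m = GPy.models.GPRegression(X, Y, kernel, likelihood=lik)

    # New API: likelihood and inference method are independent objects.
    lik = GPy.likelihoods.StudentT(deg_free=5., sigma2=0.1)
    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
    m = GPy.core.GP(X, Y, kernel=GPy.kern.rbf(1), likelihood=lik, inference_method=laplace_inf)
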
View file

@@ -281,11 +281,12 @@ def toy_poisson_rbf_1d_laplace(optimize=True, plot=True):
     f_true = np.random.multivariate_normal(np.zeros(x_len), GPy.kern.rbf(1).K(X))
     Y = np.array([np.random.poisson(np.exp(f)) for f in f_true])[:,None]

-    noise_model = GPy.likelihoods.poisson()
-    likelihood = GPy.likelihoods.Laplace(Y, noise_model)
+    kern = GPy.kern.rbf(1)
+    poisson_lik = GPy.likelihoods.Poisson()
+    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()

     # create simple GP Model
-    m = GPy.models.GPRegression(X, Y, likelihood=likelihood)
+    m = GPy.core.GP(X, Y, kernel=kern, likelihood=poisson_lik, inference_method=laplace_inf)

     if optimize:
         m.optimize(optimizer)

View file

@@ -11,9 +11,8 @@
 #http://gaussianprocess.org/gpml/code.
 import numpy as np
-from ...util.linalg import mdot, jitchol, pddet, dpotrs, dtrtrs, dpotri, symmetrify
+from ...util.linalg import mdot, jitchol, dpotrs, dtrtrs, dpotri, symmetrify
 from ...util.misc import param_to_array
 from functools import partial as partial_func
 from posterior import Posterior
 import warnings
 from scipy import optimize
@@ -85,7 +84,6 @@ class LaplaceInference(object):
         Ki_f = Ki_f_init.copy()
         f = np.dot(K, Ki_f)

-        #define the objective function (to be maximised)
         def obj(Ki_f, f):
             return -0.5*np.dot(Ki_f.flatten(), f.flatten()) + likelihood.logpdf(f, Y, extra_data=Y_metadata)
@@ -205,14 +203,6 @@ class LaplaceInference(object):
         return log_marginal, woodbury_vector, K_Wi_i, dL_dK, dL_dthetaL

-    #def likelihood_gradients(self, f_hat, K, Y, Ki_W_i, dL_dfhat, I_KW_i, likelihood, Y_metadata):
-        #"""
-        #Gradients with respect to likelihood parameters (dL_dthetaL)
-
-        #:rtype: array of derivatives (1 x num_likelihood_params)
-        #"""
-
     def _compute_B_statistics(self, K, W, log_concave):
         """
         Rasmussen suggests the use of a numerically stable positive definite matrix B
@@ -245,6 +235,5 @@ class LaplaceInference(object):
         #K_Wi_i_2 , _= dpotri(L2)
         #symmetrify(K_Wi_i_2)
         return K_Wi_i, L, LiW12
-

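For orientation, the obj closure in the second hunk above is the standard Laplace mode-finding objective from Rasmussen and Williams (GPML, Section 3.4), parameterised in terms of K^{-1}f for numerical stability. Up to additive constants,

    \psi(\mathbf{f}) = \log p(\mathbf{y} \mid \mathbf{f}) - \tfrac{1}{2}\, \mathbf{f}^{\top} K^{-1} \mathbf{f}

which is exactly what obj returns: -0.5 * dot(K^{-1}f, f) plus likelihood.logpdf(f, Y). Maximising this over f locates the posterior mode f_hat around which the Gaussian approximation is built.
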
View file

@@ -19,8 +19,11 @@ class Poisson(Likelihood):
     .. Note::
         Y is expected to take values in {0,1,2,...}
     """
-    def __init__(self, gp_link=None, analytical_mean=False, analytical_variance=False):
-        super(Poisson, self).__init__(gp_link, analytical_mean, analytical_variance)
+    def __init__(self, gp_link=None):
+        if gp_link is None:
+            gp_link = link_functions.Log_ex_1()
+        super(Poisson, self).__init__(gp_link, name='Poisson')

     def _preprocess_values(self, Y):
         return Y

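With this change, Poisson() works with no arguments: the GP link defaults to link_functions.Log_ex_1, a link that keeps the Poisson rate positive (its inverse is the softplus-style log(1 + e^f)). A minimal sketch of the two now-equivalent constructions, assuming link_functions is importable from GPy.likelihoods as this module does:

    import GPy
    from GPy.likelihoods import link_functions

    # These two constructions should now be equivalent.
    lik_default = GPy.likelihoods.Poisson()
    lik_explicit = GPy.likelihoods.Poisson(gp_link=link_functions.Log_ex_1())
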
View file

@@ -244,7 +244,7 @@ class StudentT(Likelihood):
         d2logpdf_dlink2_dv = np.zeros_like(d2logpdf_dlink2_dvar) #FIXME: Not done yet
         return np.hstack((d2logpdf_dlink2_dvar, d2logpdf_dlink2_dv))

-    def _predictive_variance_analytical(self, mu, sigma, predictive_mean=None):
+    def predictive_variance(self, mu, sigma, predictive_mean=None):
         """
         Compute predictive variance of student_t*normal p(y*|f*)p(f*)
@@ -264,7 +264,7 @@ class StudentT(Likelihood):
         return true_var

-    def _predictive_mean_analytical(self, mu, sigma):
+    def predictive_mean(self, mu, sigma):
         """
         Compute mean of the prediction
         """

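The rename drops the underscore prefix, making predictive_mean and predictive_variance the public interface of StudentT. A usage sketch with illustrative latent moments (the array shapes are an assumption, not taken from this commit):

    import numpy as np
    import GPy

    stu_t = GPy.likelihoods.StudentT(deg_free=5., sigma2=0.1)
    mu = np.zeros((10, 1))    # illustrative latent means at test points
    sigma = np.ones((10, 1))  # illustrative latent standard deviations

    mean = stu_t.predictive_mean(mu, sigma)           # was _predictive_mean_analytical
    var = stu_t.predictive_variance(mu, sigma, mean)  # was _predictive_variance_analytical
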
View file

@@ -86,7 +86,7 @@ class TestNoiseModels(object):
     Generic model checker
     """
     def setUp(self):
-        self.N = 5
+        self.N = 15
         self.D = 3
         self.X = np.random.rand(self.N, self.D)*10
@@ -104,7 +104,7 @@ class TestNoiseModels(object):
         self.var = np.random.rand(1)

         #Make a bigger step as lower bound can be quite curved
-        self.step = 1e-3
+        self.step = 1e-4

     def tearDown(self):
         self.Y = None
@@ -165,11 +165,20 @@ class TestNoiseModels(object):
                 },
                 "laplace": True
             },
+            "Student_t_small_deg_free": {
+                "model": GPy.likelihoods.StudentT(deg_free=1.5, sigma2=self.var),
+                "grad_params": {
+                    "names": ["t_noise"],
+                    "vals": [self.var],
+                    "constraints": [("t_noise", constrain_positive), ("deg_free", constrain_fixed)]
+                },
+                "laplace": True
+            },
             "Student_t_small_var": {
                 "model": GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var),
                 "grad_params": {
                     "names": ["t_noise"],
-                    "vals": [0.01],
+                    "vals": [0.0001],
                     "constraints": [("t_noise", constrain_positive), ("deg_free", constrain_fixed)]
                 },
                 "laplace": True