diff --git a/GPy/core/parameterization/param.py b/GPy/core/parameterization/param.py index 4fc3aca0..583c6425 100644 --- a/GPy/core/parameterization/param.py +++ b/GPy/core/parameterization/param.py @@ -153,23 +153,6 @@ class Param(ObservableArray, Constrainable, Gradcheckable): def _collect_gradient(self, target): target[:] = self.gradient.flat #=========================================================================== - # Fixing Parameters: - #=========================================================================== - def constrain_fixed(self, warning=True): - """ - Constrain this paramter to be fixed to the current value it carries. - - :param warning: print a warning for overwriting constraints. - """ - self._highest_parent_._fix(self, warning) - fix = constrain_fixed - def unconstrain_fixed(self): - """ - This parameter will no longer be fixed. - """ - self._highest_parent_._unfix(self) - unfix = unconstrain_fixed - #=========================================================================== # Tying operations -> bugged, TODO #=========================================================================== def tie_to(self, param): @@ -419,7 +402,7 @@ class Param(ObservableArray, Constrainable, Gradcheckable): slice_index = self._current_slice_ if isinstance(slice_index, (tuple, list)): clean_curr_slice = [s for s in slice_index if numpy.any(s != Ellipsis)] - if (all(isinstance(n, (numpy.ndarray, list, tuple)) for n in clean_curr_slice) + if (all(isinstance(n, (numpy.ndarray, list, tuple)) for n in clean_curr_slice) and len(set(map(len, clean_curr_slice))) <= 1): return numpy.fromiter(itertools.izip(*clean_curr_slice), dtype=[('', int)] * self._realndim_, count=len(clean_curr_slice[0])).view((int, self._realndim_)) @@ -438,7 +421,7 @@ class Param(ObservableArray, Constrainable, Gradcheckable): if self._realsize_ < 2: return name ind = self._indices() - if ind.size > 4: indstr = ','.join(map(str, ind[:2])) + "..." 
+ ','.join(map(str, ind[-2:])) + if ind.size > 4: indstr = ','.join(map(str, ind[:2])) + "..." + ','.join(map(str, ind[-2:])) else: indstr = ','.join(map(str, ind)) return name + '[' + indstr + ']' def __str__(self, constr_matrix=None, indices=None, ties=None, lc=None, lx=None, li=None, lt=None): @@ -472,7 +455,7 @@ class ParamConcatenation(object): for p in params: for p in p.flattened_parameters: if p not in self.params: - self.params.append(p) + self.params.append(p) self._param_sizes = [p.size for p in self.params] startstops = numpy.cumsum([0] + self._param_sizes) self._param_slices_ = [slice(start, stop) for start,stop in zip(startstops, startstops[1:])] @@ -587,7 +570,7 @@ if __name__ == '__main__': p = Param("q_mean", X) p1 = Param("q_variance", numpy.random.rand(*p.shape)) p2 = Param("Y", numpy.random.randn(p.shape[0], 1)) - + p3 = Param("variance", numpy.random.rand()) p4 = Param("lengthscale", numpy.random.rand(2)) diff --git a/GPy/core/parameterization/parameter_core.py b/GPy/core/parameterization/parameter_core.py index b22e14f7..7505c796 100644 --- a/GPy/core/parameterization/parameter_core.py +++ b/GPy/core/parameterization/parameter_core.py @@ -49,19 +49,19 @@ class Pickleable(object): class Parentable(object): def __init__(self, direct_parent=None, parent_index=None): - super(Parentable,self).__init__() + super(Parentable,self).__init__() self._direct_parent_ = direct_parent self._parent_index_ = parent_index def has_parent(self): return self._direct_parent_ is not None - + @property def _highest_parent_(self): if self._direct_parent_ is None: return self return self._direct_parent_._highest_parent_ - + class Nameable(Parentable): _name = None def __init__(self, name, direct_parent=None, parent_index=None): @@ -94,6 +94,25 @@ class Constrainable(Nameable): def __init__(self, name): super(Constrainable,self).__init__(name) #=========================================================================== + # Fixing Parameters: + 
#=========================================================================== + def constrain_fixed(self, value=None, warning=True): + """ + Constrain this parameter to be fixed to the current value it carries. + + :param warning: print a warning for overwriting constraints. + """ + if value is not None: + self[:] = value + self._highest_parent_._fix(self,warning) + fix = constrain_fixed + def unconstrain_fixed(self): + """ + This parameter will no longer be fixed. + """ + self._highest_parent_._unfix(self) + unfix = unconstrain_fixed + #=========================================================================== # Constrain operations -> done #=========================================================================== def constrain(self, transform, warning=True, update=True): diff --git a/GPy/examples/non_gaussian.py b/GPy/examples/non_gaussian.py index bda80137..23122691 100644 --- a/GPy/examples/non_gaussian.py +++ b/GPy/examples/non_gaussian.py @@ -37,39 +37,43 @@ def student_t_approx(optimize=True, plot=True): # Kernel object kernel1 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1]) - kernel2 = kernel1.copy() - kernel3 = kernel1.copy() - kernel4 = kernel1.copy() + kernel2 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1]) + kernel3 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1]) + kernel4 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1]) #Gaussian GP model on clean data m1 = GPy.models.GPRegression(X, Y.copy(), kernel=kernel1) # optimize m1.ensure_default_constraints() - m1.constrain_fixed('white', 1e-5) + m1['white'] = 1e-5 + m1['white'].constrain_fixed() m1.randomize() #Gaussian GP model on corrupt data m2 = GPy.models.GPRegression(X, Yc.copy(), kernel=kernel2) m2.ensure_default_constraints() - m2.constrain_fixed('white', 1e-5) + m2['white'] = 1e-5 + m2['white'].constrain_fixed() m2.randomize() #Student t GP model on clean data - t_distribution = GPy.likelihoods.noise_model_constructors.student_t(deg_free=deg_free, 
sigma2=edited_real_sd) - stu_t_likelihood = GPy.likelihoods.Laplace(Y.copy(), t_distribution) - m3 = GPy.models.GPRegression(X, Y.copy(), kernel3, likelihood=stu_t_likelihood) + t_distribution = GPy.likelihoods.StudentT(deg_free=deg_free, sigma2=edited_real_sd) + laplace_inf = GPy.inference.latent_function_inference.LaplaceInference() + m3 = GPy.core.GP(X, Y.copy(), kernel3, likelihood=t_distribution, inference_method=laplace_inf) m3.ensure_default_constraints() - m3.constrain_bounded('t_noise', 1e-6, 10.) - m3.constrain_fixed('white', 1e-5) + m3['t_noise'].constrain_bounded(1e-6, 10.) + m3['white'] = 1e-5 + m3['white'].constrain_fixed() m3.randomize() #Student t GP model on corrupt data - t_distribution = GPy.likelihoods.noise_model_constructors.student_t(deg_free=deg_free, sigma2=edited_real_sd) - corrupt_stu_t_likelihood = GPy.likelihoods.Laplace(Yc.copy(), t_distribution) - m4 = GPy.models.GPRegression(X, Yc.copy(), kernel4, likelihood=corrupt_stu_t_likelihood) + t_distribution = GPy.likelihoods.StudentT(deg_free=deg_free, sigma2=edited_real_sd) + laplace_inf = GPy.inference.latent_function_inference.LaplaceInference() + m4 = GPy.core.GP(X, Yc.copy(), kernel4, likelihood=t_distribution, inference_method=laplace_inf) m4.ensure_default_constraints() - m4.constrain_bounded('t_noise', 1e-6, 10.) - m4.constrain_fixed('white', 1e-5) + m4['t_noise'].constrain_bounded(1e-6, 10.) 
+ m4['white'] = 1e-5 + m4['white'].constrain_fixed() m4.randomize() if optimize: diff --git a/GPy/examples/regression.py b/GPy/examples/regression.py index 65a50f0e..4dea1342 100644 --- a/GPy/examples/regression.py +++ b/GPy/examples/regression.py @@ -281,11 +281,12 @@ def toy_poisson_rbf_1d_laplace(optimize=True, plot=True): f_true = np.random.multivariate_normal(np.zeros(x_len), GPy.kern.rbf(1).K(X)) Y = np.array([np.random.poisson(np.exp(f)) for f in f_true])[:,None] - noise_model = GPy.likelihoods.poisson() - likelihood = GPy.likelihoods.Laplace(Y,noise_model) + kern = GPy.kern.rbf(1) + poisson_lik = GPy.likelihoods.Poisson() + laplace_inf = GPy.inference.latent_function_inference.LaplaceInference() # create simple GP Model - m = GPy.models.GPRegression(X, Y, likelihood=likelihood) + m = GPy.core.GP(X, Y, kernel=kern, likelihood=poisson_lik, inference_method=laplace_inf) if optimize: m.optimize(optimizer) diff --git a/GPy/inference/latent_function_inference/laplace.py b/GPy/inference/latent_function_inference/laplace.py index 82313eab..bc81a86a 100644 --- a/GPy/inference/latent_function_inference/laplace.py +++ b/GPy/inference/latent_function_inference/laplace.py @@ -11,9 +11,8 @@ #http://gaussianprocess.org/gpml/code. 
import numpy as np -from ...util.linalg import mdot, jitchol, pddet, dpotrs, dtrtrs, dpotri, symmetrify +from ...util.linalg import mdot, jitchol, dpotrs, dtrtrs, dpotri, symmetrify from ...util.misc import param_to_array -from functools import partial as partial_func from posterior import Posterior import warnings from scipy import optimize @@ -85,7 +84,6 @@ class LaplaceInference(object): Ki_f = Ki_f_init.copy() f = np.dot(K, Ki_f) - #define the objective function (to be maximised) def obj(Ki_f, f): return -0.5*np.dot(Ki_f.flatten(), f.flatten()) + likelihood.logpdf(f, Y, extra_data=Y_metadata) @@ -205,14 +203,6 @@ class LaplaceInference(object): return log_marginal, woodbury_vector, K_Wi_i, dL_dK, dL_dthetaL - - #def likelihood_gradients(self, f_hat, K, Y, Ki_W_i, dL_dfhat, I_KW_i, likelihood, Y_metadata): - #""" - #Gradients with respect to likelihood parameters (dL_dthetaL) - - #:rtype: array of derivatives (1 x num_likelihood_params) - #""" - def _compute_B_statistics(self, K, W, log_concave): """ Rasmussen suggests the use of a numerically stable positive definite matrix B @@ -245,6 +235,5 @@ class LaplaceInference(object): #K_Wi_i_2 , _= dpotri(L2) #symmetrify(K_Wi_i_2) - return K_Wi_i, L, LiW12 diff --git a/GPy/likelihoods/poisson.py b/GPy/likelihoods/poisson.py index 355516bb..ba6915b8 100644 --- a/GPy/likelihoods/poisson.py +++ b/GPy/likelihoods/poisson.py @@ -19,8 +19,11 @@ class Poisson(Likelihood): .. 
Note:: Y is expected to take values in {0,1,2,...} """ - def __init__(self,gp_link=None,analytical_mean=False,analytical_variance=False): - super(Poisson, self).__init__(gp_link,analytical_mean,analytical_variance) + def __init__(self, gp_link=None): + if gp_link is None: + gp_link = link_functions.Log_ex_1() + + super(Poisson, self).__init__(gp_link, name='Poisson') def _preprocess_values(self,Y): return Y diff --git a/GPy/likelihoods/student_t.py b/GPy/likelihoods/student_t.py index e815a399..ac93f204 100644 --- a/GPy/likelihoods/student_t.py +++ b/GPy/likelihoods/student_t.py @@ -244,7 +244,7 @@ class StudentT(Likelihood): d2logpdf_dlink2_dv = np.zeros_like(d2logpdf_dlink2_dvar) #FIXME: Not done yet return np.hstack((d2logpdf_dlink2_dvar, d2logpdf_dlink2_dv)) - def _predictive_variance_analytical(self, mu, sigma, predictive_mean=None): + def predictive_variance(self, mu, sigma, predictive_mean=None): """ Compute predictive variance of student_t*normal p(y*|f*)p(f*) @@ -264,7 +264,7 @@ class StudentT(Likelihood): return true_var - def _predictive_mean_analytical(self, mu, sigma): + def predictive_mean(self, mu, sigma): """ Compute mean of the prediction """ diff --git a/GPy/testing/likelihood_tests.py b/GPy/testing/likelihood_tests.py index d344e23d..7f48ac95 100644 --- a/GPy/testing/likelihood_tests.py +++ b/GPy/testing/likelihood_tests.py @@ -86,7 +86,7 @@ class TestNoiseModels(object): Generic model checker """ def setUp(self): - self.N = 5 + self.N = 15 self.D = 3 self.X = np.random.rand(self.N, self.D)*10 @@ -104,7 +104,7 @@ class TestNoiseModels(object): self.var = np.random.rand(1) #Make a bigger step as lower bound can be quite curved - self.step = 1e-3 + self.step = 1e-4 def tearDown(self): self.Y = None @@ -165,11 +165,20 @@ class TestNoiseModels(object): }, "laplace": True }, + "Student_t_small_deg_free": { + "model": GPy.likelihoods.StudentT(deg_free=1.5, sigma2=self.var), + "grad_params": { + "names": ["t_noise"], + "vals": [self.var], + 
"constraints": [("t_noise", constrain_positive), ("deg_free", constrain_fixed)] + }, + "laplace": True + }, "Student_t_small_var": { "model": GPy.likelihoods.StudentT(deg_free=5, sigma2=self.var), "grad_params": { "names": ["t_noise"], - "vals": [0.01], + "vals": [0.0001], "constraints": [("t_noise", constrain_positive), ("deg_free", constrain_fixed)] }, "laplace": True