Merge branch 'devel' into pickle

Max Zwiessele 2013-06-25 17:44:52 +01:00
commit 1e06ca2d40
66 changed files with 768 additions and 193 deletions

@@ -4,6 +4,7 @@
from gp_regression import GPRegression
from gp_classification import GPClassification
from sparse_gp_regression import SparseGPRegression
from svigp_regression import SVIGPRegression
from sparse_gp_classification import SparseGPClassification
from fitc_classification import FITCClassification
from gplvm import GPLVM

@@ -46,7 +46,7 @@ class BayesianGPLVM(SparseGP, GPLVM):
            kernel = kern.rbf(input_dim) + kern.white(input_dim)
        SparseGP.__init__(self, X, likelihood, kernel, Z=Z, X_variance=X_variance, **kwargs)
        self._set_params(self._get_params())
        self.ensure_default_constraints()

    def __getstate__(self):
        """

@@ -44,4 +44,4 @@ class FITCClassification(FITC):
            assert Z.shape[1] == X.shape[1]
        FITC.__init__(self, X, likelihood, kernel, Z=Z, normalize_X=normalize_X)
        self._set_params(self._get_params())
        self.ensure_default_constraints()

@@ -38,4 +38,4 @@ class GPClassification(GP):
                raise Warning, 'likelihood.data and Y are different.'
        GP.__init__(self, X, likelihood, kernel, normalize_X=normalize_X)
        self._set_params(self._get_params())
        self.ensure_default_constraints()

@@ -32,4 +32,4 @@ class GPRegression(GP):
        likelihood = likelihoods.Gaussian(Y, normalize=normalize_Y)
        GP.__init__(self, X, likelihood, kernel, normalize_X=normalize_X)
        self._set_params(self._get_params())
        self.ensure_default_constraints()

@@ -33,7 +33,7 @@ class GPLVM(GP):
            kernel = kern.rbf(input_dim, ARD=input_dim > 1) + kern.bias(input_dim, np.exp(-2)) + kern.white(input_dim, np.exp(-2))
        likelihood = Gaussian(Y, normalize=normalize_Y)
        GP.__init__(self, X, likelihood, kernel, normalize_X=False)
        self._set_params(self._get_params())
        self.ensure_default_constraints()

    def initialise_latent(self, init, input_dim, Y):
        if init == 'PCA':
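
The PCA branch of initialise_latent is cut off above. As a hedged sketch of what PCA-based latent initialization amounts to, with initialise_latent_pca as a hypothetical standalone stand-in for the truncated method:

import numpy as np

def initialise_latent_pca(Y, input_dim):
    # Hypothetical stand-in for GPLVM.initialise_latent with init='PCA':
    # centre the data, then project it onto its top principal directions.
    Yc = Y - Y.mean(axis=0)
    _, _, Vt = np.linalg.svd(Yc, full_matrices=False)
    return Yc.dot(Vt[:input_dim].T)

GPy's actual method may differ in details (scaling of the projections, handling of input_dim larger than the data rank); this only illustrates the idea.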

@@ -82,7 +82,7 @@ class MRD(Model):
        self.MQ = self.num_inducing * self.input_dim
        Model.__init__(self)
        self._set_params(self._get_params())
        self.ensure_default_constraints()

    def __getstate__(self):
        return [self.names,
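
In line with the branch name, __getstate__ hooks appear on BayesianGPLVM above and on MRD here, though their bodies are truncated in this view. For orientation, a minimal sketch of the standard Python pickling pattern such hooks follow, with made-up attribute names:

import pickle

class Example(object):
    def __init__(self):
        self.name = 'model'   # picklable state
        self._cache = {}      # state to drop before serializing

    def __getstate__(self):
        # copy the instance dict and remove what should not be pickled
        state = self.__dict__.copy()
        del state['_cache']
        return state

    def __setstate__(self, state):
        # restore the pickled attributes, then rebuild what was dropped
        self.__dict__.update(state)
        self._cache = {}

roundtrip = pickle.loads(pickle.dumps(Example()))

Whether a class returns a dict or, as the MRD fragment suggests, a list of attributes to be consumed by a matching __setstate__, the division of labour is the same.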

@@ -44,4 +44,4 @@ class SparseGPClassification(SparseGP):
            assert Z.shape[1] == X.shape[1]
        SparseGP.__init__(self, X, likelihood, kernel, Z=Z, normalize_X=normalize_X)
        self._set_params(self._get_params())
        self.ensure_default_constraints()

@@ -42,4 +42,4 @@ class SparseGPRegression(SparseGP):
        likelihood = likelihoods.Gaussian(Y, normalize=normalize_Y)
        SparseGP.__init__(self, X, likelihood, kernel, Z=Z, normalize_X=normalize_X, X_variance=X_variance)
        self._set_params(self._get_params())
        self.ensure_default_constraints()

@@ -26,6 +26,7 @@ class SparseGPLVM(SparseGPRegression, GPLVM):
    def __init__(self, Y, input_dim, kernel=None, init='PCA', num_inducing=10):
        X = self.initialise_latent(init, input_dim, Y)
        SparseGPRegression.__init__(self, X, Y, kernel=kernel, num_inducing=num_inducing)
        self.ensure_default_constraints()

    def _get_param_names(self):
        return (sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])
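
The return statement above is truncated, but the comprehension it opens with fixes the latent parameter naming scheme: one 'X_n_q' label per (data point, latent dimension) pair, in row-major order. A quick standalone check, using small stand-ins for the instance attributes:

num_data, input_dim = 3, 2  # stand-ins for self.num_data and self.input_dim
names = sum([['X_%i_%i' % (n, q) for q in range(input_dim)]
             for n in range(num_data)], [])
print(names)
# ['X_0_0', 'X_0_1', 'X_1_0', 'X_1_1', 'X_2_0', 'X_2_1']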

@@ -0,0 +1,44 @@
# Copyright (c) 2012, James Hensman
# Licensed under the BSD 3-clause license (see LICENSE.txt)

import numpy as np

from ..core import SVIGP
from .. import likelihoods
from .. import kern


class SVIGPRegression(SVIGP):
    """
    Gaussian Process model for regression.

    This is a thin wrapper around the SVIGP class, with a set of sensible defaults.

    :param X: input observations
    :param Y: observed values
    :param kernel: a GPy kernel, defaults to rbf+white
    :param Z: inducing inputs, defaults to a random subset of X
    :param num_inducing: number of inducing inputs to draw when Z is not given
    :param q_u: initial variational distribution over the inducing outputs (optional)
    :param batchsize: size of the minibatches used for stochastic optimization
    :rtype: model object

    .. Note:: Multiple independent outputs are allowed using columns of Y
    """
    def __init__(self, X, Y, kernel=None, Z=None, num_inducing=10, q_u=None, batchsize=10):
        # kern defaults to rbf (plus white for stability)
        if kernel is None:
            kernel = kern.rbf(X.shape[1], variance=1., lengthscale=4.) + kern.white(X.shape[1], 1e-3)

        # Z defaults to a subset of the data
        if Z is None:
            i = np.random.permutation(X.shape[0])[:num_inducing]
            Z = X[i].copy()
        else:
            assert Z.shape[1] == X.shape[1]

        # likelihood defaults to Gaussian
        likelihood = likelihoods.Gaussian(Y, normalize=False)

        SVIGP.__init__(self, X, likelihood, kernel, Z, q_u=q_u, batchsize=batchsize)
        self.load_batch()
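
Since the first hunk above adds SVIGPRegression to the models package imports, the class should be reachable at package level. A rough usage sketch; the data, shapes, and settings here are invented for illustration:

import numpy as np
import GPy

# toy data: 500 noisy samples of a 1-D sine wave (made-up example)
X = np.random.rand(500, 1)
Y = np.sin(2 * np.pi * X) + 0.05 * np.random.randn(500, 1)

# inducing inputs default to a random subset of X; minibatches of size 10
m = GPy.models.SVIGPRegression(X, Y, num_inducing=20, batchsize=10)
print(m)

Fitting would then go through whatever stochastic optimization interface SVIGP exposes; that code is not part of the diff shown here.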