Merge branch 'devel' of github.com:SheffieldML/GPy into devel

This commit is contained in:
Nicolas 2013-06-26 17:32:41 +01:00
commit 9deb1fc1c2
28 changed files with 463 additions and 289 deletions

View file

@ -2,7 +2,7 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from model import *
from parameterised import *
from parameterized import *
import priors
from gp import GP
from sparse_gp import SparseGP

View file

@ -31,8 +31,11 @@ class GP(GPBase):
GPBase.__init__(self, X, likelihood, kernel, normalize_X=normalize_X)
self._set_params(self._get_params())
def __setstate__(self, state):
GPBase.__setstate__(self, state)
def getstate(self):
return GPBase.getstate(self)
def setstate(self, state):
GPBase.setstate(self, state)
self._set_params(self._get_params())
def _set_params(self, p):

View file

@ -33,12 +33,12 @@ class GPBase(Model):
# All leaf nodes should call self._set_params(self._get_params()) at
# the end
def __getstate__(self):
def getstate(self):
"""
Get the current state of the class,
here just all the indices, rest can get recomputed
"""
return Model.__getstate__(self) + [self.X,
return Model.getstate(self) + [self.X,
self.num_data,
self.input_dim,
self.kern,
@ -47,7 +47,7 @@ class GPBase(Model):
self._Xoffset,
self._Xscale]
def __setstate__(self, state):
def setstate(self, state):
self._Xscale = state.pop()
self._Xoffset = state.pop()
self.output_dim = state.pop()
@ -56,7 +56,7 @@ class GPBase(Model):
self.input_dim = state.pop()
self.num_data = state.pop()
self.X = state.pop()
Model.__setstate__(self, state)
Model.setstate(self, state)
def plot_f(self, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, full_cov=False, fignum=None, ax=None):
"""

View file

@ -6,50 +6,49 @@ from .. import likelihoods
from ..inference import optimization
from ..util.linalg import jitchol
from GPy.util.misc import opt_wrapper
from parameterised import Parameterised
from parameterized import Parameterized
import multiprocessing as mp
import numpy as np
from GPy.core.domains import POSITIVE, REAL
from numpy.linalg.linalg import LinAlgError
# import numdifftools as ndt
class Model(Parameterised):
class Model(Parameterized):
_fail_count = 0 # Count of failed optimization steps (see objective)
_allowed_failures = 10 # number of allowed failures
def __init__(self):
Parameterised.__init__(self)
Parameterized.__init__(self)
self.priors = None
self.optimization_runs = []
self.sampling_runs = []
self.preferred_optimizer = 'scg'
# self._set_params(self._get_params()) has been taken out as it should only be called on leaf nodes
def _get_params(self):
raise NotImplementedError, "this needs to be implemented to use the Model class"
def _set_params(self, x):
raise NotImplementedError, "this needs to be implemented to use the Model class"
def log_likelihood(self):
raise NotImplementedError, "this needs to be implemented to use the Model class"
def _log_likelihood_gradients(self):
raise NotImplementedError, "this needs to be implemented to use the Model class"
def __getstate__(self):
def getstate(self):
"""
Get the current state of the class,
here just all the indices, rest can get recomputed
Get the current state of the class.
Inherited from Parameterized, so add those parameters to the state
"""
return Parameterised.__getstate__(self) + \
return Parameterized.getstate(self) + \
[self.priors, self.optimization_runs,
self.sampling_runs, self.preferred_optimizer]
def __setstate__(self, state):
def setstate(self, state):
"""
set state from previous call to getstate
call Parameterized with the rest of the state
"""
self.preferred_optimizer = state.pop()
self.sampling_runs = state.pop()
self.optimization_runs = state.pop()
self.priors = state.pop()
Parameterised.__setstate__(self, state)
Parameterized.setstate(self, state)
def set_prior(self, regexp, what):
"""
@ -355,7 +354,7 @@ class Model(Parameterised):
return 0.5 * self._get_params().size * np.log(2 * np.pi) + self.log_likelihood() - hld
def __str__(self):
s = Parameterised.__str__(self).split('\n')
s = Parameterized.__str__(self).split('\n')
# add priors to the string
if self.priors is not None:
strs = [str(p) if p is not None else '' for p in self.priors]

View file

@ -9,7 +9,7 @@ import cPickle
import warnings
import transformations
class Parameterised(object):
class Parameterized(object):
def __init__(self):
"""
This is the base class for model and kernel. Mostly just handles tying and constraining of parameters
@ -20,19 +20,45 @@ class Parameterised(object):
self.constrained_indices = []
self.constraints = []
def pickle(self, filename, protocol= -1):
f = file(filename, 'w')
cPickle.dump(self, f, protocol)
f.close()
def _get_params(self):
raise NotImplementedError, "this needs to be implemented to use the Model class"
def _set_params(self, x):
raise NotImplementedError, "this needs to be implemented to use the Model class"
def pickle(self, filename, protocol=None):
if protocol is None:
if self._has_get_set_state():
protocol = 0
else:
protocol = -1
with open(filename, 'w') as f:
cPickle.dump(self, f, protocol)
def copy(self):
"""Returns a (deep) copy of the current model """
return copy.deepcopy(self)
def __getstate__(self):
if self._has_get_set_state():
return self.getstate()
return self.__dict__
def __setstate__(self, state):
if self._has_get_set_state():
return self.setstate(state)
self.__dict__ = state
def _has_get_set_state(self):
return 'getstate' in vars(self.__class__) and 'setstate' in vars(self.__class__)
def getstate(self):
"""
Get the current state of the class,
here just all the indices, rest can get recomputed
For inheriting from Parameterized:
Always append the state of the inherited object
and call down to the inherited object in setstate!!
"""
return [self.tied_indices,
self.fixed_indices,
@ -40,54 +66,13 @@ class Parameterised(object):
self.constrained_indices,
self.constraints]
def __setstate__(self, state):
def setstate(self, state):
self.constraints = state.pop()
self.constrained_indices = state.pop()
self.fixed_values = state.pop()
self.fixed_indices = state.pop()
self.tied_indices = state.pop()
@property
def params(self):
"""
Returns a **copy** of parameters in non transformed space
:see_also: :py:func:`GPy.core.Parameterised.params_transformed`
"""
return self._get_params()
@params.setter
def params(self, params):
self._set_params(params)
@property
def params_transformed(self):
"""
Returns a **copy** of parameters in transformed space
:see_also: :py:func:`GPy.core.Parameterised.params`
"""
return self._get_params_transformed()
@params_transformed.setter
def params_transformed(self, params):
self._set_params_transformed(params)
_get_set_deprecation = """get and set methods wont be available at next minor release
in the next releases you will get and set with following syntax:
Assume m is a model class:
print m['var'] # > prints all parameters matching 'var'
m['var'] = 2. # > sets all parameters matching 'var' to 2.
m['var'] = <array-like> # > sets parameters matching 'var' to <array-like>
"""
def get(self, regexp):
warnings.warn(self._get_set_deprecation, FutureWarning, stacklevel=2)
return self[regexp]
def set(self, regexp, val):
warnings.warn(self._get_set_deprecation, FutureWarning, stacklevel=2)
self[regexp] = val
def __getitem__(self, regexp, return_names=False):
"""
Get a model parameter by name. The name is applied as a regular
@ -113,13 +98,16 @@ class Parameterised(object):
if len(matches):
val = np.array(val)
assert (val.size == 1) or val.size == len(matches), "Shape mismatch: {}:({},)".format(val.size, len(matches))
x = self.params
x = self._get_params()
x[matches] = val
self.params = x
self._set_params(x)
else:
raise AttributeError, "no parameter matches %s" % name
def tie_params(self, regexp):
"""
Tie (all!) parameters matching the regular expression `regexp`.
"""
matches = self.grep_param_names(regexp)
assert matches.size > 0, "need at least something to tie together"
if len(self.tied_indices):

View file

@ -50,22 +50,22 @@ class SparseGP(GPBase):
if self.has_uncertain_inputs:
self.X_variance /= np.square(self._Xscale)
def __getstate__(self):
def getstate(self):
"""
Get the current state of the class,
here just all the indices, rest can get recomputed
"""
return GPBase.__getstate__(self) + [self.Z,
return GPBase.getstate(self) + [self.Z,
self.num_inducing,
self.has_uncertain_inputs,
self.X_variance]
def __setstate__(self, state):
def setstate(self, state):
self.X_variance = state.pop()
self.has_uncertain_inputs = state.pop()
self.num_inducing = state.pop()
self.Z = state.pop()
GPBase.__setstate__(self, state)
GPBase.setstate(self, state)
def _compute_kernel_matrices(self):
# kernel computations, using BGPLVM notation

View file

@ -91,6 +91,14 @@ class SVIGP(GPBase):
self._param_steplength_trace = []
self._vb_steplength_trace = []
def getstate(self):
return GPBase.getstate(self)
def setstate(self, state):
return GPBase.setstate(self, state)
def _compute_kernel_matrices(self):
# kernel computations, using BGPLVM notation
self.Kmm = self.kern.K(self.Z)

View file

@ -3,12 +3,12 @@
import numpy as np
import pylab as pb
from ..core.parameterised import Parameterised
from ..core.parameterized import Parameterized
from parts.kernpart import Kernpart
import itertools
from parts.prod import Prod as prod
class kern(Parameterised):
class kern(Parameterized):
def __init__(self, input_dim, parts=[], input_slices=None):
"""
This is the main kernel class for GPy. It handles multiple (additive) kernel functions, and keeps track of various things like which parameters live where.
@ -41,14 +41,14 @@ class kern(Parameterised):
self.compute_param_slices()
Parameterised.__init__(self)
Parameterized.__init__(self)
def __getstate__(self):
def getstate(self):
"""
Get the current state of the class,
here just all the indices, rest can get recomputed
"""
return Parameterised.__getstate__(self) + [self.parts,
return Parameterized.getstate(self) + [self.parts,
self.Nparts,
self.num_params,
self.input_dim,
@ -56,14 +56,14 @@ class kern(Parameterised):
self.param_slices
]
def __setstate__(self, state):
def setstate(self, state):
self.param_slices = state.pop()
self.input_slices = state.pop()
self.input_dim = state.pop()
self.num_params = state.pop()
self.Nparts = state.pop()
self.parts = state.pop()
Parameterised.__setstate__(self, state)
Parameterized.setstate(self, state)
def plot_ARD(self, fignum=None, ax=None, title=None):

View file

@ -48,16 +48,16 @@ class BayesianGPLVM(SparseGP, GPLVM):
SparseGP.__init__(self, X, likelihood, kernel, Z=Z, X_variance=X_variance, **kwargs)
self.ensure_default_constraints()
def __getstate__(self):
def getstate(self):
"""
Get the current state of the class,
here just all the indices, rest can get recomputed
"""
return SparseGP.__getstate__(self) + [self.init]
return SparseGP.getstate(self) + [self.init]
def __setstate__(self, state):
def setstate(self, state):
self.init = state.pop()
SparseGP.__setstate__(self, state)
SparseGP.setstate(self, state)
def _get_param_names(self):
X_names = sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])

View file

@ -25,11 +25,20 @@ class GPRegression(GP):
"""
def __init__(self,X,Y,kernel=None,normalize_X=False,normalize_Y=False):
def __init__(self, X, Y, kernel=None, normalize_X=False, normalize_Y=False):
if kernel is None:
kernel = kern.rbf(X.shape[1])
likelihood = likelihoods.Gaussian(Y,normalize=normalize_Y)
likelihood = likelihoods.Gaussian(Y, normalize=normalize_Y)
GP.__init__(self, X, likelihood, kernel, normalize_X=normalize_X)
self.ensure_default_constraints()
def getstate(self):
return GP.getstate(self)
def setstate(self, state):
return GP.setstate(self, state)
pass

View file

@ -84,8 +84,8 @@ class MRD(Model):
Model.__init__(self)
self.ensure_default_constraints()
def __getstate__(self):
return Model.__getstate__(self) + [self.names,
def getstate(self):
return Model.getstate(self) + [self.names,
self.bgplvms,
self.gref,
self.nparams,
@ -95,7 +95,7 @@ class MRD(Model):
self.NQ,
self.MQ]
def __setstate__(self, state):
def setstate(self, state):
self.MQ = state.pop()
self.NQ = state.pop()
self.num_data = state.pop()
@ -105,7 +105,7 @@ class MRD(Model):
self.gref = state.pop()
self.bgplvms = state.pop()
self.names = state.pop()
Model.__setstate__(self, state)
Model.setstate(self, state)
@property
def X(self):

View file

@ -28,7 +28,7 @@ class SparseGPClassification(SparseGP):
def __init__(self, X, Y=None, likelihood=None, kernel=None, normalize_X=False, normalize_Y=False, Z=None, num_inducing=10):
if kernel is None:
kernel = kern.rbf(X.shape[1]) + kern.white(X.shape[1],1e-3)
kernel = kern.rbf(X.shape[1]) + kern.white(X.shape[1], 1e-3)
if likelihood is None:
distribution = likelihoods.likelihood_functions.Binomial()
@ -41,7 +41,16 @@ class SparseGPClassification(SparseGP):
i = np.random.permutation(X.shape[0])[:num_inducing]
Z = X[i].copy()
else:
assert Z.shape[1]==X.shape[1]
assert Z.shape[1] == X.shape[1]
SparseGP.__init__(self, X, likelihood, kernel, Z=Z, normalize_X=normalize_X)
self.ensure_default_constraints()
def getstate(self):
return SparseGP.getstate(self)
def setstate(self, state):
return SparseGP.setstate(self, state)
pass

View file

@ -43,3 +43,13 @@ class SparseGPRegression(SparseGP):
SparseGP.__init__(self, X, likelihood, kernel, Z=Z, normalize_X=normalize_X, X_variance=X_variance)
self.ensure_default_constraints()
pass
def getstate(self):
return SparseGP.getstate(self)
def setstate(self, state):
return SparseGP.setstate(self, state)
pass

View file

@ -28,6 +28,14 @@ class SparseGPLVM(SparseGPRegression, GPLVM):
SparseGPRegression.__init__(self, X, Y, kernel=kernel, num_inducing=num_inducing)
self.ensure_default_constraints()
def getstate(self):
return SparseGPRegression.getstate(self)
def setstate(self, state):
return SparseGPRegression.setstate(self, state)
def _get_param_names(self):
return (sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])
+ SparseGPRegression._get_param_names(self))

View file

@ -42,3 +42,11 @@ class SVIGPRegression(SVIGP):
SVIGP.__init__(self, X, likelihood, kernel, Z, q_u=q_u, batchsize=batchsize)
self.load_batch()
def getstate(self):
return GPBase.getstate(self)
def setstate(self, state):
return GPBase.setstate(self, state)

View file

@ -28,6 +28,14 @@ class WarpedGP(GP):
GP.__init__(self, X, likelihood, kernel, normalize_X=normalize_X)
self._set_params(self._get_params())
def getstate(self):
return GP.getstate(self)
def setstate(self, state):
return GP.setstate(self, state)
def _scale_data(self, Y):
self._Ymax = Y.max()
self._Ymin = Y.min()