pickling and caching

This commit is contained in:
Max Zwiessele 2014-03-31 12:45:09 +01:00
parent 60a071f18f
commit f3b74fa85f
28 changed files with 481 additions and 686 deletions

View file

@@ -2,7 +2,6 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from gplvm import GPLVM
from .. import kern
from ..core import SparseGP
from ..likelihoods import Gaussian
@@ -61,18 +60,6 @@ class BayesianGPLVM(SparseGP):
SparseGP.__init__(self, X, Y, Z, kernel, likelihood, inference_method, name, **kwargs)
self.add_parameter(self.X, index=0)
def _getstate(self):
"""
Get the current state of the class,
here just all the indices, rest can get recomputed
"""
return SparseGP._getstate(self) + [self.init]
def _setstate(self, state):
self._const_jitter = None
self.init = state.pop()
SparseGP._setstate(self, state)
def set_X_gradients(self, X, X_grad):
"""Set the gradients of the posterior distribution of X in its specific form."""
X.mean.gradient, X.variance.gradient = X_grad

View file

@@ -29,8 +29,3 @@ class GPRegression(GP):
super(GPRegression, self).__init__(X, Y, kernel, likelihood, name='GP regression', Y_metadata=Y_metadata)
def _getstate(self):
return GP._getstate(self)
def _setstate(self, state):
return GP._setstate(self, state)

View file

@@ -44,12 +44,6 @@ class GPLVM(GP):
super(GPLVM, self).parameters_changed()
self.X.gradient = self.kern.gradients_X(self.grad_dict['dL_dK'], self.X, None)
def _getstate(self):
return GP._getstate(self)
def _setstate(self, state):
GP._setstate(self, state)
def jacobian(self,X):
target = np.zeros((X.shape[0],X.shape[1],self.output_dim))
for i in range(self.output_dim):

View file

@@ -65,14 +65,17 @@ class MRD(Model):
from ..kern import RBF
self.kern = [RBF(input_dim, ARD=1, lengthscale=fracs[i], name='rbf'.format(i)) for i in range(len(Ylist))]
elif isinstance(kernel, Kern):
self.kern = [kernel.copy(name='{}'.format(kernel.name, i)) for i in range(len(Ylist))]
self.kern = []
for i in range(len(Ylist)):
k = kernel.copy()
self.kern.append(k)
else:
assert len(kernel) == len(Ylist), "need one kernel per output"
assert all([isinstance(k, Kern) for k in kernel]), "invalid kernel object detected!"
self.kern = kernel
if X_variance is None:
X_variance = np.random.uniform(0, .1, X.shape)
X_variance = np.random.uniform(0.1, 0.2, X.shape)
self.variational_prior = NormalPrior()
self.X = NormalPosterior(X, X_variance)
@@ -108,8 +111,8 @@ class MRD(Model):
def parameters_changed(self):
self._log_marginal_likelihood = 0
self.posteriors = []
self.Z.gradient = 0.
self.X.gradient = 0.
self.Z.gradient[:] = 0.
self.X.gradient[:] = 0.
for y, k, l, i in itertools.izip(self.Ylist, self.kern, self.likelihood, self.inference_method):
posterior, lml, grad_dict = i.inference(k, self.X, self.Z, l, y)
@@ -160,6 +163,8 @@ class MRD(Model):
X = np.random.randn(Ylist[0].shape[0], self.input_dim)
fracs = X.var(0)
fracs = [fracs]*self.input_dim
X -= X.mean()
X /= X.std()
return X, fracs
def _init_Z(self, init="permute", X=None):

View file

@@ -46,11 +46,3 @@ class SparseGPClassification(SparseGP):
SparseGP.__init__(self, X, likelihood, kernel, Z=Z, normalize_X=normalize_X)
self.ensure_default_constraints()
def _getstate(self):
return SparseGP._getstate(self)
def _setstate(self, state):
return SparseGP._setstate(self, state)
pass

View file

@@ -51,14 +51,6 @@ class SparseGPRegression(SparseGP):
SparseGP.__init__(self, X, Y, Z, kernel, likelihood, inference_method=VarDTC())
def _getstate(self):
return SparseGP._getstate(self)
def _setstate(self, state):
return SparseGP._setstate(self, state)
class SparseGPRegressionUncertainInput(SparseGP):
"""
Gaussian Process model for regression with Gaussian variance on the inputs (X_variance)

View file

@@ -28,14 +28,6 @@ class SparseGPLVM(SparseGPRegression, GPLVM):
SparseGPRegression.__init__(self, X, Y, kernel=kernel, num_inducing=num_inducing)
self.ensure_default_constraints()
def _getstate(self):
return SparseGPRegression._getstate(self)
def _setstate(self, state):
return SparseGPRegression._setstate(self, state)
def _get_param_names(self):
return (sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])
+ SparseGPRegression._get_param_names(self))

View file

@@ -43,10 +43,3 @@ class SVIGPRegression(SVIGP):
SVIGP.__init__(self, X, likelihood, kernel, Z, q_u=q_u, batchsize=batchsize)
self.load_batch()
def _getstate(self):
return GPBase._getstate(self)
def _setstate(self, state):
return GPBase._setstate(self, state)

View file

@@ -30,14 +30,6 @@ class WarpedGP(GP):
GP.__init__(self, X, likelihood, kernel, normalize_X=normalize_X)
self._set_params(self._get_params())
def _getstate(self):
return GP._getstate(self)
def _setstate(self, state):
return GP._setstate(self, state)
def _scale_data(self, Y):
self._Ymax = Y.max()
self._Ymin = Y.min()