diff --git a/GPy/core/gp.py b/GPy/core/gp.py
index e3bbc85d..6587fdd5 100644
--- a/GPy/core/gp.py
+++ b/GPy/core/gp.py
@@ -6,7 +6,7 @@
 import numpy as np
 import pylab as pb
 from .. import kern
 from ..util.linalg import pdinv, mdot, tdot, dpotrs, dtrtrs
-#from ..util.plot import gpplot, Tango
+# from ..util.plot import gpplot, Tango
 from ..likelihoods import EP
 from gp_base import GPBase
@@ -46,12 +46,12 @@ class GP(GPBase):
 
         # the gradient of the likelihood wrt the covariance matrix
         if self.likelihood.YYT is None:
-            #alpha = np.dot(self.Ki, self.likelihood.Y)
-            alpha,_ = dpotrs(self.L, self.likelihood.Y,lower=1)
+            # alpha = np.dot(self.Ki, self.likelihood.Y)
+            alpha, _ = dpotrs(self.L, self.likelihood.Y, lower=1)
             self.dL_dK = 0.5 * (tdot(alpha) - self.output_dim * self.Ki)
         else:
-            #tmp = mdot(self.Ki, self.likelihood.YYT, self.Ki)
+            # tmp = mdot(self.Ki, self.likelihood.YYT, self.Ki)
             tmp, _ = dpotrs(self.L, np.asfortranarray(self.likelihood.YYT), lower=1)
             tmp, _ = dpotrs(self.L, np.asfortranarray(tmp.T), lower=1)
             self.dL_dK = 0.5 * (tmp - self.output_dim * self.Ki)
 
@@ -72,7 +72,7 @@ class GP(GPBase):
         """
         self.likelihood.restart()
         self.likelihood.fit_full(self.kern.K(self.X))
-        self._set_params(self._get_params()) # update the GP
+        self._set_params(self._get_params())  # update the GP
 
     def _model_fit_term(self):
         """
@@ -81,7 +81,7 @@ class GP(GPBase):
         """
         if self.likelihood.YYT is None:
             tmp, _ = dtrtrs(self.L, np.asfortranarray(self.likelihood.Y), lower=1)
             return -0.5 * np.sum(np.square(tmp))
-            #return -0.5 * np.sum(np.square(np.dot(self.Li, self.likelihood.Y)))
+            # return -0.5 * np.sum(np.square(np.dot(self.Li, self.likelihood.Y)))
         else:
             return -0.5 * np.sum(np.multiply(self.Ki, self.likelihood.YYT))
@@ -104,13 +104,13 @@ class GP(GPBase):
         """
         return np.hstack((self.kern.dK_dtheta(dL_dK=self.dL_dK, X=self.X), self.likelihood._gradients(partial=np.diag(self.dL_dK))))
 
-    def _raw_predict(self, _Xnew, which_parts='all', full_cov=False,stop=False):
+    def _raw_predict(self, _Xnew, which_parts='all', full_cov=False, stop=False):
         """
         Internal helper function for making predictions, does not account
         for normalization or likelihood
         """
-        Kx = self.kern.K(_Xnew,self.X,which_parts=which_parts).T
-        #KiKx = np.dot(self.Ki, Kx)
+        Kx = self.kern.K(_Xnew, self.X, which_parts=which_parts).T
+        # KiKx = np.dot(self.Ki, Kx)
         KiKx, _ = dpotrs(self.L, np.asfortranarray(Kx), lower=1)
         mu = np.dot(KiKx.T, self.likelihood.Y)
         if full_cov:
diff --git a/GPy/core/gp_base.py b/GPy/core/gp_base.py
index a1d182d6..d4f63295 100644
--- a/GPy/core/gp_base.py
+++ b/GPy/core/gp_base.py
@@ -29,7 +29,7 @@ class GPBase(Model):
             self._Xscale = np.ones((1, self.input_dim))
 
         super(GPBase, self).__init__()
-        #Model.__init__(self)
+        # Model.__init__(self)
 
         # All leaf nodes should call self._set_params(self._get_params()) at
         # the end
@@ -57,7 +57,6 @@ class GPBase(Model):
         self.num_data = state.pop()
         self.X = state.pop()
         Model.__setstate__(self, state)
-        self._set_params(self._get_params())
 
     def plot_f(self, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, full_cov=False, fignum=None, ax=None):
         """
diff --git a/GPy/kern/kern.py b/GPy/kern/kern.py
index 2c9cc154..aa916940 100644
--- a/GPy/kern/kern.py
+++ b/GPy/kern/kern.py
@@ -43,6 +43,28 @@ class kern(Parameterised):
 
         Parameterised.__init__(self)
 
+    def __getstate__(self):
+        """
+        Get the current state of the class,
+        here just all the indices, rest can get recomputed
+        """
+        return Parameterised.__getstate__(self) + [self.parts,
+                self.Nparts,
+                self.num_params,
+                self.input_dim,
+                self.input_slices,
+                self.param_slices
+                ]
+
+    def __setstate__(self, state):
+        self.param_slices = state.pop()
+        self.input_slices = state.pop()
+        self.input_dim = state.pop()
+        self.num_params = state.pop()
+        self.Nparts = state.pop()
+        self.parts = state.pop()
+        Parameterised.__setstate__(self, state)
+
     def plot_ARD(self, fignum=None, ax=None, title=None):
         """If an ARD kernel is present, it bar-plots the ARD parameters"""
 
diff --git a/GPy/models/bayesian_gplvm.py b/GPy/models/bayesian_gplvm.py
index 7fddfbfd..8ea96405 100644
--- a/GPy/models/bayesian_gplvm.py
+++ b/GPy/models/bayesian_gplvm.py
@@ -53,7 +53,7 @@ class BayesianGPLVM(SparseGP, GPLVM):
         Get the current state of the class,
         here just all the indices, rest can get recomputed
         """
-        return [self.init] + SparseGP.__getstate__(self)
+        return SparseGP.__getstate__(self) + [self.init]
 
     def __setstate__(self, state):
         self.init = state.pop()
diff --git a/GPy/models/mrd.py b/GPy/models/mrd.py
index 774face8..32bd2930 100644
--- a/GPy/models/mrd.py
+++ b/GPy/models/mrd.py
@@ -85,7 +85,7 @@ class MRD(Model):
         self.ensure_default_constraints()
 
     def __getstate__(self):
-        return [self.names,
+        return Model.__getstate__(self) + [self.names,
                 self.bgplvms,
                 self.gref,
                 self.nparams,
@@ -105,6 +105,7 @@ class MRD(Model):
         self.gref = state.pop()
         self.bgplvms = state.pop()
         self.names = state.pop()
+        Model.__setstate__(self, state)
 
     @property
     def X(self):
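Note on the pickling changes above: every `__getstate__`/`__setstate__` pair treats the pickled state as a stack. `__getstate__` returns the parent's state list with the subclass's own fields appended (hence the reorderings to `SparseGP.__getstate__(self) + [self.init]` and `Model.__getstate__(self) + [self.names, ...]`), and `__setstate__` pops those fields in reverse order before delegating to the parent (hence the added `Model.__setstate__(self, state)` in mrd.py, without which the base state was never restored). A minimal, self-contained sketch of that discipline follows; the `Base` and `Leaf` classes are hypothetical and not part of GPy:

    import pickle

    class Base(object):
        def __init__(self):
            self.base_field = "base"

        def __getstate__(self):
            # Parent state comes first; subclasses append their fields after it.
            return [self.base_field]

        def __setstate__(self, state):
            self.base_field = state.pop()

    class Leaf(Base):
        def __init__(self):
            Base.__init__(self)
            self.leaf_field = "leaf"

        def __getstate__(self):
            # Append after the parent's list, mirroring
            # SparseGP.__getstate__(self) + [self.init] in the diff.
            return Base.__getstate__(self) + [self.leaf_field]

        def __setstate__(self, state):
            # Own fields sit at the end of the list, so pop them first,
            # then hand the remainder up to the parent.
            self.leaf_field = state.pop()
            Base.__setstate__(self, state)

    restored = pickle.loads(pickle.dumps(Leaf()))
    assert restored.base_field == "base"
    assert restored.leaf_field == "leaf"

The pop order in each `__setstate__` must be the exact reverse of the append order in the matching `__getstate__`; getting it wrong can assign fields to the wrong attributes silently rather than raising an error, which is why bayesian_gplvm.py moves `self.init` to the end of the list to match `self.init = state.pop()` on the restore path.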