[pickling] _src -> src

Max Zwiessele 2015-10-16 14:56:32 +01:00
parent b236896fbd
commit 4cd16a86b4
9 changed files with 69 additions and 34 deletions


@@ -40,6 +40,8 @@ def load(file_or_path):
     :param file_name: path/to/file.pickle
     """
+    # This is the pickling pain when changing _src -> src
+    try:
         try:
             import cPickle as pickle
             if isinstance(file_or_path, basestring):
@@ -54,4 +56,12 @@ def load(file_or_path):
                     m = pickle.load(f)
             else:
                 m = pickle.load(file_or_path)
+    except ImportError:
+        import sys
+        import inspect
+        sys.modules['GPy.kern._src'] = kern.src
+        for name, module in inspect.getmembers(kern.src):
+            if not name.startswith('_'):
+                sys.modules['GPy.kern._src.{}'.format(name)] = module
+        m = load(file_or_path)
     return m
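
Note: the except ImportError fallback above works because a pickle records the defining module path of every class it contains; once GPy.kern._src is renamed to GPy.kern.src, old pickle files can no longer import that path. Registering the renamed module (and its public submodules) under the old dotted names in sys.modules lets those records resolve again, after which the file is simply re-loaded. Below is a minimal, self-contained sketch of the same idea; the names oldmodule, newmodule and Thing are made up for illustration and are not GPy's real packages.

import sys
import types
import pickle

# Build the "renamed" module at runtime so the example stands on its own;
# in GPy this role is played by the real GPy.kern.src package.
newmodule = types.ModuleType('newmodule')

class Thing(object):
    def __init__(self, value):
        self.value = value

Thing.__module__ = 'newmodule'     # pretend Thing is defined in newmodule
newmodule.Thing = Thing
sys.modules['newmodule'] = newmodule

# Fake an "old" pickle: protocol 2 stores the module path as plain text,
# so rewriting it reproduces a file written before the rename.
old_blob = pickle.dumps(Thing(42), protocol=2).replace(b'newmodule', b'oldmodule')

try:
    pickle.loads(old_blob)                 # fails: no module named 'oldmodule'
except ImportError:
    sys.modules['oldmodule'] = newmodule   # alias the old name to the new module
    restored = pickle.loads(old_blob)      # now Thing resolves again
    print(restored.value)                  # -> 42

In the GPy case only the submodule paths need aliasing, since the parent package GPy.kern still exists; the inspect.getmembers loop covers the individual kernel modules an old pickle may reference.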


@@ -121,6 +121,10 @@ class GP(Model):
     # W_{pp} := \texttt{Woodbury inv}
     # p := _predictive_variable
+    def __setstate__(self, state):
+        self.mean_function = None
+        super(GP, self).__setstate__(state)
     @property
     def _predictive_variable(self):
         return self.X


@@ -30,6 +30,10 @@ class Model(Parameterized):
         self.obj_grads = None
         self.add_observer(self.tie, self.tie._parameters_changed_notification, priority=-500)
+    def __setstate__(self, state):
+        self.obj_grads = None
+        super(Model, self).__setstate__(state)
     def log_likelihood(self):
         raise NotImplementedError("this needs to be implemented to use the model class")
     def _log_likelihood_gradients(self):
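
GP.__setstate__ and Model.__setstate__ above follow the same defensive pattern: a pickle written by an older GPy only stores the attributes that existed at the time, so any attribute added since (mean_function, obj_grads) is seeded with a default before the parent class restores the pickled state. A small sketch of the pattern; Base and Widget are illustrative names, not GPy classes.

import pickle

class Base(object):
    def __setstate__(self, state):
        self.__dict__.update(state)

class Widget(Base):
    def __init__(self):
        self.data = [1, 2, 3]
        self.mean_function = None     # attribute added in a newer version

    def __setstate__(self, state):
        # Seed newer attributes first: an old pickle's state dict simply
        # does not contain them, so the parent update would leave them unset.
        self.mean_function = None
        super(Widget, self).__setstate__(state)

# Fake an "old" pickle: build the object, drop the newer attribute, re-pickle.
w = Widget()
del w.mean_function
old_blob = pickle.dumps(w)

restored = pickle.loads(old_blob)
print(restored.data)            # -> [1, 2, 3]
print(restored.mean_function)   # -> None, supplied by the __setstate__ default

Setting the default before calling super means a newer pickle that does carry the attribute simply overwrites it, while an older one keeps the default.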


@@ -90,7 +90,7 @@ class Param(Parameterizable, ObsAr):
         self._original_ = getattr(obj, '_original_', None)
         self._name = getattr(obj, '_name', None)
         self._gradient_array_ = getattr(obj, '_gradient_array_', None)
-        self._update_on = getattr(obj, '_update_on', None)
+        self.__update_on = getattr(obj, '__update_on', None)
         self.constraints = getattr(obj, 'constraints', None)
         self.priors = getattr(obj, 'priors', None)


@@ -34,6 +34,15 @@ class Updateable(Observable):
             p.traverse(turn_updates)
         self.trigger_update()
+    @property
+    def _update_on(self):
+        if not hasattr(self, '__update_on'):
+            self.__update_on = True
+        return self.__update_on
+    @_update_on.setter
+    def _update_on(self, update):
+        self.__update_on = update
     def toggle_update(self):
         print("deprecated: toggle_update was renamed to update_toggle for easier access")
         self.update_toggle()
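
The _update_on property serves the same backwards-compatibility goal from another angle (the _all_dims_active property added to Kern further down works the same way): instead of patching __setstate__, the getter lazily creates its backing attribute on first access, so instances unpickled from before the attribute existed still return a sensible default. A sketch of that pattern with an illustrative class; the backing field uses a single underscore (_update_on_flag) purely to keep the example short.

class LazyFlag(object):
    @property
    def _update_on(self):
        # Create the backing attribute on first access; instances restored
        # from pickles that predate the attribute then default to True.
        if not hasattr(self, '_update_on_flag'):
            self._update_on_flag = True
        return self._update_on_flag

    @_update_on.setter
    def _update_on(self, update):
        self._update_on_flag = update

f = LazyFlag()
print(f._update_on)    # -> True, although nothing ever set it
f._update_on = False
print(f._update_on)    # -> False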


@@ -27,7 +27,7 @@ class Optimizer(object):
     :rtype: optimizer object.
     """
-    def __init__(self, x_init, messages=False, model=None, max_f_eval=1e4, max_iters=1e3,
+    def __init__(self, x_init=None, messages=False, model=None, max_f_eval=1e4, max_iters=1e3,
                  ftol=None, gtol=None, xtol=None, bfgs_factor=None):
         self.opt_name = None
         self.x_init = x_init


@@ -1,10 +1,8 @@
 """
 Kernel module the kernels to sit in.
-.. automodule:: .src
-    :members:
-    :private-members:
 """
+from . import src
 from .src.kern import Kern
 from .src.add import Add
 from .src.prod import Prod


@@ -54,7 +54,7 @@ class Kern(Parameterized):
         self.active_dims = active_dims
         self._all_dims_active = np.atleast_1d(active_dims).astype(int)
-        assert self._all_dims_active.size == self.input_dim, "input_dim={} does not match len(active_dim)={}, _all_dims_active={}".format(self.input_dim, self._all_dims_active.size, self._all_dims_active)
+        assert self._all_dims_active.size == self.input_dim, "input_dim={} does not match len(active_dim)={}, active_dim={}".format(self.input_dim, self._all_dims_active.size, self._all_dims_active)
         self._sliced_X = 0
         self.useGPU = self._support_GPU and useGPU
@@ -62,6 +62,15 @@ class Kern(Parameterized):
             from .psi_comp import PSICOMP_GH
             self.psicomp = PSICOMP_GH()
+    @property
+    def _all_dims_active(self):
+        if not hasattr(self, '__all_dims_active'):
+            self.__all_dims_active = np.asanyarray(self.active_dims)
+        return self.__all_dims_active
+    @_all_dims_active.setter
+    def _all_dims_active(self, active_dims):
+        self.__all_dims_active = np.asanyarray(active_dims)
     @property
     def _effective_input_dim(self):
         return self._all_dims_active.size


@@ -47,6 +47,7 @@ class RBF(Stationary):
         return dc
     def __setstate__(self, state):
+        self.use_invLengthscale = False
         return super(RBF, self).__setstate__(state)
     def spectrum(self, omega):