[pickling] _src -> src
This commit is contained in:
parent b236896fbd
commit 4cd16a86b4

9 changed files with 69 additions and 34 deletions
@@ -40,18 +40,28 @@ def load(file_or_path):
     :param file_name: path/to/file.pickle
     """
-    try:
-        import cPickle as pickle
-        if isinstance(file_or_path, basestring):
-            with open(file_or_path, 'rb') as f:
-                m = pickle.load(f)
-        else:
-            m = pickle.load(file_or_path)
-    except:
-        import pickle
-        if isinstance(file_or_path, str):
-            with open(file_or_path, 'rb') as f:
-                m = pickle.load(f)
-        else:
-            m = pickle.load(file_or_path)
+    # This is the pickling pain when changing _src -> src
+    try:
+        try:
+            import cPickle as pickle
+            if isinstance(file_or_path, basestring):
+                with open(file_or_path, 'rb') as f:
+                    m = pickle.load(f)
+            else:
+                m = pickle.load(file_or_path)
+        except:
+            import pickle
+            if isinstance(file_or_path, str):
+                with open(file_or_path, 'rb') as f:
+                    m = pickle.load(f)
+            else:
+                m = pickle.load(file_or_path)
+    except ImportError:
+        import sys
+        import inspect
+        sys.modules['GPy.kern._src'] = kern.src
+        for name, module in inspect.getmembers(kern.src):
+            if not name.startswith('_'):
+                sys.modules['GPy.kern._src.{}'.format(name)] = module
+        m = load(file_or_path)
     return m

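The except ImportError branch is the heart of the fix: old pickles still reference the module path GPy.kern._src, so the loader registers the relocated GPy.kern.src under the old name in sys.modules and retries. A minimal stand-alone sketch of that aliasing trick, with hypothetical package, module, and file names:

    import sys
    import inspect
    import pickle

    import mypkg.newname as newname  # module formerly importable as mypkg._oldname

    # Point the old import path at the relocated module so pickles that
    # recorded 'mypkg._oldname.SomeClass' can still resolve it.
    sys.modules['mypkg._oldname'] = newname
    for name, submodule in inspect.getmembers(newname, inspect.ismodule):
        sys.modules['mypkg._oldname.{}'.format(name)] = submodule

    with open('old_object.pickle', 'rb') as f:  # hypothetical old pickle
        obj = pickle.load(f)  # lookups of mypkg._oldname.* now succeed
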
@@ -121,6 +121,10 @@ class GP(Model):
     # W_{pp} := \texttt{Woodbury inv}
     # p := _predictive_variable
 
+    def __setstate__(self, state):
+        self.mean_function = None
+        super(GP, self).__setstate__(state)
+
     @property
     def _predictive_variable(self):
         return self.X

@@ -30,6 +30,10 @@ class Model(Parameterized):
         self.obj_grads = None
         self.add_observer(self.tie, self.tie._parameters_changed_notification, priority=-500)
 
+    def __setstate__(self, state):
+        self.obj_grads = None
+        super(Model, self).__setstate__(state)
+
     def log_likelihood(self):
         raise NotImplementedError("this needs to be implemented to use the model class")
 
     def _log_likelihood_gradients(self):

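Both GP.__setstate__ and Model.__setstate__ above follow the same backward-compatibility pattern: seed a default for an attribute that older pickles do not contain, then hand off to the parent class to restore whatever state was saved. A minimal sketch of the idea, assuming a plain class rather than GPy's Parameterized hierarchy:

    class Example(object):
        def __setstate__(self, state):
            self.obj_grads = None        # attribute added after old pickles were written
            self.__dict__.update(state)  # saved values overwrite the default if present
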
@@ -90,7 +90,7 @@ class Param(Parameterizable, ObsAr):
         self._original_ = getattr(obj, '_original_', None)
         self._name = getattr(obj, '_name', None)
         self._gradient_array_ = getattr(obj, '_gradient_array_', None)
-        self._update_on = getattr(obj, '_update_on', None)
+        self.__update_on = getattr(obj, '__update_on', None)
         self.constraints = getattr(obj, 'constraints', None)
         self.priors = getattr(obj, 'priors', None)

@@ -34,6 +34,15 @@ class Updateable(Observable):
         p.traverse(turn_updates)
         self.trigger_update()
 
+    @property
+    def _update_on(self):
+        if not hasattr(self, '__update_on'):
+            self.__update_on = True
+        return self.__update_on
+
+    @_update_on.setter
+    def _update_on(self, update):
+        self.__update_on = update
+
     def toggle_update(self):
         print("deprecated: toggle_update was renamed to update_toggle for easier access")
         self.update_toggle()

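One subtlety worth flagging in the property pair above (and in the matching _all_dims_active property in the Kern hunk further down): inside a class body Python name-mangles the identifier self.__update_on to self._Updateable__update_on, but the string literal passed to hasattr() is not mangled, so hasattr(self, '__update_on') never sees the attribute the setter created and the default is re-applied on every read. A minimal sketch of the lazy-default pattern with the names kept consistent, using a stand-alone class:

    class Updateable(object):
        @property
        def _update_on(self):
            # hasattr must use the mangled name; checking '__update_on'
            # would always fail and silently reset the default
            if not hasattr(self, '_Updateable__update_on'):
                self.__update_on = True  # stored as _Updateable__update_on
            return self.__update_on

        @_update_on.setter
        def _update_on(self, update):
            self.__update_on = update
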
@@ -27,7 +27,7 @@ class Optimizer(object):
     :rtype: optimizer object.
 
     """
-    def __init__(self, x_init, messages=False, model=None, max_f_eval=1e4, max_iters=1e3,
+    def __init__(self, x_init=None, messages=False, model=None, max_f_eval=1e4, max_iters=1e3,
                  ftol=None, gtol=None, xtol=None, bfgs_factor=None):
         self.opt_name = None
         self.x_init = x_init

@@ -1,10 +1,8 @@
 """
 Kernel module the kernels to sit in.
 
-.. automodule:: .src
-    :members:
-    :private-members:
 """
+from . import src
 from .src.kern import Kern
 from .src.add import Add
 from .src.prod import Prod

@@ -54,7 +54,7 @@ class Kern(Parameterized):
         self.active_dims = active_dims
         self._all_dims_active = np.atleast_1d(active_dims).astype(int)
 
-        assert self._all_dims_active.size == self.input_dim, "input_dim={} does not match len(active_dim)={}, _all_dims_active={}".format(self.input_dim, self._all_dims_active.size, self._all_dims_active)
+        assert self._all_dims_active.size == self.input_dim, "input_dim={} does not match len(active_dim)={}, active_dim={}".format(self.input_dim, self._all_dims_active.size, self._all_dims_active)
 
         self._sliced_X = 0
         self.useGPU = self._support_GPU and useGPU

@@ -62,6 +62,15 @@ class Kern(Parameterized):
             from .psi_comp import PSICOMP_GH
             self.psicomp = PSICOMP_GH()
 
+    @property
+    def _all_dims_active(self):
+        if not hasattr(self, '__all_dims_active'):
+            self.__all_dims_active = np.asanyarray(self.active_dims)
+        return self.__all_dims_active
+
+    @_all_dims_active.setter
+    def _all_dims_active(self, active_dims):
+        self.__all_dims_active = np.asanyarray(active_dims)
+
     @property
     def _effective_input_dim(self):
         return self._all_dims_active.size

@@ -47,6 +47,7 @@ class RBF(Stationary):
         return dc
 
     def __setstate__(self, state):
+        self.use_invLengthscale = False
         return super(RBF, self).__setstate__(state)
 
     def spectrum(self, omega):