[pickle] Fixed load errors caused by kernel changes; resolved backwards-compatibility issues

This commit is contained in:
Max Zwiessele 2015-11-09 10:09:07 +00:00
parent 6f9c5042f9
commit 850c10beaa
4 changed files with 42 additions and 53 deletions

View file

@ -52,24 +52,5 @@ def load(file_or_path):
for name, module in inspect.getmembers(kern.src): # @UndefinedVariable for name, module in inspect.getmembers(kern.src): # @UndefinedVariable
if not name.startswith('_'): if not name.startswith('_'):
sys.modules['GPy.kern._src.{}'.format(name)] = module sys.modules['GPy.kern._src.{}'.format(name)] = module
try: import paramz
import cPickle as pickle return paramz.load(file_or_path)
if isinstance(file_or_path, basestring):
with open(file_or_path, 'rb') as f:
m = pickle.load(f)
else:
m = pickle.load(file_or_path)
except: # python3
import pickle # @Reimport
if isinstance(file_or_path, str):
with open(file_or_path, 'rb') as f:
#u = pickle._Unpickler(f) # @UndefinedVariable
#u.encoding = 'latin1'
#m = u.load()
m = pickle.load(f, encoding='latin1')#
else:
#u = pickle._Unpickler(file_or_path) # @UndefinedVariable
#u.encoding = 'latin1'
#m = u.load(protocol=2)
m = pickle.load(f, encoding='latin1')#
return m

View file

@ -27,10 +27,10 @@ class Optimizer(object):
:rtype: optimizer object. :rtype: optimizer object.
""" """
def __init__(self, x_init, messages=False, model=None, max_f_eval=1e4, max_iters=1e3, def __init__(self, x_init=None, messages=False, max_f_eval=1e4, max_iters=1e3,
ftol=None, gtol=None, xtol=None, bfgs_factor=None): ftol=None, gtol=None, xtol=None, bfgs_factor=None):
self.opt_name = None self.opt_name = None
self.x_init = x_init #x_init = x_init
# Turning messages off and using internal structure for print outs: # Turning messages off and using internal structure for print outs:
self.messages = False #messages self.messages = False #messages
self.f_opt = None self.f_opt = None
@ -45,7 +45,6 @@ class Optimizer(object):
self.xtol = xtol self.xtol = xtol
self.gtol = gtol self.gtol = gtol
self.ftol = ftol self.ftol = ftol
self.model = model
def run(self, **kwargs): def run(self, **kwargs):
start = dt.datetime.now() start = dt.datetime.now()
@ -53,7 +52,7 @@ class Optimizer(object):
end = dt.datetime.now() end = dt.datetime.now()
self.time = str(end - start) self.time = str(end - start)
def opt(self, f_fp=None, f=None, fp=None): def opt(self, x_init, f_fp=None, f=None, fp=None):
raise NotImplementedError("this needs to be implemented to use the optimizer class") raise NotImplementedError("this needs to be implemented to use the optimizer class")
def __str__(self): def __str__(self):
@ -64,12 +63,16 @@ class Optimizer(object):
diagnostics += "Time elapsed: \t\t\t\t %s\n" % self.time diagnostics += "Time elapsed: \t\t\t\t %s\n" % self.time
return diagnostics return diagnostics
def __getstate__(self):
return []
class opt_tnc(Optimizer): class opt_tnc(Optimizer):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
Optimizer.__init__(self, *args, **kwargs) Optimizer.__init__(self, *args, **kwargs)
self.opt_name = "TNC (Scipy implementation)" self.opt_name = "TNC (Scipy implementation)"
def opt(self, f_fp=None, f=None, fp=None): def opt(self, x_init, f_fp=None, f=None, fp=None):
""" """
Run the TNC optimizer Run the TNC optimizer
@ -87,7 +90,7 @@ class opt_tnc(Optimizer):
if self.gtol is not None: if self.gtol is not None:
opt_dict['pgtol'] = self.gtol opt_dict['pgtol'] = self.gtol
opt_result = optimize.fmin_tnc(f_fp, self.x_init, messages=self.messages, opt_result = optimize.fmin_tnc(f_fp, x_init, messages=self.messages,
maxfun=self.max_f_eval, **opt_dict) maxfun=self.max_f_eval, **opt_dict)
self.x_opt = opt_result[0] self.x_opt = opt_result[0]
self.f_opt = f_fp(self.x_opt)[0] self.f_opt = f_fp(self.x_opt)[0]
@ -99,7 +102,7 @@ class opt_lbfgsb(Optimizer):
Optimizer.__init__(self, *args, **kwargs) Optimizer.__init__(self, *args, **kwargs)
self.opt_name = "L-BFGS-B (Scipy implementation)" self.opt_name = "L-BFGS-B (Scipy implementation)"
def opt(self, f_fp=None, f=None, fp=None): def opt(self, x_init, f_fp=None, f=None, fp=None):
""" """
Run the optimizer Run the optimizer
@ -123,7 +126,7 @@ class opt_lbfgsb(Optimizer):
if self.bfgs_factor is not None: if self.bfgs_factor is not None:
opt_dict['factr'] = self.bfgs_factor opt_dict['factr'] = self.bfgs_factor
opt_result = optimize.fmin_l_bfgs_b(f_fp, self.x_init, iprint=iprint, opt_result = optimize.fmin_l_bfgs_b(f_fp, x_init, iprint=iprint,
maxfun=self.max_iters, **opt_dict) maxfun=self.max_iters, **opt_dict)
self.x_opt = opt_result[0] self.x_opt = opt_result[0]
self.f_opt = f_fp(self.x_opt)[0] self.f_opt = f_fp(self.x_opt)[0]
@ -139,7 +142,7 @@ class opt_bfgs(Optimizer):
Optimizer.__init__(self, *args, **kwargs) Optimizer.__init__(self, *args, **kwargs)
self.opt_name = "BFGS (Scipy implementation)" self.opt_name = "BFGS (Scipy implementation)"
def opt(self, f_fp=None, f=None, fp=None): def opt(self, x_init, f_fp=None, f=None, fp=None):
""" """
Run the optimizer Run the optimizer
@ -154,7 +157,7 @@ class opt_bfgs(Optimizer):
if self.gtol is not None: if self.gtol is not None:
opt_dict['pgtol'] = self.gtol opt_dict['pgtol'] = self.gtol
opt_result = optimize.fmin_bfgs(f, self.x_init, fp, disp=self.messages, opt_result = optimize.fmin_bfgs(f, x_init, fp, disp=self.messages,
maxiter=self.max_iters, full_output=True, **opt_dict) maxiter=self.max_iters, full_output=True, **opt_dict)
self.x_opt = opt_result[0] self.x_opt = opt_result[0]
self.f_opt = f_fp(self.x_opt)[0] self.f_opt = f_fp(self.x_opt)[0]
@ -166,7 +169,7 @@ class opt_simplex(Optimizer):
Optimizer.__init__(self, *args, **kwargs) Optimizer.__init__(self, *args, **kwargs)
self.opt_name = "Nelder-Mead simplex routine (via Scipy)" self.opt_name = "Nelder-Mead simplex routine (via Scipy)"
def opt(self, f_fp=None, f=None, fp=None): def opt(self, x_init, f_fp=None, f=None, fp=None):
""" """
The simplex optimizer does not require gradients. The simplex optimizer does not require gradients.
""" """
@ -181,7 +184,7 @@ class opt_simplex(Optimizer):
if self.gtol is not None: if self.gtol is not None:
print("WARNING: simplex doesn't have an gtol arg, so I'm going to ignore it") print("WARNING: simplex doesn't have an gtol arg, so I'm going to ignore it")
opt_result = optimize.fmin(f, self.x_init, (), disp=self.messages, opt_result = optimize.fmin(f, x_init, (), disp=self.messages,
maxfun=self.max_f_eval, full_output=True, **opt_dict) maxfun=self.max_f_eval, full_output=True, **opt_dict)
self.x_opt = opt_result[0] self.x_opt = opt_result[0]
@ -196,7 +199,7 @@ class opt_rasm(Optimizer):
Optimizer.__init__(self, *args, **kwargs) Optimizer.__init__(self, *args, **kwargs)
self.opt_name = "Rasmussen's Conjugate Gradient" self.opt_name = "Rasmussen's Conjugate Gradient"
def opt(self, f_fp=None, f=None, fp=None): def opt(self, x_init, f_fp=None, f=None, fp=None):
""" """
Run Rasmussen's Conjugate Gradient optimizer Run Rasmussen's Conjugate Gradient optimizer
""" """
@ -213,7 +216,7 @@ class opt_rasm(Optimizer):
if self.gtol is not None: if self.gtol is not None:
print("WARNING: minimize doesn't have an gtol arg, so I'm going to ignore it") print("WARNING: minimize doesn't have an gtol arg, so I'm going to ignore it")
opt_result = rasm.minimize(self.x_init, f_fp, (), messages=self.messages, opt_result = rasm.minimize(x_init, f_fp, (), messages=self.messages,
maxnumfuneval=self.max_f_eval) maxnumfuneval=self.max_f_eval)
self.x_opt = opt_result[0] self.x_opt = opt_result[0]
self.f_opt = opt_result[1][-1] self.f_opt = opt_result[1][-1]
@ -230,11 +233,11 @@ class opt_SCG(Optimizer):
self.opt_name = "Scaled Conjugate Gradients" self.opt_name = "Scaled Conjugate Gradients"
def opt(self, f_fp=None, f=None, fp=None): def opt(self, x_init, f_fp=None, f=None, fp=None):
assert not f is None assert not f is None
assert not fp is None assert not fp is None
opt_result = SCG(f, fp, self.x_init, display=self.messages, opt_result = SCG(f, fp, x_init, display=self.messages,
maxiters=self.max_iters, maxiters=self.max_iters,
max_f_eval=self.max_f_eval, max_f_eval=self.max_f_eval,
xtol=self.xtol, ftol=self.ftol, xtol=self.xtol, ftol=self.ftol,
@ -254,12 +257,12 @@ class Opt_Adadelta(Optimizer):
self.decay = decay self.decay = decay
self.momentum = momentum self.momentum = momentum
def opt(self, f_fp=None, f=None, fp=None): def opt(self, x_init, f_fp=None, f=None, fp=None):
assert not fp is None assert not fp is None
import climin import climin
opt = climin.adadelta.Adadelta(self.x_init, fp, step_rate=self.step_rate, decay=self.decay, momentum=self.momentum) opt = climin.adadelta.Adadelta(x_init, fp, step_rate=self.step_rate, decay=self.decay, momentum=self.momentum)
for info in opt: for info in opt:
if info['n_iter']>=self.max_iters: if info['n_iter']>=self.max_iters:

View file

@ -60,6 +60,10 @@ class Kern(Parameterized):
from .psi_comp import PSICOMP_GH from .psi_comp import PSICOMP_GH
self.psicomp = PSICOMP_GH() self.psicomp = PSICOMP_GH()
def __setstate__(self, state):
self._all_dims_active = range(0, max(state['active_dims'])+1)
super(Kern, self).__setstate__(state)
@property @property
def _effective_input_dim(self): def _effective_input_dim(self):
return self._all_dims_active.size return self._all_dims_active.size

View file

@ -47,6 +47,7 @@ class RBF(Stationary):
return dc return dc
def __setstate__(self, state): def __setstate__(self, state):
self.use_invLengthscale = False
return super(RBF, self).__setstate__(state) return super(RBF, self).__setstate__(state)
def spectrum(self, omega): def spectrum(self, omega):