added autodetection of Rasmussen's minimize

This commit is contained in:
Nicolo Fusi 2012-12-14 13:57:29 +00:00
parent da51f69ec3
commit 11dacb378a

View file

@@ -3,7 +3,13 @@
from scipy import optimize from scipy import optimize
# import rasmussens_minimize as rasm
try:
import rasmussens_minimize as rasm
rasm_available = True
except ImportError:
rasm_available = False
import pdb import pdb
import pylab as pb import pylab as pb
import datetime as dt import datetime as dt
@@ -38,7 +44,7 @@ class Optimizer():
self.gtol = gtol self.gtol = gtol
self.ftol = ftol self.ftol = ftol
self.model = model self.model = model
def run(self, **kwargs): def run(self, **kwargs):
start = dt.datetime.now() start = dt.datetime.now()
self.opt(**kwargs) self.opt(**kwargs)
@@ -159,43 +165,45 @@ class opt_simplex(Optimizer):
self.trace = None self.trace = None
class opt_rasm(Optimizer):
    """Optimizer backed by Rasmussen's conjugate-gradient `minimize`.

    Only registered when the `rasmussens_minimize` module imported
    successfully (see the module-level `rasm_available` flag).
    """
    def __init__(self, *args, **kwargs):
        Optimizer.__init__(self, *args, **kwargs)
        self.opt_name = "Rasmussen's Conjugate Gradient"

    def opt(self):
        """
        Run Rasmussen's Conjugate Gradient optimizer.

        Reads self.x_init / self.f_fp / self.messages / self.max_f_eval and
        stores the results in self.x_opt, self.f_opt, self.funct_eval,
        self.status and self.trace.
        """
        # minimize needs a combined objective+gradient callable, not f alone
        assert self.f_fp is not None, "Rasmussen's minimizer requires f_fp"
        # index into this list with the integer status code minimize returns
        statuses = ['Converged', 'Line search failed',
                    'Maximum number of f evaluations reached',
                    'NaNs in optimization']
        # minimize exposes no tolerance arguments, so warn and ignore them
        if self.xtol is not None:
            print("WARNING: minimize doesn't have an xtol arg, so I'm going to ignore it")
        if self.ftol is not None:
            print("WARNING: minimize doesn't have an ftol arg, so I'm going to ignore it")
        if self.gtol is not None:
            print("WARNING: minimize doesn't have an gtol arg, so I'm going to ignore it")
        opt_result = rasm.minimize(self.x_init, self.f_fp, (), messages=self.messages,
                                   maxnumfuneval=self.max_f_eval)
        self.x_opt = opt_result[0]
        # opt_result[1] is the trace of objective values; last entry is final f
        self.f_opt = opt_result[1][-1]
        self.funct_eval = opt_result[2]
        self.status = statuses[opt_result[3]]
        self.trace = opt_result[1]
def get_optimizer(f_min):
    """Return the Optimizer subclass matching the name *f_min*.

    Matching is a case-insensitive substring test (so e.g. 'tnc' selects
    opt_tnc). Rasmussen's minimizer is offered only when its module was
    importable. Returns None when nothing matches.

    NOTE(review): if f_min matches several keys, which one wins depends on
    dict iteration order — callers should pass an unambiguous name.
    """
    optimizers = {'fmin_tnc': opt_tnc,
                  'simplex': opt_simplex,
                  'lbfgsb': opt_lbfgsb}
    # only advertise Rasmussen's minimize when its import succeeded
    if rasm_available:
        optimizers['rasmussen'] = opt_rasm
    # hoist the invariant lower-casing out of the loop
    f_min_lower = f_min.lower()
    for opt_name in optimizers.keys():
        if f_min_lower in opt_name.lower():
            return optimizers[opt_name]