Mirror of https://github.com/SheffieldML/GPy.git (synced 2026-05-15 06:52:39 +02:00).
Commit: add adadelta as an optimizer.
This commit is contained in:
parent
f71b57c24e
commit
09571e9264
2 changed files with 24 additions and 2 deletions
|
|
@ -255,7 +255,7 @@ class Model(Parameterized):
|
||||||
opt.model = self
|
opt.model = self
|
||||||
else:
|
else:
|
||||||
optimizer = optimization.get_optimizer(optimizer)
|
optimizer = optimization.get_optimizer(optimizer)
|
||||||
opt = optimizer(start, model=self, max_iters=max_iters, **kwargs)
|
opt = optimizer(x_init=start, model=self, max_iters=max_iters, **kwargs)
|
||||||
|
|
||||||
with VerboseOptimization(self, opt, maxiters=max_iters, verbose=messages, ipython_notebook=ipython_notebook, clear_after_finish=clear_after_finish) as vo:
|
with VerboseOptimization(self, opt, maxiters=max_iters, verbose=messages, ipython_notebook=ipython_notebook, clear_after_finish=clear_after_finish) as vo:
|
||||||
opt.run(f_fp=self._objective_grads, f=self._objective, fp=self._grads)
|
opt.run(f_fp=self._objective_grads, f=self._objective, fp=self._grads)
|
||||||
|
|
|
||||||
|
|
@ -228,13 +228,35 @@ class opt_SCG(Optimizer):
|
||||||
self.f_opt = self.trace[-1]
|
self.f_opt = self.trace[-1]
|
||||||
self.funct_eval = opt_result[2]
|
self.funct_eval = opt_result[2]
|
||||||
self.status = opt_result[3]
|
self.status = opt_result[3]
|
||||||
|
|
||||||
|
class Opt_Adadelta(Optimizer):
    """Adadelta optimizer backed by the optional `climin` package.

    Wraps ``climin.adadelta.Adadelta`` so it can be selected through
    ``get_optimizer('adadelta')`` like the other ``Optimizer`` subclasses
    in this module.

    :param step_rate: step rate (learning rate) forwarded to climin's Adadelta.
    :param decay: decay rate of the moving averages, forwarded to climin.
    :param momentum: momentum term, forwarded to climin.
    """

    def __init__(self, step_rate=0.1, decay=0.9, momentum=0, *args, **kwargs):
        Optimizer.__init__(self, *args, **kwargs)
        self.opt_name = "Adadelta (climin)"
        self.step_rate = step_rate
        self.decay = decay
        self.momentum = momentum

    def opt(self, f_fp=None, f=None, fp=None):
        """Run Adadelta from ``self.x_init`` until ``self.max_iters`` iterations.

        Only the gradient function ``fp`` is used; ``f`` and ``f_fp`` are
        accepted for signature compatibility with the other optimizers.
        On termination ``self.x_opt`` and ``self.status`` are set.
        """
        # Validate with an explicit raise rather than `assert`, which is
        # stripped when Python runs with -O.
        if fp is None:
            raise ValueError("Adadelta requires a gradient function (fp)")

        # Imported lazily so climin remains an optional dependency.
        import climin

        opt = climin.adadelta.Adadelta(self.x_init, fp,
                                       step_rate=self.step_rate,
                                       decay=self.decay,
                                       momentum=self.momentum)

        # climin optimizers are infinite iterators; stop once the iteration
        # budget is spent and record the final parameter vector.
        for info in opt:
            if info['n_iter'] >= self.max_iters:
                self.x_opt = opt.wrt
                self.status = 'maximum number of function evaluations exceeded '
                break
|
||||||
|
|
||||||
def get_optimizer(f_min):
|
def get_optimizer(f_min):
|
||||||
|
|
||||||
optimizers = {'fmin_tnc': opt_tnc,
|
optimizers = {'fmin_tnc': opt_tnc,
|
||||||
'simplex': opt_simplex,
|
'simplex': opt_simplex,
|
||||||
'lbfgsb': opt_lbfgsb,
|
'lbfgsb': opt_lbfgsb,
|
||||||
'scg': opt_SCG}
|
'scg': opt_SCG,
|
||||||
|
'adadelta':Opt_Adadelta}
|
||||||
|
|
||||||
if rasm_available:
|
if rasm_available:
|
||||||
optimizers['rasmussen'] = opt_rasm
|
optimizers['rasmussen'] = opt_rasm
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue