mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-09 03:52:39 +02:00
models can now specify a preferred optimser (defaults to tnc)
This commit is contained in:
parent
1a9872c187
commit
4dcf2b85ce
1 changed files with 7 additions and 2 deletions
|
|
@ -18,6 +18,7 @@ class model(parameterised):
|
|||
self.optimization_runs = []
|
||||
self.sampling_runs = []
|
||||
self.set_param(self.get_param())
|
||||
self.preferred_optimizer = 'tnc'
|
||||
def get_param(self):
|
||||
raise NotImplementedError, "this needs to be implemented to utilise the model class"
|
||||
def set_param(self,x):
|
||||
|
|
@ -161,15 +162,19 @@ class model(parameterised):
|
|||
else:
|
||||
self.expand_param(initial_parameters)
|
||||
|
||||
def optimize(self, optimizer = 'tnc', start = None, **kwargs):
|
||||
def optimize(self, optimizer=None, start=None, **kwargs):
|
||||
"""
|
||||
Optimize the model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors.
|
||||
kwargs are passed to the optimizer. They can be:
|
||||
|
||||
:max_f_eval: maximum number of function evaluations
|
||||
:messages: whether to display during optimisatio
|
||||
:messages: whether to display during optimisation
|
||||
:param optimizer: which optimizer to use (defaults to self.preferred_optimizer)
|
||||
:type optimizer: string TODO: valid strings?
|
||||
|
||||
"""
|
||||
if optimizer is None:
|
||||
optimizer = self.preferred_optimizer
|
||||
|
||||
def f(x):
|
||||
self.expand_param(x)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue