From 4dcf2b85ced1df4fe7c9536a2c903790c619361e Mon Sep 17 00:00:00 2001 From: James Hensman Date: Tue, 4 Dec 2012 13:10:17 -0800 Subject: [PATCH] models can now specify a preferred optimiser (defaults to tnc) --- GPy/core/model.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/GPy/core/model.py b/GPy/core/model.py index ab4f8246..a9b1f0d3 100644 --- a/GPy/core/model.py +++ b/GPy/core/model.py @@ -18,6 +18,7 @@ class model(parameterised): self.optimization_runs = [] self.sampling_runs = [] self.set_param(self.get_param()) + self.preferred_optimizer = 'tnc' def get_param(self): raise NotImplementedError, "this needs to be implemented to utilise the model class" def set_param(self,x): @@ -161,15 +162,19 @@ class model(parameterised): else: self.expand_param(initial_parameters) - def optimize(self, optimizer = 'tnc', start = None, **kwargs): + def optimize(self, optimizer=None, start=None, **kwargs): """ Optimize the model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors. kwargs are passed to the optimizer. They can be: :max_f_eval: maximum number of function evaluations - :messages: whether to display during optimisatio + :messages: whether to display during optimisation + :param optimizer: which optimizer to use (defaults to self.preferred_optimizer) + :type optimizer: string TODO: valid strings? """ + if optimizer is None: + optimizer = self.preferred_optimizer def f(x): self.expand_param(x)