diff --git a/GPy/core/gp.py b/GPy/core/gp.py
index 25066381..3252ac08 100644
--- a/GPy/core/gp.py
+++ b/GPy/core/gp.py
@@ -124,6 +124,7 @@ class GP(Model):
             else:
                 self.X = ObsAr(X)
         self.update_model(True)
+        self._trigger_params_changed()
 
     def set_X(self,X):
         """
diff --git a/GPy/core/model.py b/GPy/core/model.py
index d61b9b43..c5d318e7 100644
--- a/GPy/core/model.py
+++ b/GPy/core/model.py
@@ -154,7 +154,7 @@ class Model(Parameterized):
         """
         return -(self._log_likelihood_gradients() + self._log_prior_gradients())
 
-    def _objective_grads(self, x):
+    def _grads(self, x):
         """
         Gets the gradients from the likelihood and the priors.
 
@@ -200,7 +200,7 @@ class Model(Parameterized):
                 return np.inf
             return obj
 
-    def _objective_and_grads(self, x):
+    def _objective_grads(self, x):
         try:
             self.optimizer_array = x
             obj_f, self.obj_grads = self.objective_function(), self._transform_gradients(self.objective_function_gradients())
@@ -213,7 +213,7 @@ class Model(Parameterized):
             self.obj_grads = np.clip(self._transform_gradients(self.objective_function_gradients()), -1e10, 1e10)
         return obj_f, self.obj_grads
 
-    def optimize(self, optimizer=None, start=None, messages=False, max_iters=1000, ipython_notebook=False, **kwargs):
+    def optimize(self, optimizer=None, start=None, messages=False, max_iters=1000, ipython_notebook=True, **kwargs):
         """
         Optimize the model using self.log_likelihood and
         self.log_likelihood_gradient, as well as self.priors.
@@ -255,9 +255,10 @@ class Model(Parameterized):
         else:
             optimizer = optimization.get_optimizer(optimizer)
             opt = optimizer(start, model=self, max_iters=max_iters, **kwargs)
-
-        with VerboseOptimization(self, maxiters=max_iters, verbose=messages, ipython_notebook=ipython_notebook):
-            opt.run(f_fp=self._objective_and_grads, f=self._objective, fp=self._objective_grads)
+
+        with VerboseOptimization(self, opt, maxiters=max_iters, verbose=messages, ipython_notebook=ipython_notebook) as vo:
+            opt.run(f_fp=self._objective_grads, f=self._objective, fp=self._grads)
+            vo.finish(opt)
 
         self.optimization_runs.append(opt)
 
@@ -314,7 +315,7 @@ class Model(Parameterized):
         # evaulate around the point x
         f1 = self._objective(x + dx)
         f2 = self._objective(x - dx)
-        gradient = self._objective_grads(x)
+        gradient = self._grads(x)
 
         dx = dx[transformed_index]
         gradient = gradient[transformed_index]
@@ -360,7 +361,7 @@ class Model(Parameterized):
             print "No free parameters to check"
             return
 
-        gradient = self._objective_grads(x).copy()
+        gradient = self._grads(x).copy()
         np.where(gradient == 0, 1e-312, gradient)
         ret = True
         for nind, xind in itertools.izip(param_index, transformed_index):
@@ -401,12 +402,14 @@ class Model(Parameterized):
         model_details = [['<b>Model</b>', self.name + '<br>'],
                          ['<b>Log-likelihood</b>', '{}<br>'.format(float(self.log_likelihood()))],
                          ["<b>Number of Parameters</b>", '{}<br>'.format(self.size)],
-                         ["<b>Updates</b>", '{}<br>'.format(self._updates)],
+                         ["<b>Updates</b>", '{}<br>'.format(self._update_on)],
                          ]
         from operator import itemgetter
         to_print = ["""<style type="text/css">
 .pd{
     font-family: "Courier New", Courier, monospace !important;
     width: 100%;
     padding: 3px;
 }
 </style>\n"""] + ["<p class=pd>"] + ["{}: {}".format(name, detail) for name, detail in model_details] + ["</p>"]
         to_print.append(super(Model, self)._repr_html_())
@@ -416,7 +419,7 @@ class Model(Parameterized):
         model_details = [['Name', self.name],
                          ['Log-likelihood', '{}'.format(float(self.log_likelihood()))],
                          ["Number of Parameters", '{}'.format(self.size)],
-                         ["Updates", '{}'.format(self._updates)],
+                         ["Updates", '{}'.format(self._update_on)],
                          ]
         from operator import itemgetter
         max_len = reduce(lambda a, b: max(len(b[0]), a), model_details, 0)
diff --git a/GPy/core/parameterization/observable.py b/GPy/core/parameterization/observable.py
index 4782d2ea..8a85c6ca 100644
--- a/GPy/core/parameterization/observable.py
+++ b/GPy/core/parameterization/observable.py
@@ -14,6 +14,10 @@ class Observable(object):
         super(Observable, self).__init__()
         from lists_and_dicts import ObserverList
         self.observers = ObserverList()
+        self._update_on = True
+
+    def set_updates(self, on=True):
+        self._update_on = on
 
     def add_observer(self, observer, callble, priority=0):
         """
@@ -51,15 +55,16 @@ class Observable(object):
         :param min_priority: only notify observers with priority > min_priority
                              if min_priority is None, notify all observers in order
         """
-        if which is None:
-            which = self
-        if min_priority is None:
-            [callble(self, which=which) for _, _, callble in self.observers]
-        else:
-            for p, _, callble in self.observers:
-                if p <= min_priority:
-                    break
-                callble(self, which=which)
+        if self._update_on:
+            if which is None:
+                which = self
+            if min_priority is None:
+                [callble(self, which=which) for _, _, callble in self.observers]
+            else:
+                for p, _, callble in self.observers:
+                    if p <= min_priority:
+                        break
+                    callble(self, which=which)
 
     def change_priority(self, observer, callble, priority):
         self.remove_observer(observer, callble)
diff --git a/GPy/core/parameterization/param.py b/GPy/core/parameterization/param.py
index 2fbb5df5..1246bc18 100644
--- a/GPy/core/parameterization/param.py
+++ b/GPy/core/parameterization/param.py
@@ -84,6 +84,7 @@ class Param(Parameterizable, ObsAr):
         self._original_ = getattr(obj, '_original_', None)
         self._name = getattr(obj, '_name', None)
         self._gradient_array_ = getattr(obj, '_gradient_array_', None)
+        self._update_on = getattr(obj, '_update_on', None)
         self.constraints = getattr(obj, 'constraints', None)
         self.priors = getattr(obj, 'priors', None)
 
@@ -273,7 +274,7 @@ class Param(Parameterizable, ObsAr):
         header = header_format.format(x=self.hierarchy_name(), c=__constraints_name__, i=__index_name__, t=__tie_name__, p=__priors_name__) # nice header for printing
         if not ties: ties = itertools.cycle([''])
         return "\n".join(["""