diff --git a/GPy/core/parameterization/param.py b/GPy/core/parameterization/param.py
index b73e7dfa..984fc950 100644
--- a/GPy/core/parameterization/param.py
+++ b/GPy/core/parameterization/param.py
@@ -368,26 +368,26 @@ class ParamConcatenation(object):
     #===========================================================================
     def __getitem__(self, s):
         ind = numpy.zeros(sum(self._param_sizes), dtype=bool); ind[s] = True;
-        params = [p._get_params()[ind[ps]] for p,ps in zip(self.params, self._param_slices_) if numpy.any(p._get_params()[ind[ps]])]
+        params = [p._param_array_[ind[ps]] for p,ps in zip(self.params, self._param_slices_) if numpy.any(p._param_array_[ind[ps]])]
         if len(params)==1: return params[0]
         return ParamConcatenation(params)
     def __setitem__(self, s, val, update=True):
         if isinstance(val, ParamConcatenation):
-            val = val._vals()
+            val = val.values()
         ind = numpy.zeros(sum(self._param_sizes), dtype=bool); ind[s] = True;
-        vals = self._vals(); vals[s] = val; del val
+        vals = self.values(); vals[s] = val; del val
         [numpy.place(p, ind[ps], vals[ps]) for p, ps in zip(self.params, self._param_slices_)]
         if update:
             self.update_all_params()
-    def _vals(self):
+    def values(self):
         return numpy.hstack([p._param_array_ for p in self.params])
     #===========================================================================
     # parameter operations:
     #===========================================================================
     def update_all_params(self):
         for par in self.parents:
-            par.notify_observers(-numpy.inf)
+            par.notify_observers()
     def constrain(self, constraint, warning=True):
         [param.constrain(constraint, trigger_parent=False) for param in self.params]
@@ -442,12 +442,12 @@ class ParamConcatenation(object):
         return self.params[0]._highest_parent_._checkgrad(self, verbose, step, tolerance)
     #checkgrad.__doc__ = Gradcheckable.checkgrad.__doc__
-    __lt__ = lambda self, val: self._vals() < val
-    __le__ = lambda self, val: self._vals() <= val
-    __eq__ = lambda self, val: self._vals() == val
-    __ne__ = lambda self, val: self._vals() != val
-    __gt__ = lambda self, val: self._vals() > val
-    __ge__ = lambda self, val: self._vals() >= val
+    __lt__ = lambda self, val: self.values() < val
+    __le__ = lambda self, val: self.values() <= val
+    __eq__ = lambda self, val: self.values() == val
+    __ne__ = lambda self, val: self.values() != val
+    __gt__ = lambda self, val: self.values() > val
+    __ge__ = lambda self, val: self.values() >= val
     def __str__(self, *args, **kwargs):
         def f(p):
             ind = p._raveled_index()
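Note on the interface change above: `ParamConcatenation._vals()` is renamed to the public `values()`, so the flattened vector over all concatenated parameters becomes part of the regular interface, and the comparison operators (`__lt__` etc.) are defined against it. A minimal usage sketch, assuming a regression model like the ones in the tests further down (the random data is illustrative):

    import numpy as np
    import GPy

    X, Y = np.random.randn(20, 1), np.random.randn(20, 1)
    m = GPy.models.GPRegression(X, Y)

    params = m['']          # '' matches every parameter -> ParamConcatenation
    flat = params.values()  # np.hstack over the underlying _param_array_ views
    print flat              # comparisons such as (m[''] > 0) go through values()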
""" if name is not None: - return name.replace(" ", "_").replace(".", "_").replace("-", "_m_").replace("+", "_p_").replace("!", "_I_").replace("**", "_xx_").replace("*", "_x_").replace("/", "_l_").replace("@",'_at_') + return name.replace(" ", "_").replace(".", "_").replace("-", "_m_").replace("+", "_p_").replace("!", "_I_").replace("**", "_xx_").replace("*", "_x_").replace("/", "_l_").replace("@", '_at_') return '' class InterfacePickleFunctions(object): @@ -126,24 +126,22 @@ class Observable(Pickleable): def notify_observers(self, which=None, min_priority=None): """ Notifies all observers. Which is the element, which kicked off this - notification loop. + notification loop. The first argument will be self, the second `which`. NOTE: notifies only observers with priority p > min_priority! ^^^^^^^^^^^^^^^^ - - :param which: object, which started this notification loop :param min_priority: only notify observers with priority > min_priority if min_priority is None, notify all observers in order """ if which is None: which = self if min_priority is None: - [callble(which) for _, _, callble in self._observer_callables_] + [callble(self, which=which) for _, _, callble in self._observer_callables_] else: for p, _, callble in self._observer_callables_: if p <= min_priority: break - callble(which) + callble(self, which=which) def _insert_sorted(self, p, o, c): ins = 0 @@ -627,7 +625,7 @@ class OptimizationHandlable(Constrainable): # else: min_priority = -np.inf # self.notify_observers(None, min_priority) # don't overwrite this anymore! - #raise NotImplementedError, "Abstract superclass: This needs to be implemented in Param and Parameterizable" + # raise NotImplementedError, "Abstract superclass: This needs to be implemented in Param and Parameterizable" #=========================================================================== # Optimization handles: @@ -659,7 +657,7 @@ class OptimizationHandlable(Constrainable): x = rand_gen(loc=loc, scale=scale, size=self._size_transformed(), *args, **kwargs) # now draw from prior where possible [np.put(x, ind, p.rvs(ind.size)) for p, ind in self.priors.iteritems() if not p is None] - self._set_params_transformed(x) # makes sure all of the tied parameters get the same init (since there's only one prior object...) + self._set_params_transformed(x) # makes sure all of the tied parameters get the same init (since there's only one prior object...) #=========================================================================== # For shared memory arrays. This does nothing in Param, but sets the memory @@ -668,10 +666,10 @@ class OptimizationHandlable(Constrainable): def _propagate_param_grad(self, parray, garray): pi_old_size = 0 for pi in self._parameters_: - pislice = slice(pi_old_size, pi_old_size+pi.size) + pislice = slice(pi_old_size, pi_old_size + pi.size) - self._param_array_[pislice] = pi._param_array_.flat#, requirements=['C', 'W']).flat - self._gradient_array_[pislice] = pi._gradient_array_.flat#, requirements=['C', 'W']).flat + self._param_array_[pislice] = pi._param_array_.flat # , requirements=['C', 'W']).flat + self._gradient_array_[pislice] = pi._gradient_array_.flat # , requirements=['C', 'W']).flat pi._param_array_.data = parray[pislice].data pi._gradient_array_.data = garray[pislice].data @@ -723,7 +721,7 @@ class Parameterizable(OptimizationHandlable): self.__dict__[pname] = param self._added_names_.add(pname) else: - print "WARNING: added a parameter with formatted name {}, which is already a member of {} object. 
@@ -781,7 +779,7 @@ class Parameterizable(OptimizationHandlable):
         if param in self._parameters_ and index is not None:
             self.remove_parameter(param)
             self.add_parameter(param, index)
-        #elif param.has_parent():
+        # elif param.has_parent():
         #    raise HierarchyError, "parameter {} already in another model ({}), create new object (or copy) for adding".format(param._short(), param._highest_parent_._short())
         elif param not in self._parameters_:
             if param.has_parent():
@@ -874,12 +872,12 @@ class Parameterizable(OptimizationHandlable):
             p._parent_ = self
             p._parent_index_ = i
 
-            pslice = slice(old_size, old_size+p.size)
+            pslice = slice(old_size, old_size + p.size)
             # first connect all children
             p._propagate_param_grad(self._param_array_[pslice], self._gradient_array_[pslice])
             # then connect children to self
-            self._param_array_[pslice] = p._param_array_.flat#, requirements=['C', 'W']).ravel(order='C')
-            self._gradient_array_[pslice] = p._gradient_array_.flat#, requirements=['C', 'W']).ravel(order='C')
+            self._param_array_[pslice] = p._param_array_.flat  # , requirements=['C', 'W']).ravel(order='C')
+            self._gradient_array_[pslice] = p._gradient_array_.flat  # , requirements=['C', 'W']).ravel(order='C')
 
             if not p._param_array_.flags['C_CONTIGUOUS']:
                 import ipdb;ipdb.set_trace()
@@ -894,10 +892,10 @@ class Parameterizable(OptimizationHandlable):
     #===========================================================================
     # notification system
     #===========================================================================
-    def _parameters_changed_notification(self, which):
+    def _parameters_changed_notification(self, me, which=None):
         self.parameters_changed()
-    def _pass_through_notify_observers(self, which):
-        self.notify_observers(which)
+    def _pass_through_notify_observers(self, me, which=None):
+        self.notify_observers(which=which)
 
     #===========================================================================
     # TODO: not working yet
diff --git a/GPy/core/parameterization/parameterized.py b/GPy/core/parameterization/parameterized.py
index 8551c831..6460c988 100644
--- a/GPy/core/parameterization/parameterized.py
+++ b/GPy/core/parameterization/parameterized.py
@@ -157,7 +157,7 @@ class Parameterized(Parameterizable, Pickleable):
                 return self._param_slices_[param._parent_._get_original(param)._parent_index_].start
             return self._offset_for(param._parent_) + param._parent_._offset_for(param)
         return 0
-    
+
     def _raveled_index_for(self, param):
         """
         get the raveled index for a param
@@ -167,7 +167,7 @@ class Parameterized(Parameterizable, Pickleable):
         if isinstance(param, ParamConcatenation):
             return numpy.hstack((self._raveled_index_for(p) for p in param.params))
         return param._raveled_index() + self._offset_for(param)
-    
+
     def _raveled_index(self):
         """
         get the raveled index for this object,
@@ -218,14 +218,17 @@ class Parameterized(Parameterizable, Pickleable):
                 return ParamConcatenation(paramlist)
             return paramlist[-1]
         return ParamConcatenation(paramlist)
-    
+
     def __setitem__(self, name, value, paramlist=None):
         if isinstance(name, (slice, tuple, np.ndarray)):
-            self._param_array_[name] = value
-            self.notify_observers()
+            try:
+                self._param_array_[name] = value
+            except:
+                raise ValueError, "Setting by slice or index only allowed with array-like"
+            self._trigger_params_changed()
         else:
             try:
                 param = self.__getitem__(name, paramlist)
-            except AttributeError as a: raise a
+            except: raise
             param[:] = value
 
     def __setattr__(self, name, val):
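With the `__setitem__` rework above, the two indexing styles are handled explicitly: slice/tuple/array keys write directly into `_param_array_` (a failed assignment is reported as a `ValueError`) and then run `_trigger_params_changed()` once, while string keys go through `__getitem__`'s regexp matching and assign via the matched parameter objects. Sketched usage, reusing the model from the first sketch (the values are illustrative):

    m[:] = 1.0        # index path: write into _param_array_, one params_changed
    m['.*var'] = 2.0  # name path: regexp match, then param[:] = 2.0 per match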
ValueError, "Setting by slice or index only allowed with array-like" + self._trigger_params_changed() else: try: param = self.__getitem__(name, paramlist) - except AttributeError as a: raise a + except: raise param[:] = value def __setattr__(self, name, val): diff --git a/GPy/testing/likelihood_tests.py b/GPy/testing/likelihood_tests.py index 341b61d4..7276e108 100644 --- a/GPy/testing/likelihood_tests.py +++ b/GPy/testing/likelihood_tests.py @@ -538,11 +538,9 @@ class TestNoiseModels(object): m.checkgrad(verbose=1, step=step) #if not m.checkgrad(step=step): #m.checkgrad(verbose=1, step=step) - #import ipdb; ipdb.set_trace() #NOTE this test appears to be stochastic for some likelihoods (student t?) # appears to all be working in test mode right now... #if isinstance(model, GPy.likelihoods.StudentT): - # import ipdb;ipdb.set_trace() assert m.checkgrad(step=step) ########### diff --git a/GPy/testing/model_tests.py b/GPy/testing/model_tests.py index 2767b559..3c39c5e0 100644 --- a/GPy/testing/model_tests.py +++ b/GPy/testing/model_tests.py @@ -58,7 +58,77 @@ class MiscTests(unittest.TestCase): np.testing.assert_almost_equal(np.diag(K_hat)[:, None], var) #np.testing.assert_almost_equal(mu_hat, mu) + def test_likelihood_replicate(self): + m = GPy.models.GPRegression(self.X, self.Y) + m2 = GPy.models.GPRegression(self.X, self.Y) + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + m.randomize() + m2[:] = m[''].values() + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + m.randomize() + m2[''] = m[:] + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + m.randomize() + m2[:] = m[:] + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + m.randomize() + m2[''] = m[''] + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + m.kern.lengthscale.randomize() + m2[:] = m[:] + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + + m.Gaussian_noise.randomize() + m2[:] = m[:] + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + + m['.*var'] = 2 + m2['.*var'] = m['.*var'] + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + + + def test_likelihood_set(self): + m = GPy.models.GPRegression(self.X, self.Y) + m2 = GPy.models.GPRegression(self.X, self.Y) + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + + m.kern.lengthscale.randomize() + m._trigger_params_changed() + m2.kern.lengthscale = m.kern.lengthscale + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + + m.kern.lengthscale.randomize() + m._trigger_params_changed() + m2['.*lengthscale'] = m.kern.lengthscale + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + + m.kern.lengthscale.randomize() + m._trigger_params_changed() + m2['.*lengthscale'] = m.kern['.*lengthscale'] + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + + m.kern.lengthscale.randomize() + m._trigger_params_changed() + m2.kern.lengthscale = m.kern['.*lengthscale'] + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + + def test_likelihood_replicate_kern(self): + m = GPy.models.GPRegression(self.X, self.Y) + m2 = GPy.models.GPRegression(self.X, self.Y) + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + m.kern.randomize() + m2.kern[''] = m.kern[:] + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + m.kern.randomize() + m2.kern[:] = m.kern[:] + np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + m.kern.randomize() + m2.kern[''] = m.kern[''] + 
diff --git a/GPy/testing/observable_tests.py b/GPy/testing/observable_tests.py
index 6d463a91..90623703 100644
--- a/GPy/testing/observable_tests.py
+++ b/GPy/testing/observable_tests.py
@@ -45,7 +45,7 @@ class Test(unittest.TestCase):
         self._first = None
         self._second = None
 
-    def _trigger(self, which):
+    def _trigger(self, me, which):
         self._observer_triggered = which
         self._trigger_count += 1
         if self._first is not None:
@@ -53,7 +53,7 @@ class Test(unittest.TestCase):
         else:
             self._first = self._trigger
 
-    def _trigger_priority(self, which):
+    def _trigger_priority(self, me, which):
         if self._first is not None:
             self._second = self._trigger_priority
         else:
diff --git a/GPy/util/caching.py b/GPy/util/caching.py
index 0b8039d6..282c9f8c 100644
--- a/GPy/util/caching.py
+++ b/GPy/util/caching.py
@@ -79,13 +79,13 @@ class Cacher(object):
                 self.reset()
                 raise
 
-    def on_cache_changed(self, arg):
+    def on_cache_changed(self, direct, which=None):
         """
        A callback function which sets local flags when the elements of some cached inputs change.
         This function gets 'hooked up' to the inputs when we cache them, and upon their elements being changed we update here.
         """
-        self.inputs_changed = [any([a is arg for a in args]) or old_ic for args, old_ic in zip(self.cached_inputs, self.inputs_changed)]
+        self.inputs_changed = [any([a is direct or a is which for a in args]) or old_ic for args, old_ic in zip(self.cached_inputs, self.inputs_changed)]
 
     def reset(self):
         """
@@ -101,7 +101,7 @@ class Cacher(object):
     def __name__(self):
         return self.operation.__name__
 
-from functools import wraps, partial
+from functools import partial
 
 class Cacher_wrap(object):
     def __init__(self, f, limit, ignore_args, force_kwargs):
@@ -113,6 +113,7 @@ class Cacher_wrap(object):
         return partial(self, obj)
     def __call__(self, *args, **kwargs):
         obj = args[0]
+        #import ipdb;ipdb.set_trace()
         try:
             caches = obj.__cachers
         except AttributeError:
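`Cacher.on_cache_changed` adopts the same two-argument convention: `direct` is the object that notified the cacher, `which` the element that started the notification loop, and a cache line is flagged stale if either of them is one of its recorded inputs. The flag update is equivalent to this standalone restatement (a plain function, names illustrative):

    def mark_changed(cached_inputs, inputs_changed, direct, which=None):
        # a cache line goes stale once one of its recorded inputs is the
        # notifying object (direct) or the notification origin (which)
        return [any(a is direct or a is which for a in args) or stale
                for args, stale in zip(cached_inputs, inputs_changed)]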