redid constraints

Max Zwiessele 2014-02-12 17:11:55 +00:00
parent c0eddf29e7
commit a264cdaa98
8 changed files with 290 additions and 190 deletions

View file

@@ -20,7 +20,7 @@ class Model(Parameterized):
     _fail_count = 0  # Count of failed optimization steps (see objective)
     _allowed_failures = 10  # number of allowed failures
     def __init__(self, name):
-        super(Model, self).__init__(name)#Parameterized.__init__(self)
+        super(Model, self).__init__(name) # Parameterized.__init__(self)
         self.priors = []
         self._priors = ParameterIndexOperations()
         self.optimization_runs = []
@@ -30,10 +30,10 @@ class Model(Parameterized):
     def log_likelihood(self):
         raise NotImplementedError, "this needs to be implemented to use the model class"
     def _log_likelihood_gradients(self):
-        #def dK_d(self, param, dL_dK, X, X2)
+        # def dK_d(self, param, dL_dK, X, X2)
         g = np.zeros(self.size)
         try:
-            #[g.__setitem__(s, self.gradient_mapping[p]().flat) for p, s in itertools.izip(self._parameters_, self._param_slices_) if not p.is_fixed]
+            # [g.__setitem__(s, self.gradient_mapping[p]().flat) for p, s in itertools.izip(self._parameters_, self._param_slices_) if not p.is_fixed]
             [p._collect_gradient(g[s]) for p, s in itertools.izip(self._parameters_, self._param_slices_) if not p.is_fixed]
         except ValueError:
             raise ValueError, 'Gradient for {} not defined, please specify gradients for parameters to optimize'.format(p.name)
@@ -168,14 +168,14 @@ class Model(Parameterized):
         Make this draw from the prior if one exists, else draw from N(0,1)
         """
         # first take care of all parameters (from N(0,1))
-        #x = self._get_params_transformed()
+        # x = self._get_params_transformed()
         x = np.random.randn(self.size_transformed)
         x = self._untransform_params(x)
         # now draw from prior where possible
         if self.priors is not None and len(self.priors):
             [np.put(x, i, p.rvs(1)) for i, p in enumerate(self.priors) if not p is None]
         self._set_params(x)
-        #self._set_params_transformed(self._get_params_transformed()) # makes sure all of the tied parameters get the same init (since there's only one prior object...)
+        # self._set_params_transformed(self._get_params_transformed()) # makes sure all of the tied parameters get the same init (since there's only one prior object...)
     def optimize_restarts(self, num_restarts=10, robust=False, verbose=True, parallel=False, num_processes=None, **kwargs):
         """
@@ -412,7 +412,7 @@ class Model(Parameterized):
             gradient = self.objective_function_gradients(x)
             numerical_gradient = (f1 - f2) / (2 * dx)
-            global_ratio = (f1 - f2) / (2 * np.dot(dx, np.where(gradient==0, 1e-32, gradient)))
+            global_ratio = (f1 - f2) / (2 * np.dot(dx, np.where(gradient == 0, 1e-32, gradient)))
             return (np.abs(1. - global_ratio) < tolerance) or (np.abs(gradient - numerical_gradient).mean() < tolerance)
         else:
@@ -444,18 +444,18 @@ class Model(Parameterized):
                 return
             gradient = self.objective_function_gradients(x)
-            np.where(gradient==0, 1e-312, gradient)
+            np.where(gradient == 0, 1e-312, gradient)
             ret = True
             for i, ind in enumerate(param_list):
                 xx = x.copy()
-                xx[ind] += step
+                xx[i] += step
                 f1 = self.objective_function(xx)
-                xx[ind] -= 2.*step
+                xx[i] -= 2.*step
                 f2 = self.objective_function(xx)
                 numerical_gradient = (f1 - f2) / (2 * step)
-                ratio = (f1 - f2) / (2 * step * gradient[ind])
-                difference = np.abs((f1 - f2) / 2 / step - gradient[ind])
+                ratio = (f1 - f2) / (2 * step * gradient[i])
+                difference = np.abs((f1 - f2) / 2 / step - gradient[i])
                 if (np.abs(1. - ratio) < tolerance) or np.abs(difference) < tolerance:
                     formatted_name = "\033[92m {0} \033[0m".format(names[ind])
@@ -466,7 +466,7 @@ class Model(Parameterized):
                 r = '%.6f' % float(ratio)
                 d = '%.6f' % float(difference)
-                g = '%.6f' % gradient[ind]
+                g = '%.6f' % gradient[i]
                 ng = '%.6f' % float(numerical_gradient)
                 grad_string = "{0:<{c0}}|{1:^{c1}}|{2:^{c2}}|{3:^{c3}}|{4:^{c4}}".format(formatted_name, r, d, g, ng, c0=cols[0] + 9, c1=cols[1], c2=cols[2], c3=cols[3], c4=cols[4])
                 print grad_string
@@ -517,10 +517,10 @@ class Model(Parameterized):
         alpha = 0
         stop = False
-        #Handle **kwargs
+        # Handle **kwargs
         ep_args = {}
         for arg in kwargs.keys():
-            if arg in ('epsilon','power_ep'):
+            if arg in ('epsilon', 'power_ep'):
                 ep_args[arg] = kwargs[arg]
                 del kwargs[arg]
@@ -528,7 +528,7 @@ class Model(Parameterized):
         last_approximation = self.likelihood.copy()
         last_params = self._get_params()
         if len(ep_args) == 2:
-            self.update_likelihood_approximation(epsilon=ep_args['epsilon'],power_ep=ep_args['power_ep'])
+            self.update_likelihood_approximation(epsilon=ep_args['epsilon'], power_ep=ep_args['power_ep'])
         elif len(ep_args) == 1:
             if ep_args.keys()[0] == 'epsilon':
                 self.update_likelihood_approximation(epsilon=ep_args['epsilon'])
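
Note: the checkgrad hunks above fix an indexing bug (the perturbation and gradient lookup now use the running position i rather than ind) in a standard central-difference gradient check: each analytic gradient entry is accepted if the ratio against the numerical gradient is close to 1, or the absolute difference is small. A self-contained sketch of that test; names and signatures here are illustrative, not GPy's API:

    import numpy as np

    def checkgrad(f, grad, x, step=1e-6, tolerance=1e-3):
        # Central-difference check: perturb one entry at a time and compare
        # (f(x + step*e_i) - f(x - step*e_i)) / (2*step) with grad(x)[i].
        g = grad(x)
        ok = True
        for i in range(x.size):
            xx = x.copy()
            xx[i] += step
            f1 = f(xx)
            xx[i] -= 2. * step
            f2 = f(xx)
            numerical = (f1 - f2) / (2 * step)
            # guard against a zero analytic gradient, as the diff does with np.where
            ratio = numerical / (g[i] if g[i] != 0 else 1e-32)
            ok &= (abs(1. - ratio) < tolerance) or (abs(numerical - g[i]) < tolerance)
        return ok

    # quadratic objective with known gradient: should print True
    print(checkgrad(lambda x: 0.5 * np.dot(x, x), lambda x: x, np.random.randn(5)))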

View file

@@ -4,7 +4,7 @@
 __updated__ = '2013-12-16'
 import numpy as np
-from parameter_core import Observable, Constrainable, Gradcheckable
+from parameter_core import Observable, Parameterizable
 class ParamList(list):
     """

View file

@@ -58,7 +58,7 @@ class ParameterIndexOperations(object):
     index array, for multi-param handling.
     '''
     def __init__(self):
-        self._properties = ParamDict()
+        self._properties = IntArrayDict()
         #self._reverse = collections.defaultdict(list)
     def __getstate__(self):
@@ -71,16 +71,19 @@ class ParameterIndexOperations(object):
     def iteritems(self):
         return self._properties.iteritems()
+    def items(self):
+        return self._properties.items()
     def properties(self):
         return self._properties.keys()
-    def iter_properties(self):
+    def iterproperties(self):
         return self._properties.iterkeys()
     def shift(self, start, size):
         for ind in self.iterindices():
             toshift = ind>=start
-            if len(toshift) > 0:
+            if toshift.size > 0:
                 ind[toshift] += size
     def clear(self):
@@ -96,7 +99,7 @@ class ParameterIndexOperations(object):
         return self._properties.values()
     def properties_for(self, index):
-        return vectorize(lambda i: [prop for prop in self.iter_properties() if i in self._properties[prop]], otypes=[list])(index)
+        return vectorize(lambda i: [prop for prop in self.iterproperties() if i in self[prop]], otypes=[list])(index)
     def add(self, prop, indices):
         try:
@@ -114,9 +117,14 @@ class ParameterIndexOperations(object):
             del self._properties[prop]
             return removed.astype(int)
         return numpy.array([]).astype(int)
     def __getitem__(self, prop):
         return self._properties[prop]
+    def __str__(self, *args, **kwargs):
+        import pprint
+        return pprint.pformat(dict(self._properties))
def combine_indices(arr1, arr2):
    return numpy.union1d(arr1, arr2)
@@ -126,5 +134,94 @@ def remove_indices(arr, to_remove):
def index_empty(index):
    return numpy.size(index) == 0
+class ParameterIndexOperationsView(object):
+    def __init__(self, param_index_operations, offset, size):
+        self._param_index_ops = param_index_operations
+        self._offset = offset
+        self._size = size
+    def __getstate__(self):
+        return [self._param_index_ops, self._offset, self._size]
+    def __setstate__(self, state):
+        self._param_index_ops = state[0]
+        self._offset = state[1]
+        self._size = state[2]
+    def _filter_index(self, ind):
+        return ind[(ind >= self._offset) * (ind < (self._offset + self._size))] - self._offset
+    def iteritems(self):
+        for i, ind in self._param_index_ops.iteritems():
+            ind2 = self._filter_index(ind)
+            if ind2.size > 0:
+                yield i, ind2
+    def items(self):
+        return [[i,v] for i,v in self.iteritems()]
+    def properties(self):
+        return [i for i in self.iterproperties()]
+    def iterproperties(self):
+        for i, _ in self.iteritems():
+            yield i
+    def shift(self, start, size):
+        raise NotImplementedError, 'Shifting only supported in original ParamIndexOperations'
+    def clear(self):
+        for i, ind in self.items():
+            self._param_index_ops.remove(i, ind+self._offset)
+    def size(self):
+        return reduce(lambda a,b: a+b.size, self.iterindices(), 0)
+    def iterindices(self):
+        for _, ind in self.iteritems():
+            yield ind
+    def indices(self):
+        [ind for ind in self.iterindices()]
+    def properties_for(self, index):
+        return vectorize(lambda i: [prop for prop in self.iterproperties() if i in self[prop]], otypes=[list])(index)
+    def add(self, prop, indices):
+        self._param_index_ops.add(prop, indices+self._offset)
+    def remove(self, prop, indices):
+        removed = self._param_index_ops.remove(prop, indices+self._offset)
+        if removed.size > 0:
+            return removed - self._size
+        return removed
+    def __getitem__(self, prop):
+        ind = self._filter_index(self._param_index_ops[prop])
+        if ind.size > 0:
+            return ind
+        raise KeyError, prop
+    def __str__(self, *args, **kwargs):
+        import pprint
+        return pprint.pformat(dict(self.iteritems()))
+    def update(self, parameter_index_view):
+        for i, v in parameter_index_view.iteritems():
+            self.add(i, v)
+pass

View file

@@ -3,7 +3,7 @@
 import itertools
 import numpy
-from parameter_core import Constrainable, Gradcheckable, adjust_name_for_printing
+from parameter_core import Constrainable, Gradcheckable, Indexable, Parameterizable, adjust_name_for_printing
 from array_core import ObservableArray, ParamList
 ###### printing
@@ -14,13 +14,7 @@ __precision__ = numpy.get_printoptions()['precision'] # numpy printing precision
 __print_threshold__ = 5
 ######
-class Float(numpy.float64, Constrainable):
-    def __init__(self, f, base):
-        super(Float,self).__init__(f)
-        self._base = base
-class Param(ObservableArray, Constrainable, Gradcheckable):
+class Param(ObservableArray, Constrainable, Gradcheckable, Indexable, Parameterizable):
     """
     Parameter object for GPy models.
@@ -364,7 +358,7 @@ class Param(ObservableArray, Constrainable, Gradcheckable):
         return [self.shape]
     @property
     def _constraints_str(self):
-        return [' '.join(map(lambda c: str(c[0]) if c[1].size == self._realsize_ else "{" + str(c[0]) + "}", self._highest_parent_._constraints_iter_items(self)))]
+        return [' '.join(map(lambda c: str(c[0]) if c[1].size == self._realsize_ else "{" + str(c[0]) + "}", self.constraints.iteritems()))]
     @property
     def _ties_str(self):
         return [t._short() for t in self._tied_to_] or ['']
@@ -390,7 +384,7 @@ class Param(ObservableArray, Constrainable, Gradcheckable):
         else: ties[i, matches[0]] = numpy.take(tt_rav_index, matches[1], mode='wrap')
         return map(lambda a: sum(a, []), zip(*[[[tie.flatten()] if tx != None else [] for tx in t] for t, tie in zip(ties, self._tied_to_)]))
     def _constraints_for(self, rav_index):
-        return self._highest_parent_._constraints_for(self, rav_index)
+        return self.constraints.properties_for(rav_index)
     def _indices(self, slice_index=None):
         # get a int-array containing all indices in the first axis.
         if slice_index is None:
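
Note: with constraints now queried through self.constraints instead of climbing to _highest_parent_, _constraints_str renders each (constraint, index-array) pair locally, bracing constraints that only cover part of the parameter. A toy rendering of that rule (the values are made up):

    realsize = 4  # number of entries in the parameter
    items = [('+ve', range(4)), ('fixed', [1, 2])]  # (constraint, covered indices)
    rendered = ' '.join(str(c) if len(ind) == realsize else '{' + str(c) + '}'
                        for c, ind in items)
    print(rendered)  # '+ve {fixed}' -- braces mark a partially constrained parameter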

View file

@@ -1,7 +1,7 @@
 # Copyright (c) 2012, GPy authors (see AUTHORS.txt).
 # Licensed under the BSD 3-clause license (see LICENSE.txt)
-from transformations import Logexp, NegativeLogexp, Logistic
+from transformations import Transformation, Logexp, NegativeLogexp, Logistic
 __updated__ = '2013-12-16'
@@ -10,6 +10,11 @@ def adjust_name_for_printing(name):
         return name.replace(" ", "_").replace(".", "_").replace("-","").replace("+","").replace("!","").replace("*","").replace("/","")
     return ''
+#===============================================================================
+# Printing:
+__fixed__ = "fixed"
+#===============================================================================
 class Observable(object):
     _observers_ = {}
     def add_observer(self, observer, callble):
@@ -20,6 +25,22 @@ class Observable(object):
     def _notify_observers(self):
         [callble(self) for callble in self._observers_.itervalues()]
+class Parameterizable(object):
+    def __init__(self, *args, **kwargs):
+        from GPy.core.parameterization.array_core import ParamList
+        _parameters_ = ParamList()
+    def parameter_names(self):
+        return [p.name for p in self._parameters_]
+    def parameters_changed(self):
+        """
+        This method gets called when parameters have changed.
+        Another way of listening to param changes is to
+        add self as a listener to the param, such that
+        updates get passed through. See :py:function:``GPy.core.param.Observable.add_observer``
+        """
+        pass
 class Pickleable(object):
     def _getstate(self):
@@ -89,11 +110,22 @@ class Gradcheckable(Parentable):
     def _checkgrad(self, param):
         raise NotImplementedError, "Need log likelihood to check gradient against"
+class Indexable(object):
+    def _raveled_index(self):
+        raise NotImplementedError, "Need to be able to get the raveled Index"
+    def _internal_offset(self):
+        return 0
+    def _offset_for(self, param):
+        raise NotImplementedError, "shouldnt happen, offset required from non parameterization object?"
-class Constrainable(Nameable):
+class Constrainable(Nameable, Indexable, Parameterizable):
     def __init__(self, name, default_constraint=None):
         super(Constrainable,self).__init__(name)
         self._default_constraint_ = default_constraint
+        from index_operations import ParameterIndexOperations
+        self.constraints = ParameterIndexOperations()
     #===========================================================================
     # Fixing Parameters:
     #===========================================================================
@@ -105,17 +137,28 @@ class Constrainable(Nameable):
         """
         if value is not None:
             self[:] = value
-        self._highest_parent_._fix(self,warning)
+        self.constrain(__fixed__, warning=warning)
+        self._highest_parent_._set_fixed(self._raveled_index())
     fix = constrain_fixed
     def unconstrain_fixed(self):
         """
         This parameter will no longer be fixed.
         """
-        self._highest_parent_._unfix(self)
+        unconstrained = self.unconstrain(__fixed__)
+        self._highest_parent_._set_unfixed(unconstrained)
     unfix = unconstrain_fixed
     #===========================================================================
     # Constrain operations -> done
     #===========================================================================
+    def _parent_changed(self, parent):
+        c = self.constraints
+        from index_operations import ParameterIndexOperationsView
+        self.constraints = ParameterIndexOperationsView(parent.constraints, parent._offset_for(self), self.size)
+        self.constraints.update(c)
+        del c
+        for p in self._parameters_:
+            p._parent_changed(parent)
     def constrain(self, transform, warning=True, update=True):
         """
         :param transform: the :py:class:`GPy.core.transformations.Transformation`
@@ -125,15 +168,21 @@ class Constrainable(Nameable):
         Constrain the parameter to the given
         :py:class:`GPy.core.transformations.Transformation`.
         """
-        if self.has_parent():
-            self._highest_parent_._add_constrain(self, transform, warning)
+        if isinstance(transform, Transformation):
+            self._set_params(transform.initialize(self._get_params()), update=False)
+        reconstrained = self.unconstrain()
+        self.constraints.add(transform, self._raveled_index())
+        if reconstrained.size > 0:
+            print "WARNING: reconstraining parameters {}".format(self.parameter_names)
         if update:
             self._highest_parent_.parameters_changed()
-        else:
-            for p in self._parameters_:
-                self._add_constrain(p, transform, warning)
-            if update:
-                self.parameters_changed()
+        # if self.has_parent():
+        #     self._highest_parent_._add_constrain(self, transform, warning)
+        # else:
+        #     for p in self._parameters_:
+        #         self._add_constrain(p, transform, warning)
+        #     if update:
+        #         self.parameters_changed()
     def constrain_positive(self, warning=True, update=True):
         """
@@ -167,11 +216,13 @@ class Constrainable(Nameable):
         remove all :py:class:`GPy.core.transformations.Transformation`
         transformats of this parameter object.
         """
-        if self.has_parent():
-            self._highest_parent_._remove_constrain(self, *transforms)
-        else:
-            for p in self._parameters_:
-                self._remove_constrain(p, *transforms)
+        if len(transforms) == 0:
+            transforms = self.constraints.properties()
+        import numpy as np
+        removed = np.empty((0,),dtype=int)
+        for t in transforms:
+            removed = np.intersect1d(removed, self.constraints.remove(t, self._raveled_index()))
+        return removed
     def unconstrain_positive(self):
         """

View file

@@ -8,16 +8,9 @@ import cPickle
 import itertools
 from re import compile, _pattern_type
 from param import ParamConcatenation, Param
-from parameter_core import Constrainable, Pickleable, Observable, adjust_name_for_printing, Gradcheckable
+from parameter_core import Constrainable, Pickleable, Observable, adjust_name_for_printing, Gradcheckable, __fixed__
-from index_operations import ParameterIndexOperations,\
-    index_empty
 from array_core import ParamList
-#===============================================================================
-# Printing:
-__fixed__ = "fixed"
-#===============================================================================
#===============================================================================
# constants
FIXED = False
@@ -69,7 +62,6 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
     def __init__(self, name=None):
         super(Parameterized, self).__init__(name=name)
         self._in_init_ = True
-        self._constraints_ = None#ParameterIndexOperations()
         self._parameters_ = ParamList()
         self.size = sum(p.size for p in self._parameters_)
         if not self._has_fixes():
@@ -79,11 +71,6 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         self._added_names_ = set()
         del self._in_init_
-    @property
-    def constraints(self):
-        if self._constraints_ is None:
-            self._constraints_ = ParameterIndexOperations()
-        return self._constraints_
     #===========================================================================
     # Parameter connection for model creation:
     #===========================================================================
@@ -128,12 +115,14 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         Add all parameters to this param class, you can insert parameters
         at any given index using the :func:`list.insert` syntax
         """
+        # if param.has_parent():
+        #     raise AttributeError, "parameter {} already in another model, create new object (or copy) for adding".format(param._short())
         if param in self._parameters_ and index is not None:
             # make sure fixes and constraints are indexed right
             if self._has_fixes():
-                param_slice = slice(self._offset_for(param),self._offset_for(param)+param.size)
+                param_slice = slice(self._offset_for(param), self._offset_for(param) + param.size)
                 dest_index = sum((p.size for p in self._parameters_[:index]))
-                dest_slice = slice(dest_index,dest_index+param.size)
+                dest_slice = slice(dest_index, dest_index + param.size)
                 fixes_param = self._fixes_[param_slice].copy()
                 self._fixes_[param_slice] = self._fixes_[dest_slice]
                 self._fixes_[dest_slice] = fixes_param
@@ -164,22 +153,18 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
             ins = sum((p.size for p in self._parameters_[:index]))
             if self._has_fixes(): self._fixes_ = np.r_[self._fixes_[:ins], fixes_param, self._fixes[ins:]]
             elif not np.all(fixes_param):
-                self._fixes_ = np.ones(self.size+param.size, dtype=bool)
-                self._fixes_[ins:ins+param.size] = fixes_param
+                self._fixes_ = np.ones(self.size + param.size, dtype=bool)
+                self._fixes_[ins:ins + param.size] = fixes_param
             self.size += param.size
         else:
             raise RuntimeError, """Parameter exists already added and no copy made"""
         self._connect_parameters()
-        # make sure the constraints are pulled over:
-        if hasattr(param, "_constraints_") and param._constraints_ is not None:
-            for t, ind in param._constraints_.iteritems():
-                self.constraints.add(t, ind+self._offset_for(param))
-            param._constraints_.clear()
+        for p in self._parameters_:
+            p._parent_changed(self)
         if param._default_constraint_ is not None:
-            self._add_constrain(param, param._default_constraint_, False)
+            param.constrain(param._default_constraint_, False)
         if self._has_fixes() and np.all(self._fixes_): # ==UNFIXED
-            self._fixes_= None
+            self._fixes_ = None
     def add_parameters(self, *parameters):
         """
@@ -202,30 +187,22 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
                                   or p in names_params_indices)])
         self._connect_parameters()
-    def parameters_changed(self):
-        """
-        This method gets called when parameters have changed.
-        Another way of listening to param changes is to
-        add self as a listener to the param, such that
-        updates get passed through. See :py:function:``GPy.core.param.Observable.add_observer``
-        """
-        # will be called as soon as parameters have changed
-        pass
     def _connect_parameters(self):
         # connect parameterlist to this parameterized object
         # This just sets up the right connection for the params objects
         # to be used as parameters
+        # it also sets the constraints for each parameter to the constraints
+        # of their respective parents
         if not hasattr(self, "_parameters_") or len(self._parameters_) < 1:
             # no parameters for this class
             return
         sizes = [0]
         self._param_slices_ = []
-        for i,p in enumerate(self._parameters_):
+        for i, p in enumerate(self._parameters_):
             p._direct_parent_ = self
             p._parent_index_ = i
             not_unique = []
-            sizes.append(p.size+sizes[-1])
+            sizes.append(p.size + sizes[-1])
             self._param_slices_.append(slice(sizes[-2], sizes[-1]))
             pname = adjust_name_for_printing(p.name)
             # and makes sure to not delete programmatically added parameters
@@ -237,7 +214,6 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
             elif not (pname in not_unique):
                 self.__dict__[pname] = p
                 self._added_names_.add(pname)
     #===========================================================================
     # Pickling operations
     #===========================================================================
@@ -255,16 +231,16 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
             cPickle.dump(self, f, protocol)
     def copy(self):
         """Returns a (deep) copy of the current model """
-        #dc = dict()
-        #for k, v in self.__dict__.iteritems():
-            #if k not in ['_highest_parent_', '_direct_parent_']:
-                #dc[k] = copy.deepcopy(v)
-        #dc = copy.deepcopy(self.__dict__)
-        #dc['_highest_parent_'] = None
-        #dc['_direct_parent_'] = None
-        #s = self.__class__.new()
-        #s.__dict__ = dc
+        # dc = dict()
+        # for k, v in self.__dict__.iteritems():
+        #     if k not in ['_highest_parent_', '_direct_parent_']:
+        #         dc[k] = copy.deepcopy(v)
+        # dc = copy.deepcopy(self.__dict__)
+        # dc['_highest_parent_'] = None
+        # dc['_direct_parent_'] = None
+        # s = self.__class__.new()
+        # s.__dict__ = dc
         return copy.deepcopy(self)
     def __getstate__(self):
         if self._has_get_set_state():
@@ -273,7 +249,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
     def __setstate__(self, state):
         if self._has_get_set_state():
             self._setstate(state) # set state
-            #self._set_params(self._get_params()) # restore all values
+            # self._set_params(self._get_params()) # restore all values
             return
         self.__dict__ = state
     def _has_get_set_state(self):
@@ -289,7 +265,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         """
         return [
             self._fixes_,
-            self._constraints_,
+            self.constraints,
             self._parameters_,
             self._name,
             self._added_names_,
@@ -300,7 +276,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         self._name = state.pop()
         self._parameters_ = state.pop()
         self._connect_parameters()
-        self._constraints_ = state.pop()
+        self.constraints = state.pop()
         self._fixes_ = state.pop()
         self.parameters_changed()
     #===========================================================================
@@ -310,9 +286,9 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         if self.has_parent():
             return g
         x = self._get_params()
-        [numpy.put(g, i, g[i]*c.gradfactor(x[i])) for c,i in self.constraints.iteritems() if c != __fixed__]
+        [numpy.put(g, i, g[i] * c.gradfactor(x[i])) for c, i in self.constraints.iteritems() if c != __fixed__]
         for p in self.flattened_parameters:
-            for t,i in p._tied_to_me_.iteritems():
+            for t, i in p._tied_to_me_.iteritems():
                 g[self._offset_for(p) + numpy.array(list(i))] += g[self._raveled_index_for(t)]
         if self._has_fixes(): return g[self._fixes_]
         return g
@@ -320,7 +296,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
     # Optimization handles:
     #===========================================================================
     def _get_param_names(self):
-        n = numpy.array([p.name_hirarchical+'['+str(i)+']' for p in self.flattened_parameters for i in p._indices()])
+        n = numpy.array([p.name_hirarchical + '[' + str(i) + ']' for p in self.flattened_parameters for i in p._indices()])
         return n
     def _get_param_names_transformed(self):
         n = self._get_param_names()
@@ -331,16 +307,16 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         # don't overwrite this anymore!
         if not self.size:
             return np.empty(shape=(0,), dtype=np.float64)
-        return numpy.hstack([x._get_params() for x in self._parameters_ if x.size>0])
+        return numpy.hstack([x._get_params() for x in self._parameters_ if x.size > 0])
     def _set_params(self, params, update=True):
         # don't overwrite this anymore!
-        [p._set_params(params[s], update=update) for p,s in itertools.izip(self._parameters_,self._param_slices_)]
+        [p._set_params(params[s], update=update) for p, s in itertools.izip(self._parameters_, self._param_slices_)]
         self.parameters_changed()
     def _get_params_transformed(self):
         # transformed parameters (apply transformation rules)
         p = self._get_params()
-        [numpy.put(p, ind, c.finv(p[ind])) for c,ind in self.constraints.iteritems() if c != __fixed__]
+        [numpy.put(p, ind, c.finv(p[ind])) for c, ind in self.constraints.iteritems() if c != __fixed__]
         if self._has_fixes():
             return p[self._fixes_]
         return p
@@ -350,7 +326,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
     def _untransform_params(self, p):
         p = p.copy()
         if self._has_fixes(): tmp = self._get_params(); tmp[self._fixes_] = p; p = tmp; del tmp
-        [numpy.put(p, ind, c.f(p[ind])) for c,ind in self.constraints.iteritems() if c != __fixed__]
+        [numpy.put(p, ind, c.f(p[ind])) for c, ind in self.constraints.iteritems() if c != __fixed__]
         return p
     def _name_changed(self, param, old_name):
         if hasattr(self, old_name) and old_name in self._added_names_:
@@ -365,7 +341,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
     #===========================================================================
     def _backtranslate_index(self, param, ind):
         # translate an index in parameterized indexing into the index of param
-        ind = ind-self._offset_for(param)
+        ind = ind - self._offset_for(param)
         ind = ind[ind >= 0]
         internal_offset = param._internal_offset()
         ind = ind[ind < param.size + internal_offset]
@@ -420,13 +396,13 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         if self._has_fixes():
             return self._fixes_[self._raveled_index_for(param)]
         return numpy.ones(self.size, dtype=bool)[self._raveled_index_for(param)]
-    def _fix(self, param, warning=True):
-        f = self._add_constrain(param, __fixed__, warning)
-        self._set_fixed(f)
-    def _unfix(self, param):
-        if self._has_fixes():
-            f = self._remove_constrain(param, __fixed__)
-            self._set_unfixed(f)
+    # def _fix(self, param, warning=True):
+    #     f = self._add_constrain(param, __fixed__, warning)
+    #     self._set_fixed(f)
+    # def _unfix(self, param):
+    #     if self._has_fixes():
+    #         f = self._remove_constrain(param, __fixed__)
+    #         self._set_unfixed(f)
     #===========================================================================
     # Convenience for fixed, tied checking of param:
     #===========================================================================
@@ -437,7 +413,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         if not self._has_fixes():
             return False
         return not self._fixes_[self._raveled_index_for(param)].any()
-        #return not self._fixes_[self._offset_for(param): self._offset_for(param)+param._realsize_].any()
+        # return not self._fixes_[self._offset_for(param): self._offset_for(param)+param._realsize_].any()
     @property
     def is_fixed(self):
         for p in self._parameters_:
@@ -455,54 +431,33 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
     #===========================================================================
     # Constraint Handling:
     #===========================================================================
-    def _add_constrain(self, param, transform, warning=True):
-        rav_i = self._raveled_index_for(param)
-        reconstrained = self._remove_constrain(param, index=rav_i) # remove constraints before
-        # if removing constraints before adding new is not wanted, just delete the above line!
-        self.constraints.add(transform, rav_i)
-        param = self._get_original(param)
-        if not (transform == __fixed__):
-            param._set_params(transform.initialize(param._get_params()), update=False)
-        if warning and any(reconstrained):
-            # if you want to print the whole params object, which was reconstrained use:
-            # m = str(param[self._backtranslate_index(param, reconstrained)])
-            print "Warning: re-constraining parameters:\n{}".format(param._short())
-        return rav_i
-    def _remove_constrain(self, param, *transforms, **kwargs):
-        if not transforms:
-            transforms = self.constraints.properties()
-        removed_indices = numpy.array([]).astype(int)
-        if "index" in kwargs: index = kwargs['index']
-        else: index = self._raveled_index_for(param)
-        for constr in transforms:
-            removed = self.constraints.remove(constr, index)
-            if constr is __fixed__:
-                self._set_unfixed(removed)
-            removed_indices = numpy.union1d(removed_indices, removed)
-        return removed_indices
-    # convienience for iterating over items
-    def _constraints_iter_items(self, param):
-        for constr, ind in self.constraints.iteritems():
-            ind = self._backtranslate_index(param, ind)
-            if not index_empty(ind):
-                yield constr, ind
-    def _constraints_iter(self, param):
-        for constr, _ in self._constraints_iter_items(param):
-            yield constr
-    def _contraints_iter_indices(self, param):
-        # iterate through all constraints belonging to param
-        for _, ind in self._constraints_iter_items(param):
-            yield ind
-    def _constraint_indices(self, param, constraint):
-        # indices in model range for param and constraint
-        return self._backtranslate_index(param, self.constraints[constraint]) + self._offset_for(param)
-    def _constraints_for(self, param, rav_index):
-        # constraint for param given its internal rav_index
-        return self.constraints.properties_for(rav_index+self._offset_for(param))
-    def _constraints_for_collect(self, param, rav_index):
-        # constraint for param given its internal rav_index
-        cs = self._constraints_for(param, rav_index)
-        return set(itertools.chain(*cs))
+    #===========================================================================
+    # def _add_constrain(self, param, transform, warning=True):
+    #     rav_i = self._raveled_index_for(param)
+    #     reconstrained = self._remove_constrain(param, index=rav_i) # remove constraints before
+    #     # if removing constraints before adding new is not wanted, just delete the above line!
+    #     self.constraints.add(transform, rav_i)
+    #     param = self._get_original(param)
+    #     if not (transform == __fixed__):
+    #         param._set_params(transform.initialize(param._get_params()), update=False)
+    #     if warning and any(reconstrained):
+    #         # if you want to print the whole params object, which was reconstrained use:
+    #         # m = str(param[self._backtranslate_index(param, reconstrained)])
+    #         print "Warning: re-constraining parameters:\n{}".format(param._short())
+    #     return rav_i
+    # def _remove_constrain(self, param, *transforms, **kwargs):
+    #     if not transforms:
+    #         transforms = self.constraints.properties()
+    #     removed_indices = numpy.array([]).astype(int)
+    #     if "index" in kwargs: index = kwargs['index']
+    #     else: index = self._raveled_index_for(param)
+    #     for constr in transforms:
+    #         removed = self.constraints.remove(constr, index)
+    #         if constr is __fixed__:
+    #             self._set_unfixed(removed)
+    #         removed_indices = numpy.union1d(removed_indices, removed)
+    #     return removed_indices
+    #===========================================================================
    #===========================================================================
    # Get/set parameters:
    #===========================================================================
@@ -539,7 +494,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
    # def __getattribute__(self, name):
    #     #try:
    #         return object.__getattribute__(self, name)
-    #except AttributeError:
+    #     except AttributeError:
    #         _, a, tb = sys.exc_info()
    #         try:
    #             return self.__getitem__(name)
@@ -603,11 +558,11 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         to_print = []
         for n, d, c, t in itertools.izip(names, desc, constrs, ts):
             to_print.append(format_spec.format(name=n, desc=d, const=c, t=t))
-        #to_print = [format_spec.format(p=p, const=c, t=t) if isinstance(p, Param) else p.__str__(header=False) for p, c, t in itertools.izip(self._parameters_, constrs, ts)]
+        # to_print = [format_spec.format(p=p, const=c, t=t) if isinstance(p, Param) else p.__str__(header=False) for p, c, t in itertools.izip(self._parameters_, constrs, ts)]
-        sep = '-'*(nl+sl+cl+tl+8*2+3)
+        sep = '-' * (nl + sl + cl + tl + 8 * 2 + 3)
         if header:
             header = " {{0:<{0}s}} | {{1:^{1}s}} | {{2:^{2}s}} | {{3:^{3}s}}".format(nl, sl, cl, tl).format(name, "Value", "Constraint", "Tied to")
-            #header += '\n' + sep
+            # header += '\n' + sep
             to_print.insert(0, header)
         return '\n'.format(sep).join(to_print)
     pass
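
Note: taken together, _get_params_transformed, _untransform_params and _transform_gradients implement one idea: the optimizer steps in unconstrained space phi, the model lives in constrained space theta = c.f(phi), and gradients pick up the chain-rule factor c.gradfactor. A hedged sketch with exp/log standing in for a positivity constraint:

    import numpy as np

    f = np.exp                        # _untransform_params: phi -> theta
    finv = np.log                     # _get_params_transformed: theta -> phi
    gradfactor = lambda theta: theta  # d(exp(phi))/d(phi) = exp(phi) = theta

    theta = np.array([0.5, 2.0])      # constrained model parameters
    phi = finv(theta)                 # what the optimizer actually sees
    assert np.allclose(f(phi), theta) # the round trip is exact

    dL_dtheta = np.array([1.0, -3.0])
    dL_dphi = dL_dtheta * gradfactor(theta)  # what _transform_gradients computes
    print(dL_dphi)                    # [ 0.5 -6. ]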

View file

@@ -27,6 +27,8 @@ class Transformation(object):
         raise NotImplementedError
     def __str__(self):
         raise NotImplementedError
+    def __repr__(self):
+        return self.__class__.__name__
 class Logexp(Transformation):
     domain = _POSITIVE
@@ -94,7 +96,6 @@ class LogexpClipped(Logexp):
     def __str__(self):
         return '+ve_c'
 class Exponent(Transformation):
     # TODO: can't allow this to go to zero, need to set a lower bound. Similar with negative Exponent below. See old MATLAB code.
     domain = _POSITIVE
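
Note: with the added __repr__, any Transformation now prints as its class name. For reference, a self-contained sketch of the interface this file defines (f, finv, gradfactor, initialize, __str__); the softplus math is standard, but the class itself is illustrative, not the library's Logexp:

    import numpy as np

    class SoftPlus(object):  # would subclass Transformation in the real file
        def f(self, phi):                 # unconstrained -> positive
            return np.log1p(np.exp(phi))
        def finv(self, theta):            # positive -> unconstrained
            return np.log(np.expm1(theta))
        def gradfactor(self, theta):      # d(theta)/d(phi) expressed in theta
            return 1. - np.exp(-theta)
        def initialize(self, theta):      # clip into the domain so finv is defined
            return np.where(theta > 0, theta, 1e-6)
        def __str__(self):
            return '+ve'
        def __repr__(self):               # the behaviour added in this hunk
            return self.__class__.__name__

    t = SoftPlus()
    phi = np.array([-2., 0., 2.])
    assert np.allclose(t.finv(t.f(phi)), phi)
    print(repr(t))  # SoftPlus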

View file

@@ -23,7 +23,7 @@ class BayesianGPLVM(SparseGP, GPLVM):
     """
     def __init__(self, Y, input_dim, X=None, X_variance=None, init='PCA', num_inducing=10,
-                 Z=None, kernel=None, inference_method=None, likelihood=Gaussian(), name='bayesian gplvm', **kwargs):
+                 Z=None, kernel=None, inference_method=None, likelihood=None, name='bayesian gplvm', **kwargs):
         if X == None:
             X = self.initialise_latent(init, input_dim, Y)
         self.init = init
@@ -38,6 +38,8 @@ class BayesianGPLVM(SparseGP, GPLVM):
         if kernel is None:
             kernel = kern.rbf(input_dim) # + kern.white(input_dim)
+        if likelihood is None:
+            likelihood = Gaussian()
         self.q = Normal(X, X_variance)
         SparseGP.__init__(self, X, Y, Z, kernel, likelihood, inference_method, X_variance, name, **kwargs)
         self.add_parameter(self.q, index=0)
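
Note: the likelihood=Gaussian() to likelihood=None change fixes a classic Python pitfall: default arguments are evaluated once, at def time, so every BayesianGPLVM would otherwise share (and mutate) a single Gaussian instance. A minimal demonstration with a stand-in class:

    class Gaussian(object):
        pass

    def bad(likelihood=Gaussian()):   # evaluated once: one instance for all calls
        return likelihood

    def good(likelihood=None):        # the pattern used in the hunk above
        if likelihood is None:
            likelihood = Gaussian()   # fresh instance per call
        return likelihood

    print(bad() is bad())    # True  -- shared state across models
    print(good() is good())  # False -- each model gets its own likelihood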