redid constraints

This commit is contained in:
Max Zwiessele 2014-02-12 17:11:55 +00:00
parent c0eddf29e7
commit a264cdaa98
8 changed files with 290 additions and 190 deletions

View file

@ -17,10 +17,10 @@ import itertools
# import numdifftools as ndt
class Model(Parameterized):
_fail_count = 0 # Count of failed optimization steps (see objective)
_allowed_failures = 10 # number of allowed failures
_fail_count = 0 # Count of failed optimization steps (see objective)
_allowed_failures = 10 # number of allowed failures
def __init__(self, name):
super(Model, self).__init__(name)#Parameterized.__init__(self)
super(Model, self).__init__(name) # Parameterized.__init__(self)
self.priors = []
self._priors = ParameterIndexOperations()
self.optimization_runs = []
@ -30,10 +30,10 @@ class Model(Parameterized):
def log_likelihood(self):
raise NotImplementedError, "this needs to be implemented to use the model class"
def _log_likelihood_gradients(self):
#def dK_d(self, param, dL_dK, X, X2)
# def dK_d(self, param, dL_dK, X, X2)
g = np.zeros(self.size)
try:
#[g.__setitem__(s, self.gradient_mapping[p]().flat) for p, s in itertools.izip(self._parameters_, self._param_slices_) if not p.is_fixed]
# [g.__setitem__(s, self.gradient_mapping[p]().flat) for p, s in itertools.izip(self._parameters_, self._param_slices_) if not p.is_fixed]
[p._collect_gradient(g[s]) for p, s in itertools.izip(self._parameters_, self._param_slices_) if not p.is_fixed]
except ValueError:
raise ValueError, 'Gradient for {} not defined, please specify gradients for parameters to optimize'.format(p.name)
@ -100,7 +100,7 @@ class Model(Parameterized):
if len(tie_matches) > 1:
raise ValueError, "cannot place prior across multiple ties"
elif len(tie_matches) == 1:
which = which[:1] # just place a prior object on the first parameter
which = which[:1] # just place a prior object on the first parameter
# check constraints are okay
@ -168,14 +168,14 @@ class Model(Parameterized):
Make this draw from the prior if one exists, else draw from N(0,1)
"""
# first take care of all parameters (from N(0,1))
#x = self._get_params_transformed()
# x = self._get_params_transformed()
x = np.random.randn(self.size_transformed)
x = self._untransform_params(x)
# now draw from prior where possible
if self.priors is not None and len(self.priors):
[np.put(x, i, p.rvs(1)) for i, p in enumerate(self.priors) if not p is None]
self._set_params(x)
#self._set_params_transformed(self._get_params_transformed()) # makes sure all of the tied parameters get the same init (since there's only one prior object...)
# self._set_params_transformed(self._get_params_transformed()) # makes sure all of the tied parameters get the same init (since there's only one prior object...)
def optimize_restarts(self, num_restarts=10, robust=False, verbose=True, parallel=False, num_processes=None, **kwargs):
"""
@ -220,8 +220,8 @@ class Model(Parameterized):
job = pool.apply_async(opt_wrapper, args=(self,), kwds=kwargs)
jobs.append(job)
pool.close() # signal that no more data coming in
pool.join() # wait for all the tasks to complete
pool.close() # signal that no more data coming in
pool.join() # wait for all the tasks to complete
except KeyboardInterrupt:
print "Ctrl+c received, terminating and joining pool."
pool.terminate()
@ -378,7 +378,7 @@ class Model(Parameterized):
def optimize_SGD(self, momentum=0.1, learning_rate=0.01, iterations=20, **kwargs):
# assert self.Y.shape[1] > 1, "SGD only works with D > 1"
sgd = SGD.StochasticGD(self, iterations, learning_rate, momentum, **kwargs) # @UndefinedVariable
sgd = SGD.StochasticGD(self, iterations, learning_rate, momentum, **kwargs) # @UndefinedVariable
sgd.run()
self.optimization_runs.append(sgd)
@ -412,7 +412,7 @@ class Model(Parameterized):
gradient = self.objective_function_gradients(x)
numerical_gradient = (f1 - f2) / (2 * dx)
global_ratio = (f1 - f2) / (2 * np.dot(dx, np.where(gradient==0, 1e-32, gradient)))
global_ratio = (f1 - f2) / (2 * np.dot(dx, np.where(gradient == 0, 1e-32, gradient)))
return (np.abs(1. - global_ratio) < tolerance) or (np.abs(gradient - numerical_gradient).mean() < tolerance)
else:
@ -444,18 +444,18 @@ class Model(Parameterized):
return
gradient = self.objective_function_gradients(x)
np.where(gradient==0, 1e-312, gradient)
np.where(gradient == 0, 1e-312, gradient)
ret = True
for i, ind in enumerate(param_list):
xx = x.copy()
xx[ind] += step
xx[i] += step
f1 = self.objective_function(xx)
xx[ind] -= 2.*step
xx[i] -= 2.*step
f2 = self.objective_function(xx)
numerical_gradient = (f1 - f2) / (2 * step)
ratio = (f1 - f2) / (2 * step * gradient[ind])
difference = np.abs((f1 - f2) / 2 / step - gradient[ind])
ratio = (f1 - f2) / (2 * step * gradient[i])
difference = np.abs((f1 - f2) / 2 / step - gradient[i])
if (np.abs(1. - ratio) < tolerance) or np.abs(difference) < tolerance:
formatted_name = "\033[92m {0} \033[0m".format(names[ind])
@ -466,7 +466,7 @@ class Model(Parameterized):
r = '%.6f' % float(ratio)
d = '%.6f' % float(difference)
g = '%.6f' % gradient[ind]
g = '%.6f' % gradient[i]
ng = '%.6f' % float(numerical_gradient)
grad_string = "{0:<{c0}}|{1:^{c1}}|{2:^{c2}}|{3:^{c3}}|{4:^{c4}}".format(formatted_name, r, d, g, ng, c0=cols[0] + 9, c1=cols[1], c2=cols[2], c3=cols[3], c4=cols[4])
print grad_string
@ -517,10 +517,10 @@ class Model(Parameterized):
alpha = 0
stop = False
#Handle **kwargs
# Handle **kwargs
ep_args = {}
for arg in kwargs.keys():
if arg in ('epsilon','power_ep'):
if arg in ('epsilon', 'power_ep'):
ep_args[arg] = kwargs[arg]
del kwargs[arg]
@ -528,7 +528,7 @@ class Model(Parameterized):
last_approximation = self.likelihood.copy()
last_params = self._get_params()
if len(ep_args) == 2:
self.update_likelihood_approximation(epsilon=ep_args['epsilon'],power_ep=ep_args['power_ep'])
self.update_likelihood_approximation(epsilon=ep_args['epsilon'], power_ep=ep_args['power_ep'])
elif len(ep_args) == 1:
if ep_args.keys()[0] == 'epsilon':
self.update_likelihood_approximation(epsilon=ep_args['epsilon'])
@ -540,8 +540,8 @@ class Model(Parameterized):
ll_change = new_ll - last_ll
if ll_change < 0:
self.likelihood = last_approximation # restore previous likelihood approximation
self._set_params(last_params) # restore model parameters
self.likelihood = last_approximation # restore previous likelihood approximation
self._set_params(last_params) # restore model parameters
print "Log-likelihood decrement: %s \nLast likelihood update discarded." % ll_change
stop = True
else:

View file

@ -4,7 +4,7 @@
__updated__ = '2013-12-16'
import numpy as np
from parameter_core import Observable, Constrainable, Gradcheckable
from parameter_core import Observable, Parameterizable
class ParamList(list):
"""

View file

@ -58,7 +58,7 @@ class ParameterIndexOperations(object):
index array, for multi-param handling.
'''
def __init__(self):
self._properties = ParamDict()
self._properties = IntArrayDict()
#self._reverse = collections.defaultdict(list)
def __getstate__(self):
@ -71,16 +71,19 @@ class ParameterIndexOperations(object):
def iteritems(self):
return self._properties.iteritems()
def items(self):
return self._properties.items()
def properties(self):
return self._properties.keys()
def iter_properties(self):
def iterproperties(self):
return self._properties.iterkeys()
def shift(self, start, size):
for ind in self.iterindices():
toshift = ind>=start
if len(toshift) > 0:
if toshift.size > 0:
ind[toshift] += size
def clear(self):
@ -96,7 +99,7 @@ class ParameterIndexOperations(object):
return self._properties.values()
def properties_for(self, index):
return vectorize(lambda i: [prop for prop in self.iter_properties() if i in self._properties[prop]], otypes=[list])(index)
return vectorize(lambda i: [prop for prop in self.iterproperties() if i in self[prop]], otypes=[list])(index)
def add(self, prop, indices):
try:
@ -114,8 +117,13 @@ class ParameterIndexOperations(object):
del self._properties[prop]
return removed.astype(int)
return numpy.array([]).astype(int)
def __getitem__(self, prop):
return self._properties[prop]
def __str__(self, *args, **kwargs):
import pprint
return pprint.pformat(dict(self._properties))
def combine_indices(arr1, arr2):
return numpy.union1d(arr1, arr2)
@ -126,5 +134,94 @@ def remove_indices(arr, to_remove):
def index_empty(index):
return numpy.size(index) == 0
class ParameterIndexOperationsView(object):
def __init__(self, param_index_operations, offset, size):
self._param_index_ops = param_index_operations
self._offset = offset
self._size = size
def __getstate__(self):
return [self._param_index_ops, self._offset, self._size]
def __setstate__(self, state):
self._param_index_ops = state[0]
self._offset = state[1]
self._size = state[2]
def _filter_index(self, ind):
return ind[(ind >= self._offset) * (ind < (self._offset + self._size))] - self._offset
def iteritems(self):
for i, ind in self._param_index_ops.iteritems():
ind2 = self._filter_index(ind)
if ind2.size > 0:
yield i, ind2
def items(self):
return [[i,v] for i,v in self.iteritems()]
def properties(self):
return [i for i in self.iterproperties()]
def iterproperties(self):
for i, _ in self.iteritems():
yield i
def shift(self, start, size):
raise NotImplementedError, 'Shifting only supported in original ParamIndexOperations'
def clear(self):
for i, ind in self.items():
self._param_index_ops.remove(i, ind+self._offset)
def size(self):
return reduce(lambda a,b: a+b.size, self.iterindices(), 0)
def iterindices(self):
for _, ind in self.iteritems():
yield ind
def indices(self):
[ind for ind in self.iterindices()]
def properties_for(self, index):
return vectorize(lambda i: [prop for prop in self.iterproperties() if i in self[prop]], otypes=[list])(index)
def add(self, prop, indices):
self._param_index_ops.add(prop, indices+self._offset)
def remove(self, prop, indices):
removed = self._param_index_ops.remove(prop, indices+self._offset)
if removed.size > 0:
return removed - self._size
return removed
def __getitem__(self, prop):
ind = self._filter_index(self._param_index_ops[prop])
if ind.size > 0:
return ind
raise KeyError, prop
def __str__(self, *args, **kwargs):
import pprint
return pprint.pformat(dict(self.iteritems()))
def update(self, parameter_index_view):
for i, v in parameter_index_view.iteritems():
self.add(i, v)
pass

View file

@ -3,7 +3,7 @@
import itertools
import numpy
from parameter_core import Constrainable, Gradcheckable, adjust_name_for_printing
from parameter_core import Constrainable, Gradcheckable, Indexable, Parameterizable, adjust_name_for_printing
from array_core import ObservableArray, ParamList
###### printing
@ -14,13 +14,7 @@ __precision__ = numpy.get_printoptions()['precision'] # numpy printing precision
__print_threshold__ = 5
######
class Float(numpy.float64, Constrainable):
def __init__(self, f, base):
super(Float,self).__init__(f)
self._base = base
class Param(ObservableArray, Constrainable, Gradcheckable):
class Param(ObservableArray, Constrainable, Gradcheckable, Indexable, Parameterizable):
"""
Parameter object for GPy models.
@ -364,7 +358,7 @@ class Param(ObservableArray, Constrainable, Gradcheckable):
return [self.shape]
@property
def _constraints_str(self):
return [' '.join(map(lambda c: str(c[0]) if c[1].size == self._realsize_ else "{" + str(c[0]) + "}", self._highest_parent_._constraints_iter_items(self)))]
return [' '.join(map(lambda c: str(c[0]) if c[1].size == self._realsize_ else "{" + str(c[0]) + "}", self.constraints.iteritems()))]
@property
def _ties_str(self):
return [t._short() for t in self._tied_to_] or ['']
@ -390,7 +384,7 @@ class Param(ObservableArray, Constrainable, Gradcheckable):
else: ties[i, matches[0]] = numpy.take(tt_rav_index, matches[1], mode='wrap')
return map(lambda a: sum(a, []), zip(*[[[tie.flatten()] if tx != None else [] for tx in t] for t, tie in zip(ties, self._tied_to_)]))
def _constraints_for(self, rav_index):
return self._highest_parent_._constraints_for(self, rav_index)
return self.constraints.properties_for(rav_index)
def _indices(self, slice_index=None):
# get a int-array containing all indices in the first axis.
if slice_index is None:

View file

@ -1,7 +1,7 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from transformations import Logexp, NegativeLogexp, Logistic
from transformations import Transformation, Logexp, NegativeLogexp, Logistic
__updated__ = '2013-12-16'
@ -10,6 +10,11 @@ def adjust_name_for_printing(name):
return name.replace(" ", "_").replace(".", "_").replace("-","").replace("+","").replace("!","").replace("*","").replace("/","")
return ''
#===============================================================================
# Printing:
__fixed__ = "fixed"
#===============================================================================
class Observable(object):
_observers_ = {}
def add_observer(self, observer, callble):
@ -20,7 +25,23 @@ class Observable(object):
def _notify_observers(self):
[callble(self) for callble in self._observers_.itervalues()]
class Parameterizable(object):
def __init__(self, *args, **kwargs):
from GPy.core.parameterization.array_core import ParamList
_parameters_ = ParamList()
def parameter_names(self):
return [p.name for p in self._parameters_]
def parameters_changed(self):
"""
This method gets called when parameters have changed.
Another way of listening to param changes is to
add self as a listener to the param, such that
updates get passed through. See :py:function:``GPy.core.param.Observable.add_observer``
"""
pass
class Pickleable(object):
def _getstate(self):
"""
@ -52,7 +73,7 @@ class Parentable(object):
super(Parentable,self).__init__()
self._direct_parent_ = direct_parent
self._parent_index_ = parent_index
def has_parent(self):
return self._direct_parent_ is not None
@ -89,11 +110,22 @@ class Gradcheckable(Parentable):
def _checkgrad(self, param):
raise NotImplementedError, "Need log likelihood to check gradient against"
class Indexable(object):
def _raveled_index(self):
raise NotImplementedError, "Need to be able to get the raveled Index"
def _internal_offset(self):
return 0
def _offset_for(self, param):
raise NotImplementedError, "shouldnt happen, offset required from non parameterization object?"
class Constrainable(Nameable):
class Constrainable(Nameable, Indexable, Parameterizable):
def __init__(self, name, default_constraint=None):
super(Constrainable,self).__init__(name)
self._default_constraint_ = default_constraint
from index_operations import ParameterIndexOperations
self.constraints = ParameterIndexOperations()
#===========================================================================
# Fixing Parameters:
#===========================================================================
@ -105,17 +137,28 @@ class Constrainable(Nameable):
"""
if value is not None:
self[:] = value
self._highest_parent_._fix(self,warning)
self.constrain(__fixed__, warning=warning)
self._highest_parent_._set_fixed(self._raveled_index())
fix = constrain_fixed
def unconstrain_fixed(self):
"""
This parameter will no longer be fixed.
"""
self._highest_parent_._unfix(self)
unconstrained = self.unconstrain(__fixed__)
self._highest_parent_._set_unfixed(unconstrained)
unfix = unconstrain_fixed
#===========================================================================
# Constrain operations -> done
#===========================================================================
def _parent_changed(self, parent):
c = self.constraints
from index_operations import ParameterIndexOperationsView
self.constraints = ParameterIndexOperationsView(parent.constraints, parent._offset_for(self), self.size)
self.constraints.update(c)
del c
for p in self._parameters_:
p._parent_changed(parent)
def constrain(self, transform, warning=True, update=True):
"""
:param transform: the :py:class:`GPy.core.transformations.Transformation`
@ -125,15 +168,21 @@ class Constrainable(Nameable):
Constrain the parameter to the given
:py:class:`GPy.core.transformations.Transformation`.
"""
if self.has_parent():
self._highest_parent_._add_constrain(self, transform, warning)
if update:
self._highest_parent_.parameters_changed()
else:
for p in self._parameters_:
self._add_constrain(p, transform, warning)
if update:
self.parameters_changed()
if isinstance(transform, Transformation):
self._set_params(transform.initialize(self._get_params()), update=False)
reconstrained = self.unconstrain()
self.constraints.add(transform, self._raveled_index())
if reconstrained.size > 0:
print "WARNING: reconstraining parameters {}".format(self.parameter_names)
if update:
self._highest_parent_.parameters_changed()
# if self.has_parent():
# self._highest_parent_._add_constrain(self, transform, warning)
# else:
# for p in self._parameters_:
# self._add_constrain(p, transform, warning)
# if update:
# self.parameters_changed()
def constrain_positive(self, warning=True, update=True):
"""
@ -167,12 +216,14 @@ class Constrainable(Nameable):
remove all :py:class:`GPy.core.transformations.Transformation`
transformats of this parameter object.
"""
if self.has_parent():
self._highest_parent_._remove_constrain(self, *transforms)
else:
for p in self._parameters_:
self._remove_constrain(p, *transforms)
if len(transforms) == 0:
transforms = self.constraints.properties()
import numpy as np
removed = np.empty((0,),dtype=int)
for t in transforms:
removed = np.intersect1d(removed, self.constraints.remove(t, self._raveled_index()))
return removed
def unconstrain_positive(self):
"""
Remove positive constraint of this parameter.

View file

@ -8,16 +8,9 @@ import cPickle
import itertools
from re import compile, _pattern_type
from param import ParamConcatenation, Param
from parameter_core import Constrainable, Pickleable, Observable, adjust_name_for_printing, Gradcheckable
from index_operations import ParameterIndexOperations,\
index_empty
from parameter_core import Constrainable, Pickleable, Observable, adjust_name_for_printing, Gradcheckable, __fixed__
from array_core import ParamList
#===============================================================================
# Printing:
__fixed__ = "fixed"
#===============================================================================
#===============================================================================
# constants
FIXED = False
@ -69,7 +62,6 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
def __init__(self, name=None):
super(Parameterized, self).__init__(name=name)
self._in_init_ = True
self._constraints_ = None#ParameterIndexOperations()
self._parameters_ = ParamList()
self.size = sum(p.size for p in self._parameters_)
if not self._has_fixes():
@ -79,11 +71,6 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
self._added_names_ = set()
del self._in_init_
@property
def constraints(self):
if self._constraints_ is None:
self._constraints_ = ParameterIndexOperations()
return self._constraints_
#===========================================================================
# Parameter connection for model creation:
#===========================================================================
@ -128,12 +115,14 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
Add all parameters to this param class, you can insert parameters
at any given index using the :func:`list.insert` syntax
"""
# if param.has_parent():
# raise AttributeError, "parameter {} already in another model, create new object (or copy) for adding".format(param._short())
if param in self._parameters_ and index is not None:
# make sure fixes and constraints are indexed right
if self._has_fixes():
param_slice = slice(self._offset_for(param),self._offset_for(param)+param.size)
param_slice = slice(self._offset_for(param), self._offset_for(param) + param.size)
dest_index = sum((p.size for p in self._parameters_[:index]))
dest_slice = slice(dest_index,dest_index+param.size)
dest_slice = slice(dest_index, dest_index + param.size)
fixes_param = self._fixes_[param_slice].copy()
self._fixes_[param_slice] = self._fixes_[dest_slice]
self._fixes_[dest_slice] = fixes_param
@ -164,22 +153,18 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
ins = sum((p.size for p in self._parameters_[:index]))
if self._has_fixes(): self._fixes_ = np.r_[self._fixes_[:ins], fixes_param, self._fixes[ins:]]
elif not np.all(fixes_param):
self._fixes_ = np.ones(self.size+param.size, dtype=bool)
self._fixes_[ins:ins+param.size] = fixes_param
self._fixes_ = np.ones(self.size + param.size, dtype=bool)
self._fixes_[ins:ins + param.size] = fixes_param
self.size += param.size
else:
raise RuntimeError, """Parameter exists already added and no copy made"""
self._connect_parameters()
# make sure the constraints are pulled over:
if hasattr(param, "_constraints_") and param._constraints_ is not None:
for t, ind in param._constraints_.iteritems():
self.constraints.add(t, ind+self._offset_for(param))
param._constraints_.clear()
for p in self._parameters_:
p._parent_changed(self)
if param._default_constraint_ is not None:
self._add_constrain(param, param._default_constraint_, False)
if self._has_fixes() and np.all(self._fixes_): # ==UNFIXED
self._fixes_= None
param.constrain(param._default_constraint_, False)
if self._has_fixes() and np.all(self._fixes_): # ==UNFIXED
self._fixes_ = None
def add_parameters(self, *parameters):
"""
@ -202,30 +187,22 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
or p in names_params_indices)])
self._connect_parameters()
def parameters_changed(self):
"""
This method gets called when parameters have changed.
Another way of listening to param changes is to
add self as a listener to the param, such that
updates get passed through. See :py:function:``GPy.core.param.Observable.add_observer``
"""
# will be called as soon as parameters have changed
pass
def _connect_parameters(self):
# connect parameterlist to this parameterized object
# This just sets up the right connection for the params objects
# to be used as parameters
# it also sets the constraints for each parameter to the constraints
# of their respective parents
if not hasattr(self, "_parameters_") or len(self._parameters_) < 1:
# no parameters for this class
return
sizes = [0]
self._param_slices_ = []
for i,p in enumerate(self._parameters_):
for i, p in enumerate(self._parameters_):
p._direct_parent_ = self
p._parent_index_ = i
not_unique = []
sizes.append(p.size+sizes[-1])
sizes.append(p.size + sizes[-1])
self._param_slices_.append(slice(sizes[-2], sizes[-1]))
pname = adjust_name_for_printing(p.name)
# and makes sure to not delete programmatically added parameters
@ -237,7 +214,6 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
elif not (pname in not_unique):
self.__dict__[pname] = p
self._added_names_.add(pname)
#===========================================================================
# Pickling operations
#===========================================================================
@ -255,16 +231,16 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
cPickle.dump(self, f, protocol)
def copy(self):
"""Returns a (deep) copy of the current model """
#dc = dict()
#for k, v in self.__dict__.iteritems():
#if k not in ['_highest_parent_', '_direct_parent_']:
#dc[k] = copy.deepcopy(v)
# dc = dict()
# for k, v in self.__dict__.iteritems():
# if k not in ['_highest_parent_', '_direct_parent_']:
# dc[k] = copy.deepcopy(v)
#dc = copy.deepcopy(self.__dict__)
#dc['_highest_parent_'] = None
#dc['_direct_parent_'] = None
#s = self.__class__.new()
#s.__dict__ = dc
# dc = copy.deepcopy(self.__dict__)
# dc['_highest_parent_'] = None
# dc['_direct_parent_'] = None
# s = self.__class__.new()
# s.__dict__ = dc
return copy.deepcopy(self)
def __getstate__(self):
if self._has_get_set_state():
@ -272,8 +248,8 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
return self.__dict__
def __setstate__(self, state):
if self._has_get_set_state():
self._setstate(state) # set state
#self._set_params(self._get_params()) # restore all values
self._setstate(state) # set state
# self._set_params(self._get_params()) # restore all values
return
self.__dict__ = state
def _has_get_set_state(self):
@ -289,7 +265,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
"""
return [
self._fixes_,
self._constraints_,
self.constraints,
self._parameters_,
self._name,
self._added_names_,
@ -300,7 +276,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
self._name = state.pop()
self._parameters_ = state.pop()
self._connect_parameters()
self._constraints_ = state.pop()
self.constraints = state.pop()
self._fixes_ = state.pop()
self.parameters_changed()
#===========================================================================
@ -310,9 +286,9 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
if self.has_parent():
return g
x = self._get_params()
[numpy.put(g, i, g[i]*c.gradfactor(x[i])) for c,i in self.constraints.iteritems() if c != __fixed__]
[numpy.put(g, i, g[i] * c.gradfactor(x[i])) for c, i in self.constraints.iteritems() if c != __fixed__]
for p in self.flattened_parameters:
for t,i in p._tied_to_me_.iteritems():
for t, i in p._tied_to_me_.iteritems():
g[self._offset_for(p) + numpy.array(list(i))] += g[self._raveled_index_for(t)]
if self._has_fixes(): return g[self._fixes_]
return g
@ -320,7 +296,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
# Optimization handles:
#===========================================================================
def _get_param_names(self):
n = numpy.array([p.name_hirarchical+'['+str(i)+']' for p in self.flattened_parameters for i in p._indices()])
n = numpy.array([p.name_hirarchical + '[' + str(i) + ']' for p in self.flattened_parameters for i in p._indices()])
return n
def _get_param_names_transformed(self):
n = self._get_param_names()
@ -331,16 +307,16 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
# don't overwrite this anymore!
if not self.size:
return np.empty(shape=(0,), dtype=np.float64)
return numpy.hstack([x._get_params() for x in self._parameters_ if x.size>0])
return numpy.hstack([x._get_params() for x in self._parameters_ if x.size > 0])
def _set_params(self, params, update=True):
# don't overwrite this anymore!
[p._set_params(params[s], update=update) for p,s in itertools.izip(self._parameters_,self._param_slices_)]
[p._set_params(params[s], update=update) for p, s in itertools.izip(self._parameters_, self._param_slices_)]
self.parameters_changed()
def _get_params_transformed(self):
# transformed parameters (apply transformation rules)
p = self._get_params()
[numpy.put(p, ind, c.finv(p[ind])) for c,ind in self.constraints.iteritems() if c != __fixed__]
[numpy.put(p, ind, c.finv(p[ind])) for c, ind in self.constraints.iteritems() if c != __fixed__]
if self._has_fixes():
return p[self._fixes_]
return p
@ -350,7 +326,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
def _untransform_params(self, p):
p = p.copy()
if self._has_fixes(): tmp = self._get_params(); tmp[self._fixes_] = p; p = tmp; del tmp
[numpy.put(p, ind, c.f(p[ind])) for c,ind in self.constraints.iteritems() if c != __fixed__]
[numpy.put(p, ind, c.f(p[ind])) for c, ind in self.constraints.iteritems() if c != __fixed__]
return p
def _name_changed(self, param, old_name):
if hasattr(self, old_name) and old_name in self._added_names_:
@ -365,7 +341,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
#===========================================================================
def _backtranslate_index(self, param, ind):
# translate an index in parameterized indexing into the index of param
ind = ind-self._offset_for(param)
ind = ind - self._offset_for(param)
ind = ind[ind >= 0]
internal_offset = param._internal_offset()
ind = ind[ind < param.size + internal_offset]
@ -377,7 +353,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
return self._param_slices_[param._direct_parent_._get_original(param)._parent_index_].start
return self._offset_for(param._direct_parent_) + param._direct_parent_._offset_for(param)
return 0
def _raveled_index_for(self, param):
"""
get the raveled index for a param
@ -387,7 +363,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
if isinstance(param, ParamConcatenation):
return numpy.hstack((self._raveled_index_for(p) for p in param.params))
return param._raveled_index() + self._offset_for(param)
def _raveled_index(self):
"""
get the raveled index for this object,
@ -404,7 +380,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
except AttributeError:
pass
self._fixes_[param_or_index] = FIXED
if numpy.all(self._fixes_): self._fixes_ = None # ==UNFIXED
if numpy.all(self._fixes_): self._fixes_ = None # ==UNFIXED
def _set_unfixed(self, param_or_index):
if not self._has_fixes(): self._fixes_ = numpy.ones(self.size, dtype=bool)
try:
@ -415,18 +391,18 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
for constr, ind in self.constraints.iteritems():
if constr is __fixed__:
self._fixes_[ind] = FIXED
if numpy.all(self._fixes_): self._fixes_ = None # ==UNFIXED
if numpy.all(self._fixes_): self._fixes_ = None # ==UNFIXED
def _fixes_for(self, param):
if self._has_fixes():
return self._fixes_[self._raveled_index_for(param)]
return numpy.ones(self.size, dtype=bool)[self._raveled_index_for(param)]
def _fix(self, param, warning=True):
f = self._add_constrain(param, __fixed__, warning)
self._set_fixed(f)
def _unfix(self, param):
if self._has_fixes():
f = self._remove_constrain(param, __fixed__)
self._set_unfixed(f)
# def _fix(self, param, warning=True):
# f = self._add_constrain(param, __fixed__, warning)
# self._set_fixed(f)
# def _unfix(self, param):
# if self._has_fixes():
# f = self._remove_constrain(param, __fixed__)
# self._set_unfixed(f)
#===========================================================================
# Convenience for fixed, tied checking of param:
#===========================================================================
@ -437,7 +413,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
if not self._has_fixes():
return False
return not self._fixes_[self._raveled_index_for(param)].any()
#return not self._fixes_[self._offset_for(param): self._offset_for(param)+param._realsize_].any()
# return not self._fixes_[self._offset_for(param): self._offset_for(param)+param._realsize_].any()
@property
def is_fixed(self):
for p in self._parameters_:
@ -455,54 +431,33 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
#===========================================================================
# Constraint Handling:
#===========================================================================
def _add_constrain(self, param, transform, warning=True):
rav_i = self._raveled_index_for(param)
reconstrained = self._remove_constrain(param, index=rav_i) # remove constraints before
# if removing constraints before adding new is not wanted, just delete the above line!
self.constraints.add(transform, rav_i)
param = self._get_original(param)
if not (transform == __fixed__):
param._set_params(transform.initialize(param._get_params()), update=False)
if warning and any(reconstrained):
# if you want to print the whole params object, which was reconstrained use:
# m = str(param[self._backtranslate_index(param, reconstrained)])
print "Warning: re-constraining parameters:\n{}".format(param._short())
return rav_i
def _remove_constrain(self, param, *transforms, **kwargs):
if not transforms:
transforms = self.constraints.properties()
removed_indices = numpy.array([]).astype(int)
if "index" in kwargs: index = kwargs['index']
else: index = self._raveled_index_for(param)
for constr in transforms:
removed = self.constraints.remove(constr, index)
if constr is __fixed__:
self._set_unfixed(removed)
removed_indices = numpy.union1d(removed_indices, removed)
return removed_indices
# convenience generators for iterating over a parameter's constraints
def _constraints_iter_items(self, param):
    """Yield (constraint, local_indices) pairs touching `param`.

    Indices are translated from model range into `param`'s own range;
    constraints that do not intersect `param` are skipped.
    """
    for constraint, model_ind in self.constraints.iteritems():
        local_ind = self._backtranslate_index(param, model_ind)
        if index_empty(local_ind):
            continue
        yield constraint, local_ind
def _constraints_iter(self, param):
    """Iterate over the constraints affecting `param` (indices dropped)."""
    return (constraint for constraint, _ in self._constraints_iter_items(param))
def _contraints_iter_indices(self, param):
    # NOTE(review): "contraints" misspelling kept — renaming would break callers.
    """Iterate over the index arrays of every constraint on `param`."""
    return (indices for _, indices in self._constraints_iter_items(param))
def _constraint_indices(self, param, constraint):
    """Model-range indices where `constraint` applies within `param`."""
    offset = self._offset_for(param)
    local = self._backtranslate_index(param, self.constraints[constraint])
    return local + offset
def _constraints_for(self, param, rav_index):
    """Constraints at `param`'s internal raveled index `rav_index`."""
    model_index = self._offset_for(param) + rav_index
    return self.constraints.properties_for(model_index)
def _constraints_for_collect(self, param, rav_index):
    """Flattened, de-duplicated constraint set for `rav_index` of `param`."""
    per_index = self._constraints_for(param, rav_index)
    return set(itertools.chain.from_iterable(per_index))
#===========================================================================
# def _add_constrain(self, param, transform, warning=True):
# rav_i = self._raveled_index_for(param)
# reconstrained = self._remove_constrain(param, index=rav_i) # remove constraints before
# # if removing constraints before adding new is not wanted, just delete the above line!
# self.constraints.add(transform, rav_i)
# param = self._get_original(param)
# if not (transform == __fixed__):
# param._set_params(transform.initialize(param._get_params()), update=False)
# if warning and any(reconstrained):
# # if you want to print the whole params object, which was reconstrained use:
# # m = str(param[self._backtranslate_index(param, reconstrained)])
# print "Warning: re-constraining parameters:\n{}".format(param._short())
# return rav_i
# def _remove_constrain(self, param, *transforms, **kwargs):
# if not transforms:
# transforms = self.constraints.properties()
# removed_indices = numpy.array([]).astype(int)
# if "index" in kwargs: index = kwargs['index']
# else: index = self._raveled_index_for(param)
# for constr in transforms:
# removed = self.constraints.remove(constr, index)
# if constr is __fixed__:
# self._set_unfixed(removed)
# removed_indices = numpy.union1d(removed_indices, removed)
# return removed_indices
#===========================================================================
#===========================================================================
# Get/set parameters:
#===========================================================================
@ -539,7 +494,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
# def __getattribute__(self, name):
# #try:
# return object.__getattribute__(self, name)
#except AttributeError:
# except AttributeError:
# _, a, tb = sys.exc_info()
# try:
# return self.__getitem__(name)
@ -592,22 +547,22 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
return [','.join(x._ties_str) for x in self.flattened_parameters]
def __str__(self, header=True):
    # Render a fixed-width table: one row per flattened parameter with its
    # value description, constraints and ties.
    # NOTE(review): several statements below appear twice (diff leftovers);
    # the duplicates are redundant but harmless.
    name = adjust_name_for_printing(self.name) + "."
    name = adjust_name_for_printing(self.name) + "."
    constrs = self._constraints_str; ts = self._ties_str
    desc = self._description_str; names = self.parameter_names
    # column widths: widest entry in each column, heading included
    nl = max([len(str(x)) for x in names + [name]])
    sl = max([len(str(x)) for x in desc + ["Value"]])
    cl = max([len(str(x)) if x else 0 for x in constrs + ["Constraint"]])
    cl = max([len(str(x)) if x else 0 for x in constrs + ["Constraint"]])
    tl = max([len(str(x)) if x else 0 for x in ts + ["Tied to"]])
    # ANSI bold for the name column; columns separated by ' | '
    format_spec = " \033[1m{{name:<{0}s}}\033[0;0m | {{desc:^{1}s}} | {{const:^{2}s}} | {{t:^{3}s}}".format(nl, sl, cl, tl)
    to_print = []
    for n, d, c, t in itertools.izip(names, desc, constrs, ts):
        to_print.append(format_spec.format(name=n, desc=d, const=c, t=t))
    #to_print = [format_spec.format(p=p, const=c, t=t) if isinstance(p, Param) else p.__str__(header=False) for p, c, t in itertools.izip(self._parameters_, constrs, ts)]
    sep = '-'*(nl+sl+cl+tl+8*2+3)
    # to_print = [format_spec.format(p=p, const=c, t=t) if isinstance(p, Param) else p.__str__(header=False) for p, c, t in itertools.izip(self._parameters_, constrs, ts)]
    sep = '-' * (nl + sl + cl + tl + 8 * 2 + 3)
    if header:
        header = " {{0:<{0}s}} | {{1:^{1}s}} | {{2:^{2}s}} | {{3:^{3}s}}".format(nl, sl, cl, tl).format(name, "Value", "Constraint", "Tied to")
        #header += '\n' + sep
        # header += '\n' + sep
        to_print.insert(0, header)
    # NOTE(review): '\n'.format(sep) has no placeholder, so `sep` is unused
    # and rows are joined by bare newlines — possibly '\n{}\n'.format(sep)
    # was intended; confirm desired output before changing.
    return '\n'.format(sep).join(to_print)
    pass  # unreachable: dead statement after return

View file

@ -27,6 +27,8 @@ class Transformation(object):
raise NotImplementedError
def __str__(self):
    # Abstract: concrete transforms return a short display tag
    # (e.g. '-ve', '+ve_c' in the subclasses below).
    raise NotImplementedError
def __repr__(self):
return self.__class__.__name__
class Logexp(Transformation):
domain = _POSITIVE
@ -56,7 +58,7 @@ class NegativeLogexp(Transformation):
return -self.logexp.initialize(f) # np.abs(f)
def __str__(self):
    # display tag for the negative log-exp transform
    return '-ve'
class LogexpClipped(Logexp):
max_bound = 1e100
min_bound = 1e-10
@ -94,7 +96,6 @@ class LogexpClipped(Logexp):
def __str__(self):
    # display tag: positive with clipping (Logexp bounded by min/max_bound)
    return '+ve_c'
class Exponent(Transformation):
# TODO: can't allow this to go to zero, need to set a lower bound. Similar with negative Exponent below. See old MATLAB code.
domain = _POSITIVE

View file

@ -23,7 +23,7 @@ class BayesianGPLVM(SparseGP, GPLVM):
"""
def __init__(self, Y, input_dim, X=None, X_variance=None, init='PCA', num_inducing=10,
Z=None, kernel=None, inference_method=None, likelihood=Gaussian(), name='bayesian gplvm', **kwargs):
Z=None, kernel=None, inference_method=None, likelihood=None, name='bayesian gplvm', **kwargs):
if X == None:
X = self.initialise_latent(init, input_dim, Y)
self.init = init
@ -37,7 +37,9 @@ class BayesianGPLVM(SparseGP, GPLVM):
if kernel is None:
kernel = kern.rbf(input_dim) # + kern.white(input_dim)
if likelihood is None:
likelihood = Gaussian()
self.q = Normal(X, X_variance)
SparseGP.__init__(self, X, Y, Z, kernel, likelihood, inference_method, X_variance, name, **kwargs)
self.add_parameter(self.q, index=0)