mirror of https://github.com/SheffieldML/GPy.git
synced 2026-05-11 21:12:38 +02:00

commit 6d30a11ae8
Merge branch 'params' of github.com:SheffieldML/GPy into params

15 changed files with 354 additions and 236 deletions

@@ -26,7 +26,7 @@ class GP(Model):
     """
-    def __init__(self, X, Y, kernel, likelihood, inference_method=None, name='gp'):
+    def __init__(self, X, Y, kernel, likelihood, inference_method=None, Y_metadata=None, name='gp'):
         super(GP, self).__init__(name)

         assert X.ndim == 2
@@ -38,6 +38,11 @@ class GP(Model):
         assert Y.shape[0] == self.num_data
         _, self.output_dim = self.Y.shape

+        if Y_metadata is not None:
+            self.Y_metadata = ObservableArray(Y_metadata)
+        else:
+            self.Y_metadata = None
+
         assert isinstance(kernel, kern.kern)
         self.kern = kernel
@@ -59,7 +64,7 @@ class GP(Model):
         self.parameters_changed()

     def parameters_changed(self):
-        self.posterior, self._log_marginal_likelihood, grad_dict = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y)
+        self.posterior, self._log_marginal_likelihood, grad_dict = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y, Y_metadata=self.Y_metadata)
         self._dL_dK = grad_dict['dL_dK']

     def log_likelihood(self):
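
Note: after these hunks a GP carries optional output metadata and forwards it to the inference method on every parameter update. A minimal usage sketch against this branch's API (the data values are hypothetical; Y_metadata stays None here because its contents depend on the likelihood):

    import numpy as np
    import GPy

    X = np.random.rand(20, 1)
    Y = (np.random.rand(20, 1) > 0.5).astype(float)   # binary targets in {0, 1}
    m = GPy.core.GP(X, Y,
                    kernel=GPy.kern.rbf(1),
                    likelihood=GPy.likelihoods.Bernoulli(),
                    inference_method=GPy.inference.latent_function_inference.Laplace(),
                    Y_metadata=None)   # forwarded to inference_method.inference()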

@@ -437,7 +437,7 @@ class Model(Parameterized):
         else:
             param_list = self._raveled_index_for(target_param)
             if self._has_fixes():
-                param_list = np.intersect1d(param_list, np.r_[:self.size][self._fixes_], True)
+                param_list = np.intersect1d(np.r_[:self.size][self._fixes_], param_list, True)

         if param_list.size == 0:
             print "No free parameters to check"
@@ -448,14 +448,14 @@ class Model(Parameterized):
         ret = True
         for i, ind in enumerate(param_list):
             xx = x.copy()
-            xx[ind] += step
+            xx[i] += step
             f1 = self.objective_function(xx)
-            xx[ind] -= 2.*step
+            xx[i] -= 2.*step
             f2 = self.objective_function(xx)

             numerical_gradient = (f1 - f2) / (2 * step)
-            ratio = (f1 - f2) / (2 * step * gradient[ind])
-            difference = np.abs((f1 - f2) / 2 / step - gradient[ind])
+            ratio = (f1 - f2) / (2 * step * gradient[i])
+            difference = np.abs((f1 - f2) / 2 / step - gradient[i])

             if (np.abs(1. - ratio) < tolerance) or np.abs(difference) < tolerance:
                 formatted_name = "\033[92m {0} \033[0m".format(names[ind])
@@ -466,7 +466,7 @@ class Model(Parameterized):

             r = '%.6f' % float(ratio)
             d = '%.6f' % float(difference)
-            g = '%.6f' % gradient[ind]
+            g = '%.6f' % gradient[i]
             ng = '%.6f' % float(numerical_gradient)
             grad_string = "{0:<{c0}}|{1:^{c1}}|{2:^{c2}}|{3:^{c3}}|{4:^{c4}}".format(formatted_name, r, d, g, ng, c0=cols[0] + 9, c1=cols[1], c2=cols[2], c3=cols[3], c4=cols[4])
             print grad_string
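
Note: checkgrad compares each analytic gradient entry against a central finite difference and accepts if either the ratio is close to 1 or the absolute difference is within tolerance. A standalone sketch of the same test, independent of the Model class (function names here are illustrative, not GPy API):

    import numpy as np

    def checkgrad(f, grad, x, step=1e-6, tolerance=1e-3):
        # compare the analytic gradient grad(x) against central differences of f
        g = grad(x)
        ok = True
        for i in range(x.size):
            xx = x.copy()
            xx[i] += step
            f1 = f(xx)
            xx[i] -= 2. * step
            f2 = f(xx)
            numerical = (f1 - f2) / (2 * step)
            ratio = numerical / g[i]
            ok &= (np.abs(1. - ratio) < tolerance) or (np.abs(numerical - g[i]) < tolerance)
        return ok

    # e.g. checkgrad(lambda v: np.sum(v**2), lambda v: 2*v, np.random.randn(5))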

@@ -4,7 +4,7 @@
 __updated__ = '2013-12-16'

 import numpy as np
-from parameter_core import Observable, Constrainable, Gradcheckable
+from parameter_core import Observable, Parameterizable

 class ParamList(list):
     """

@@ -58,7 +58,7 @@ class ParameterIndexOperations(object):
     index array, for multi-param handling.
     '''
     def __init__(self):
-        self._properties = ParamDict()
+        self._properties = IntArrayDict()
        #self._reverse = collections.defaultdict(list)

     def __getstate__(self):
@@ -71,16 +71,19 @@ class ParameterIndexOperations(object):
     def iteritems(self):
         return self._properties.iteritems()

     def items(self):
         return self._properties.items()

     def properties(self):
         return self._properties.keys()

-    def iter_properties(self):
+    def iterproperties(self):
         return self._properties.iterkeys()

     def shift(self, start, size):
         for ind in self.iterindices():
             toshift = ind>=start
-            if len(toshift) > 0:
+            if toshift.size > 0:
                 ind[toshift] += size

     def clear(self):
@@ -96,7 +99,7 @@ class ParameterIndexOperations(object):
         return self._properties.values()

     def properties_for(self, index):
-        return vectorize(lambda i: [prop for prop in self.iter_properties() if i in self._properties[prop]], otypes=[list])(index)
+        return vectorize(lambda i: [prop for prop in self.iterproperties() if i in self[prop]], otypes=[list])(index)

     def add(self, prop, indices):
         try:
@@ -114,9 +117,14 @@ class ParameterIndexOperations(object):
             del self._properties[prop]
             return removed.astype(int)
         return numpy.array([]).astype(int)

+    def __getitem__(self, prop):
+        return self._properties[prop]
+
+    def __str__(self, *args, **kwargs):
+        import pprint
+        return pprint.pformat(dict(self._properties))
+
 def combine_indices(arr1, arr2):
     return numpy.union1d(arr1, arr2)
@@ -126,5 +134,94 @@ def remove_indices(arr, to_remove):
 def index_empty(index):
     return numpy.size(index) == 0

+class ParameterIndexOperationsView(object):
+    def __init__(self, param_index_operations, offset, size):
+        self._param_index_ops = param_index_operations
+        self._offset = offset
+        self._size = size
+
+    def __getstate__(self):
+        return [self._param_index_ops, self._offset, self._size]
+
+    def __setstate__(self, state):
+        self._param_index_ops = state[0]
+        self._offset = state[1]
+        self._size = state[2]
+
+    def _filter_index(self, ind):
+        return ind[(ind >= self._offset) * (ind < (self._offset + self._size))] - self._offset
+
+    def iteritems(self):
+        for i, ind in self._param_index_ops.iteritems():
+            ind2 = self._filter_index(ind)
+            if ind2.size > 0:
+                yield i, ind2
+
+    def items(self):
+        return [[i, v] for i, v in self.iteritems()]
+
+    def properties(self):
+        return [i for i in self.iterproperties()]
+
+    def iterproperties(self):
+        for i, _ in self.iteritems():
+            yield i
+
+    def shift(self, start, size):
+        raise NotImplementedError, 'Shifting only supported in original ParamIndexOperations'
+
+    def clear(self):
+        for i, ind in self.items():
+            self._param_index_ops.remove(i, ind+self._offset)
+
+    def size(self):
+        return reduce(lambda a,b: a+b.size, self.iterindices(), 0)
+
+    def iterindices(self):
+        for _, ind in self.iteritems():
+            yield ind
+
+    def indices(self):
+        [ind for ind in self.iterindices()]
+
+    def properties_for(self, index):
+        return vectorize(lambda i: [prop for prop in self.iterproperties() if i in self[prop]], otypes=[list])(index)
+
+    def add(self, prop, indices):
+        self._param_index_ops.add(prop, indices+self._offset)
+
+    def remove(self, prop, indices):
+        removed = self._param_index_ops.remove(prop, indices+self._offset)
+        if removed.size > 0:
+            return removed - self._size + 1
+        return removed
+
+    def __getitem__(self, prop):
+        ind = self._filter_index(self._param_index_ops[prop])
+        if ind.size > 0:
+            return ind
+        raise KeyError, prop
+
+    def __str__(self, *args, **kwargs):
+        import pprint
+        return pprint.pformat(dict(self.iteritems()))
+
+    def update(self, parameter_index_view):
+        for i, v in parameter_index_view.iteritems():
+            self.add(i, v)
+
+    pass
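
Note: ParameterIndexOperationsView is the new piece here. Instead of copying constraint indices from child to parent, a child object sees a window [offset, offset+size) of its parent's index table, translated into its own frame by _filter_index. A small numpy sketch of that offset arithmetic (the values are hypothetical):

    import numpy as np

    offset, size = 3, 4                      # the child's slice of the parent's raveled parameters
    parent_ind = np.array([0, 2, 3, 5, 8])   # indices stored against some constraint on the parent

    # keep only indices inside the child's window, shifted into child coordinates
    mask = (parent_ind >= offset) * (parent_ind < offset + size)
    child_ind = parent_ind[mask] - offset
    print child_ind                          # [0 2]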

@@ -3,7 +3,7 @@

 import itertools
 import numpy
-from parameter_core import Constrainable, Gradcheckable, adjust_name_for_printing
+from parameter_core import Constrainable, Gradcheckable, Indexable, Parameterizable, adjust_name_for_printing
 from array_core import ObservableArray, ParamList

 ###### printing
@@ -14,13 +14,7 @@ __precision__ = numpy.get_printoptions()['precision'] # numpy printing precision
 __print_threshold__ = 5
 ######

-class Float(numpy.float64, Constrainable):
-    def __init__(self, f, base):
-        super(Float,self).__init__(f)
-        self._base = base
-
-
-class Param(ObservableArray, Constrainable, Gradcheckable):
+class Param(ObservableArray, Constrainable, Gradcheckable, Indexable, Parameterizable):
     """
     Parameter object for GPy models.
@@ -364,7 +358,7 @@ class Param(ObservableArray, Constrainable, Gradcheckable):
         return [self.shape]
     @property
     def _constraints_str(self):
-        return [' '.join(map(lambda c: str(c[0]) if c[1].size == self._realsize_ else "{" + str(c[0]) + "}", self._highest_parent_._constraints_iter_items(self)))]
+        return [' '.join(map(lambda c: str(c[0]) if c[1].size == self._realsize_ else "{" + str(c[0]) + "}", self.constraints.iteritems()))]
     @property
     def _ties_str(self):
         return [t._short() for t in self._tied_to_] or ['']
@@ -390,7 +384,7 @@ class Param(ObservableArray, Constrainable, Gradcheckable):
         else: ties[i, matches[0]] = numpy.take(tt_rav_index, matches[1], mode='wrap')
         return map(lambda a: sum(a, []), zip(*[[[tie.flatten()] if tx != None else [] for tx in t] for t, tie in zip(ties, self._tied_to_)]))
     def _constraints_for(self, rav_index):
-        return self._highest_parent_._constraints_for(self, rav_index)
+        return self.constraints.properties_for(rav_index)
     def _indices(self, slice_index=None):
         # get a int-array containing all indices in the first axis.
         if slice_index is None:

@@ -1,7 +1,7 @@
 # Copyright (c) 2012, GPy authors (see AUTHORS.txt).
 # Licensed under the BSD 3-clause license (see LICENSE.txt)

-from transformations import Logexp, NegativeLogexp, Logistic
+from transformations import Transformation, Logexp, NegativeLogexp, Logistic

 __updated__ = '2013-12-16'
@@ -10,6 +10,11 @@ def adjust_name_for_printing(name):
         return name.replace(" ", "_").replace(".", "_").replace("-","").replace("+","").replace("!","").replace("*","").replace("/","")
     return ''

+#===============================================================================
+# Printing:
+__fixed__ = "fixed"
+#===============================================================================
+
 class Observable(object):
     _observers_ = {}
     def add_observer(self, observer, callble):
@@ -20,6 +25,22 @@ class Observable(object):
     def _notify_observers(self):
         [callble(self) for callble in self._observers_.itervalues()]

+class Parameterizable(object):
+    def __init__(self, *args, **kwargs):
+        from GPy.core.parameterization.array_core import ParamList
+        _parameters_ = ParamList()
+
+    def parameter_names(self):
+        return [p.name for p in self._parameters_]
+
+    def parameters_changed(self):
+        """
+        This method gets called when parameters have changed.
+        Another way of listening to param changes is to
+        add self as a listener to the param, such that
+        updates get passed through. See :py:function:``GPy.core.param.Observable.add_observer``
+        """
+        pass
+
 class Pickleable(object):
     def _getstate(self):
@@ -89,11 +110,22 @@ class Gradcheckable(Parentable):
     def _checkgrad(self, param):
         raise NotImplementedError, "Need log likelihood to check gradient against"

+class Indexable(object):
+    def _raveled_index(self):
+        raise NotImplementedError, "Need to be able to get the raveled Index"
+
+    def _internal_offset(self):
+        return 0
+
+    def _offset_for(self, param):
+        raise NotImplementedError, "shouldnt happen, offset required from non parameterization object?"
+
-class Constrainable(Nameable):
+class Constrainable(Nameable, Indexable, Parameterizable):
     def __init__(self, name, default_constraint=None):
         super(Constrainable,self).__init__(name)
         self._default_constraint_ = default_constraint
+        from index_operations import ParameterIndexOperations
+        self.constraints = ParameterIndexOperations()
     #===========================================================================
     # Fixing Parameters:
     #===========================================================================
@@ -105,17 +137,30 @@ class Constrainable(Nameable):
         """
         if value is not None:
             self[:] = value
-        self._highest_parent_._fix(self,warning)
+        self.constrain(__fixed__, warning=warning)
+        self._highest_parent_._set_fixed(self._raveled_index())
     fix = constrain_fixed

     def unconstrain_fixed(self):
         """
         This parameter will no longer be fixed.
         """
-        self._highest_parent_._unfix(self)
+        unconstrained = self.unconstrain(__fixed__)
+        import ipdb;ipdb.set_trace()
+        self._highest_parent_._set_unfixed(unconstrained)

     unfix = unconstrain_fixed
     #===========================================================================
     # Constrain operations -> done
     #===========================================================================
+    def _parent_changed(self, parent):
+        c = self.constraints
+        from index_operations import ParameterIndexOperationsView
+        self.constraints = ParameterIndexOperationsView(parent.constraints, parent._offset_for(self), self.size)
+        self.constraints.update(c)
+        del c
+        for p in self._parameters_:
+            p._parent_changed(parent)

     def constrain(self, transform, warning=True, update=True):
         """
         :param transform: the :py:class:`GPy.core.transformations.Transformation`
@@ -125,15 +170,21 @@ class Constrainable(Nameable):
         Constrain the parameter to the given
         :py:class:`GPy.core.transformations.Transformation`.
         """
-        if self.has_parent():
-            self._highest_parent_._add_constrain(self, transform, warning)
+        if isinstance(transform, Transformation):
+            self._set_params(transform.initialize(self._get_params()), update=False)
+        reconstrained = self.unconstrain()
+        self.constraints.add(transform, self._raveled_index())
+        if reconstrained.size > 0:
+            print "WARNING: reconstraining parameters {}".format(self.parameter_names)
+        if update:
+            self._highest_parent_.parameters_changed()
-        else:
-            for p in self._parameters_:
-                self._add_constrain(p, transform, warning)
-            if update:
-                self.parameters_changed()
+#        if self.has_parent():
+#            self._highest_parent_._add_constrain(self, transform, warning)
+#        else:
+#            for p in self._parameters_:
+#                self._add_constrain(p, transform, warning)
+#            if update:
+#                self.parameters_changed()

     def constrain_positive(self, warning=True, update=True):
         """
@@ -167,11 +218,13 @@ class Constrainable(Nameable):
         remove all :py:class:`GPy.core.transformations.Transformation`
         transformats of this parameter object.
         """
-        if self.has_parent():
-            self._highest_parent_._remove_constrain(self, *transforms)
-        else:
-            for p in self._parameters_:
-                self._remove_constrain(p, *transforms)
+        if len(transforms) == 0:
+            transforms = self.constraints.properties()
+        import numpy as np
+        removed = np.empty((0,),dtype=int)
+        for t in transforms:
+            removed = np.union1d(removed, self.constraints.remove(t, self._raveled_index()))
+        return removed

     def unconstrain_positive(self):
         """
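
Note: the thrust of these hunks is that constraint bookkeeping moves off the model and onto each Constrainable. constrain() now (re)initializes the values through the transformation, records the transform against the parameter's own raveled indices, and warns when indices were already constrained; unconstrain() returns the indices it freed. A rough standalone sketch of that bookkeeping (the dict-backed store below is a stand-in for ParameterIndexOperations, not its real implementation):

    import numpy as np

    class TinyConstraints(object):
        # stand-in index table: transform -> sorted int array of raveled indices
        def __init__(self):
            self._properties = {}
        def add(self, prop, indices):
            old = self._properties.get(prop, np.empty(0, dtype=int))
            self._properties[prop] = np.union1d(old, indices)
        def remove(self, prop, indices):
            if prop in self._properties:
                removed = np.intersect1d(self._properties[prop], indices)
                self._properties[prop] = np.setdiff1d(self._properties[prop], indices)
                return removed.astype(int)
            return np.empty(0, dtype=int)
        def properties(self):
            return self._properties.keys()

    c = TinyConstraints()
    c.add('+ve', np.arange(3))                 # constrain indices 0..2 positive
    reconstrained = c.remove('+ve', np.array([1]))
    if reconstrained.size > 0:
        print "WARNING: reconstraining parameters", reconstrained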

@@ -8,16 +8,9 @@ import cPickle
 import itertools
 from re import compile, _pattern_type
 from param import ParamConcatenation, Param
-from parameter_core import Constrainable, Pickleable, Observable, adjust_name_for_printing, Gradcheckable
-from index_operations import ParameterIndexOperations,\
-    index_empty
+from parameter_core import Constrainable, Pickleable, Observable, adjust_name_for_printing, Gradcheckable, __fixed__
 from array_core import ParamList

-#===============================================================================
-# Printing:
-__fixed__ = "fixed"
-#===============================================================================
-
 #===============================================================================
 # constants
 FIXED = False
@@ -69,7 +62,6 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
     def __init__(self, name=None):
         super(Parameterized, self).__init__(name=name)
         self._in_init_ = True
-        self._constraints_ = None#ParameterIndexOperations()
         self._parameters_ = ParamList()
         self.size = sum(p.size for p in self._parameters_)
         if not self._has_fixes():
@@ -79,11 +71,6 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         self._added_names_ = set()
         del self._in_init_

-    @property
-    def constraints(self):
-        if self._constraints_ is None:
-            self._constraints_ = ParameterIndexOperations()
-        return self._constraints_
     #===========================================================================
     # Parameter connection for model creation:
     #===========================================================================
@@ -128,6 +115,8 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         Add all parameters to this param class, you can insert parameters
         at any given index using the :func:`list.insert` syntax
         """
+#        if param.has_parent():
+#            raise AttributeError, "parameter {} already in another model, create new object (or copy) for adding".format(param._short())
         if param in self._parameters_ and index is not None:
             # make sure fixes and constraints are indexed right
             if self._has_fixes():
@@ -170,14 +159,10 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         else:
             raise RuntimeError, """Parameter exists already added and no copy made"""
         self._connect_parameters()
-        # make sure the constraints are pulled over:
-        if hasattr(param, "_constraints_") and param._constraints_ is not None:
-            for t, ind in param._constraints_.iteritems():
-                self.constraints.add(t, ind+self._offset_for(param))
-            param._constraints_.clear()
+        for p in self._parameters_:
+            p._parent_changed(self)
         if param._default_constraint_ is not None:
-            self._add_constrain(param, param._default_constraint_, False)
+            param.constrain(param._default_constraint_, False)
         if self._has_fixes() and np.all(self._fixes_): # ==UNFIXED
             self._fixes_ = None
@@ -202,20 +187,12 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
                            or p in names_params_indices)])
         self._connect_parameters()

-    def parameters_changed(self):
-        """
-        This method gets called when parameters have changed.
-        Another way of listening to param changes is to
-        add self as a listener to the param, such that
-        updates get passed through. See :py:function:``GPy.core.param.Observable.add_observer``
-        """
-        # will be called as soon as parameters have changed
-        pass
-
     def _connect_parameters(self):
         # connect parameterlist to this parameterized object
         # This just sets up the right connection for the params objects
         # to be used as parameters
+        # it also sets the constraints for each parameter to the constraints
+        # of their respective parents
         if not hasattr(self, "_parameters_") or len(self._parameters_) < 1:
             # no parameters for this class
             return
@@ -237,7 +214,6 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
             elif not (pname in not_unique):
                 self.__dict__[pname] = p
                 self._added_names_.add(pname)
-
     #===========================================================================
     # Pickling operations
     #===========================================================================
@@ -289,7 +265,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         """
         return [
                 self._fixes_,
-                self._constraints_,
+                self.constraints,
                 self._parameters_,
                 self._name,
                 self._added_names_,
@@ -300,7 +276,7 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         self._name = state.pop()
         self._parameters_ = state.pop()
         self._connect_parameters()
-        self._constraints_ = state.pop()
+        self.constraints = state.pop()
         self._fixes_ = state.pop()
         self.parameters_changed()
     #===========================================================================
@@ -420,13 +396,13 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
         if self._has_fixes():
             return self._fixes_[self._raveled_index_for(param)]
         return numpy.ones(self.size, dtype=bool)[self._raveled_index_for(param)]
-    def _fix(self, param, warning=True):
-        f = self._add_constrain(param, __fixed__, warning)
-        self._set_fixed(f)
-    def _unfix(self, param):
-        if self._has_fixes():
-            f = self._remove_constrain(param, __fixed__)
-            self._set_unfixed(f)
+#    def _fix(self, param, warning=True):
+#        f = self._add_constrain(param, __fixed__, warning)
+#        self._set_fixed(f)
+#    def _unfix(self, param):
+#        if self._has_fixes():
+#            f = self._remove_constrain(param, __fixed__)
+#            self._set_unfixed(f)
     #===========================================================================
     # Convenience for fixed, tied checking of param:
     #===========================================================================
@@ -455,54 +431,33 @@ class Parameterized(Constrainable, Pickleable, Observable, Gradcheckable):
     #===========================================================================
     # Constraint Handling:
     #===========================================================================
-    def _add_constrain(self, param, transform, warning=True):
-        rav_i = self._raveled_index_for(param)
-        reconstrained = self._remove_constrain(param, index=rav_i) # remove constraints before
-        # if removing constraints before adding new is not wanted, just delete the above line!
-        self.constraints.add(transform, rav_i)
-        param = self._get_original(param)
-        if not (transform == __fixed__):
-            param._set_params(transform.initialize(param._get_params()), update=False)
-        if warning and any(reconstrained):
-            # if you want to print the whole params object, which was reconstrained use:
-            # m = str(param[self._backtranslate_index(param, reconstrained)])
-            print "Warning: re-constraining parameters:\n{}".format(param._short())
-        return rav_i
-    def _remove_constrain(self, param, *transforms, **kwargs):
-        if not transforms:
-            transforms = self.constraints.properties()
-        removed_indices = numpy.array([]).astype(int)
-        if "index" in kwargs: index = kwargs['index']
-        else: index = self._raveled_index_for(param)
-        for constr in transforms:
-            removed = self.constraints.remove(constr, index)
-            if constr is __fixed__:
-                self._set_unfixed(removed)
-            removed_indices = numpy.union1d(removed_indices, removed)
-        return removed_indices
-    # convienience for iterating over items
-    def _constraints_iter_items(self, param):
-        for constr, ind in self.constraints.iteritems():
-            ind = self._backtranslate_index(param, ind)
-            if not index_empty(ind):
-                yield constr, ind
-    def _constraints_iter(self, param):
-        for constr, _ in self._constraints_iter_items(param):
-            yield constr
-    def _contraints_iter_indices(self, param):
-        # iterate through all constraints belonging to param
-        for _, ind in self._constraints_iter_items(param):
-            yield ind
-    def _constraint_indices(self, param, constraint):
-        # indices in model range for param and constraint
-        return self._backtranslate_index(param, self.constraints[constraint]) + self._offset_for(param)
-    def _constraints_for(self, param, rav_index):
-        # constraint for param given its internal rav_index
-        return self.constraints.properties_for(rav_index+self._offset_for(param))
-    def _constraints_for_collect(self, param, rav_index):
-        # constraint for param given its internal rav_index
-        cs = self._constraints_for(param, rav_index)
-        return set(itertools.chain(*cs))
     #===========================================================================
+#    def _add_constrain(self, param, transform, warning=True):
+#        rav_i = self._raveled_index_for(param)
+#        reconstrained = self._remove_constrain(param, index=rav_i) # remove constraints before
+#        # if removing constraints before adding new is not wanted, just delete the above line!
+#        self.constraints.add(transform, rav_i)
+#        param = self._get_original(param)
+#        if not (transform == __fixed__):
+#            param._set_params(transform.initialize(param._get_params()), update=False)
+#        if warning and any(reconstrained):
+#            # if you want to print the whole params object, which was reconstrained use:
+#            # m = str(param[self._backtranslate_index(param, reconstrained)])
+#            print "Warning: re-constraining parameters:\n{}".format(param._short())
+#        return rav_i
+#    def _remove_constrain(self, param, *transforms, **kwargs):
+#        if not transforms:
+#            transforms = self.constraints.properties()
+#        removed_indices = numpy.array([]).astype(int)
+#        if "index" in kwargs: index = kwargs['index']
+#        else: index = self._raveled_index_for(param)
+#        for constr in transforms:
+#            removed = self.constraints.remove(constr, index)
+#            if constr is __fixed__:
+#                self._set_unfixed(removed)
+#            removed_indices = numpy.union1d(removed_indices, removed)
+#        return removed_indices
+#===========================================================================
     #===========================================================================
     # Get/set parameters:
     #===========================================================================

@@ -27,6 +27,8 @@ class Transformation(object):
         raise NotImplementedError
     def __str__(self):
         raise NotImplementedError
+    def __repr__(self):
+        return self.__class__.__name__

 class Logexp(Transformation):
     domain = _POSITIVE
@@ -94,7 +96,6 @@ class LogexpClipped(Logexp):
     def __str__(self):
         return '+ve_c'

-
 class Exponent(Transformation):
     # TODO: can't allow this to go to zero, need to set a lower bound. Similar with negative Exponent below. See old MATLAB code.
     domain = _POSITIVE
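
Note: a Transformation maps between the optimizer's unconstrained space and the constrained parameter space, and the added __repr__ makes any transformation print as its class name. For orientation, a hedged sketch of the positive constraint in Logexp's style (the softplus form below is the usual description of GPy's Logexp, written from scratch here rather than copied from this file):

    import numpy as np

    class LogexpSketch(object):
        domain = 'positive'
        def f(self, x):
            # optimizer space -> model space, always positive: log(1 + exp(x))
            return np.log(1. + np.exp(x))
        def finv(self, f):
            # model space -> optimizer space
            return np.log(np.exp(f) - 1.)
        def __str__(self):
            return '+ve'
        def __repr__(self):
            # the behaviour added in this hunk
            return self.__class__.__name__

    t = LogexpSketch()
    print t.f(t.finv(2.5))   # round trip: 2.5
    print repr(t)            # LogexpSketch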

@@ -87,18 +87,22 @@ def toy_linear_1d_classification_laplace(seed=default_seed, optimize=True, plot=
     Y = data['Y'][:, 0:1]
     Y[Y.flatten() == -1] = 0

-    bern_noise_model = GPy.likelihoods.bernoulli()
-    laplace_likelihood = GPy.likelihoods.Laplace(Y.copy(), bern_noise_model)
+    likelihood = GPy.likelihoods.Bernoulli()
+    laplace_inf = GPy.inference.latent_function_inference.Laplace()
+    kernel = GPy.kern.rbf(1)

     # Model definition
-    m = GPy.models.GPClassification(data['X'], Y, likelihood=laplace_likelihood)
-    print m
+    m = GPy.core.GP(data['X'], Y, kernel=kernel, likelihood=likelihood, inference_method=laplace_inf)

     # Optimize
     if optimize:
         #m.update_likelihood_approximation()
         # Parameters optimization:
-        m.optimize('bfgs', messages=1)
+        try:
+            m.optimize('scg', messages=1)
+        except Exception as e:
+            return m

     #m.pseudo_EM()

     # Plot

@@ -42,38 +42,35 @@ def student_t_approx(optimize=True, plot=True):
     kernel4 = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])

     #Gaussian GP model on clean data
-    m1 = GPy.models.GPRegression(X, Y.copy(), kernel=kernel1)
-    # optimize
-    m1.ensure_default_constraints()
-    m1['white'] = 1e-5
-    m1['white'].constrain_fixed('white')
-    m1.randomize()
+    #m1 = GPy.models.GPRegression(X, Y.copy(), kernel=kernel1)
+    ## optimize
+    #m1['white'].constrain_fixed(1e-5)
+    #m1.randomize()

-    #Gaussian GP model on corrupt data
-    m2 = GPy.models.GPRegression(X, Yc.copy(), kernel=kernel2)
-    m2.ensure_default_constraints()
-    m1['white'] = 1e-5
-    m1['white'].constrain_fixed('white')
-    m2.randomize()
+    ##Gaussian GP model on corrupt data
+    #m2 = GPy.models.GPRegression(X, Yc.copy(), kernel=kernel2)
+    #m1['white'].constrain_fixed(1e-5)
+    #m2.randomize()

     #Student t GP model on clean data
     t_distribution = GPy.likelihoods.StudentT(deg_free=deg_free, sigma2=edited_real_sd)
-    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
+    laplace_inf = GPy.inference.latent_function_inference.Laplace()
     m3 = GPy.core.GP(X, Y.copy(), kernel3, likelihood=t_distribution, inference_method=laplace_inf)
-    m3.ensure_default_constraints()
     m3['t_noise'].constrain_bounded(1e-6, 10.)
-    m3['white'] = 1e-5
-    m3['white'].constrain_fixed()
+    m3['white'].constrain_fixed(1e-5)
     m3.randomize()
+    debug = True
+    print m3
+    if debug:
+        m3.optimize(messages=1)
+        return m3

     #Student t GP model on corrupt data
     t_distribution = GPy.likelihoods.StudentT(deg_free=deg_free, sigma2=edited_real_sd)
-    laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
+    laplace_inf = GPy.inference.latent_function_inference.Laplace()
     m4 = GPy.core.GP(X, Yc.copy(), kernel4, likelihood=t_distribution, inference_method=laplace_inf)
-    m4.ensure_default_constraints()
     m4['t_noise'].constrain_bounded(1e-6, 10.)
-    m4['white'] = 1e-5
-    m4['white'].constrain_fixed()
+    m4['white'].constrain_fixed(1e-5)
     m4.randomize()

     if optimize:
@@ -156,7 +153,6 @@ def boston_example(optimize=True, plot=True):
     #Gaussian GP
     print "Gauss GP"
     mgp = GPy.models.GPRegression(X_train.copy(), Y_train.copy(), kernel=kernelgp.copy())
-    mgp.ensure_default_constraints()
     mgp.constrain_fixed('white', 1e-5)
     mgp['rbf_len'] = rbf_len
     mgp['noise'] = noise
@@ -174,7 +170,6 @@ def boston_example(optimize=True, plot=True):
     g_distribution = GPy.likelihoods.noise_model_constructors.gaussian(variance=noise, N=N, D=D)
     g_likelihood = GPy.likelihoods.Laplace(Y_train.copy(), g_distribution)
     mg = GPy.models.GPRegression(X_train.copy(), Y_train.copy(), kernel=kernelstu.copy(), likelihood=g_likelihood)
-    mg.ensure_default_constraints()
     mg.constrain_positive('noise_variance')
     mg.constrain_fixed('white', 1e-5)
     mg['rbf_len'] = rbf_len
@@ -194,7 +189,6 @@ def boston_example(optimize=True, plot=True):
     t_distribution = GPy.likelihoods.noise_model_constructors.student_t(deg_free=df, sigma2=noise)
     stu_t_likelihood = GPy.likelihoods.Laplace(Y_train.copy(), t_distribution)
     mstu_t = GPy.models.GPRegression(X_train.copy(), Y_train.copy(), kernel=kernelstu.copy(), likelihood=stu_t_likelihood)
-    mstu_t.ensure_default_constraints()
     mstu_t.constrain_fixed('white', 1e-5)
     mstu_t.constrain_bounded('t_noise', 0.0001, 1000)
     mstu_t['rbf_len'] = rbf_len

@@ -24,7 +24,7 @@ etc.
 """

 from exact_gaussian_inference import ExactGaussianInference
-from laplace import LaplaceInference
+from laplace import Laplace
 expectation_propagation = 'foo' # TODO
 from varDTC import VarDTC
 from dtc import DTC

@@ -17,7 +17,7 @@ from posterior import Posterior
 import warnings
 from scipy import optimize

-class LaplaceInference(object):
+class Laplace(object):

     def __init__(self):
         """
@@ -52,6 +52,7 @@ class LaplaceInference(object):

         f_hat, Ki_fhat = self.rasm_mode(K, Y, likelihood, Ki_f_init, Y_metadata=Y_metadata)

+        self.f_hat = f_hat
         #Compute hessian and other variables at mode
         log_marginal, woodbury_vector, woodbury_inv, dL_dK, dL_dthetaL = self.mode_computations(f_hat, Ki_fhat, K, Y, likelihood, kern, Y_metadata)
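
Note: rasm_mode finds the mode f_hat of the latent posterior, and the Laplace approximation is the Gaussian centred at that mode; storing self.f_hat simply keeps the mode around after inference. For context, a rough sketch of the standard Newton mode search (Rasmussen and Williams, Algorithm 3.1), under the assumption of a factorizing, log-concave likelihood with elementwise derivative callbacks; this is illustrative, not this file's actual implementation:

    import numpy as np

    def find_mode(K, dlogpdf_df, d2logpdf_df2, maxiter=100, tol=1e-9):
        # Newton iteration for argmax_f  log p(y|f) - 0.5 f' K^-1 f
        f = np.zeros(K.shape[0])
        for _ in range(maxiter):
            W = -d2logpdf_df2(f)   # diagonal of the negative likelihood Hessian, >= 0 if log-concave
            grad = dlogpdf_df(f)
            sW = np.sqrt(W)
            # numerically convenient form B = I + W^1/2 K W^1/2
            B = np.eye(K.shape[0]) + sW[:, None] * K * sW[None, :]
            b = W * f + grad
            a = b - sW * np.linalg.solve(B, sW * K.dot(b))
            f_new = K.dot(a)
            if np.max(np.abs(f_new - f)) < tol:
                return f_new
            f = f_new
        return f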

@@ -116,7 +116,8 @@ class Bernoulli(Likelihood):
         Each y_i must be in {0, 1}
         """
         assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
-        objective = (link_f**y) * ((1.-link_f)**(1.-y))
+        #objective = (link_f**y) * ((1.-link_f)**(1.-y))
+        objective = np.where(y, link_f, 1.-link_f)
         return np.exp(np.sum(np.log(objective)))

     def logpdf_link(self, link_f, y, extra_data=None):
@@ -136,7 +137,9 @@ class Bernoulli(Likelihood):
         """
         assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
-        objective = y*np.log(link_f) + (1.-y)*np.log(link_f)
+        #objective = y*np.log(link_f) + (1.-y)*np.log(link_f)
+        state = np.seterr(divide='ignore')
+        objective = np.where(y==1, np.log(link_f), np.log(1-link_f))
+        np.seterr(**state)
         return np.sum(objective)

     def dlogpdf_dlink(self, link_f, y, extra_data=None):
@@ -155,7 +158,10 @@ class Bernoulli(Likelihood):
         :rtype: Nx1 array
         """
         assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
-        grad = (y/link_f) - (1.-y)/(1-link_f)
+        #grad = (y/link_f) - (1.-y)/(1-link_f)
+        state = np.seterr(divide='ignore')
+        grad = np.where(y, 1./link_f, -1./(1-link_f))
+        np.seterr(**state)
         return grad

     def d2logpdf_dlink2(self, link_f, y, extra_data=None):
@@ -180,7 +186,10 @@ class Bernoulli(Likelihood):
         (the distribution for y_i depends only on link(f_i) not on link(f_(j!=i))
         """
         assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
-        d2logpdf_dlink2 = -y/(link_f**2) - (1-y)/((1-link_f)**2)
+        #d2logpdf_dlink2 = -y/(link_f**2) - (1-y)/((1-link_f)**2)
+        state = np.seterr(divide='ignore')
+        d2logpdf_dlink2 = np.where(y, -1./np.square(link_f), -1./np.square(1.-link_f))
+        np.seterr(**state)
         return d2logpdf_dlink2

     def d3logpdf_dlink3(self, link_f, y, extra_data=None):
@@ -199,7 +208,10 @@ class Bernoulli(Likelihood):
         :rtype: Nx1 array
         """
         assert np.atleast_1d(link_f).shape == np.atleast_1d(y).shape
-        d3logpdf_dlink3 = 2*(y/(link_f**3) - (1-y)/((1-link_f)**3))
+        #d3logpdf_dlink3 = 2*(y/(link_f**3) - (1-y)/((1-link_f)**3))
+        state = np.seterr(divide='ignore')
+        d3logpdf_dlink3 = np.where(y, 2./(link_f**3), -2./((1.-link_f)**3))
+        np.seterr(**state)
         return d3logpdf_dlink3

     def samples(self, gp):
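
Note: the recurring rewrite in this file trades formulas like (y/link_f) - (1.-y)/(1-link_f) for np.where. Both branches are still evaluated (hence np.seterr to silence the division warning from the branch that gets thrown away), but the selected entry no longer mixes in a 0/0 or 0*inf term when link_f hits exactly 0 or 1. A small demonstration with hypothetical values:

    import numpy as np

    y = np.array([1., 0.])
    link_f = np.array([1., 1e-3])    # link_f == 1 breaks the old formula when y == 1

    state = np.seterr(divide='ignore', invalid='ignore')
    old = (y / link_f) - (1. - y) / (1. - link_f)        # [nan, ...]: 0/0 poisons entry 0
    new = np.where(y, 1. / link_f, -1. / (1. - link_f))  # [1., ...]: dead branch discarded
    np.seterr(**state)
    print old
    print new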

@@ -23,7 +23,7 @@ class BayesianGPLVM(SparseGP, GPLVM):

     """
     def __init__(self, Y, input_dim, X=None, X_variance=None, init='PCA', num_inducing=10,
-                 Z=None, kernel=None, inference_method=None, likelihood=Gaussian(), name='bayesian gplvm', **kwargs):
+                 Z=None, kernel=None, inference_method=None, likelihood=None, name='bayesian gplvm', **kwargs):
         if X == None:
             X = self.initialise_latent(init, input_dim, Y)
         self.init = init
@@ -38,6 +38,8 @@ class BayesianGPLVM(SparseGP, GPLVM):
         if kernel is None:
             kernel = kern.rbf(input_dim) # + kern.white(input_dim)

+        if likelihood is None:
+            likelihood = Gaussian()
         self.q = Normal(X, X_variance)
         SparseGP.__init__(self, X, Y, Z, kernel, likelihood, inference_method, X_variance, name, **kwargs)
         self.add_parameter(self.q, index=0)
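
Note: this is the standard fix for Python's mutable default arguments. A default like likelihood=Gaussian() is evaluated once, when the def statement runs, so every BayesianGPLVM instance would have shared (and mutated) one likelihood object. A minimal illustration of the hazard, with a throwaway class:

    class Likelihood(object):
        pass

    def bad(likelihood=Likelihood()):      # one shared instance, created at def time
        return likelihood

    def good(likelihood=None):             # a fresh instance per call
        if likelihood is None:
            likelihood = Likelihood()
        return likelihood

    assert bad() is bad()                  # same object every call
    assert good() is not good()            # distinct objects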

@@ -516,9 +516,8 @@ class TestNoiseModels(object):
             Y = Y/Y.max()
             white_var = 1e-6
             kernel = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
-            laplace_likelihood = GPy.inference.latent_function_inference.LaplaceInference()
+            laplace_likelihood = GPy.inference.latent_function_inference.Laplace()
             m = GPy.core.GP(X.copy(), Y.copy(), kernel, likelihood=model, inference_method=laplace_likelihood)
-            m.ensure_default_constraints()
             m['white'].constrain_fixed(white_var)

             #Set constraints
@@ -541,6 +540,10 @@ class TestNoiseModels(object):
             #import ipdb; ipdb.set_trace()
             #NOTE this test appears to be stochastic for some likelihoods (student t?)
             # appears to all be working in test mode right now...
+
+            if not m.checkgrad():
+                import ipdb; ipdb.set_trace() # XXX BREAKPOINT
+
             assert m.checkgrad(step=step)

             ###########
@@ -555,7 +558,6 @@ class TestNoiseModels(object):
             kernel = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
             ep_inf = GPy.inference.latent_function_inference.EP()
             m = GPy.core.GP(X.copy(), Y.copy(), kernel=kernel, likelihood=model, inference_method=ep_inf)
-            m.ensure_default_constraints()
             m['white'].constrain_fixed(white_var)

             for param_num in range(len(param_names)):
@@ -644,13 +646,11 @@ class LaplaceTests(unittest.TestCase):
         m1['variance'] = initial_var_guess
         m1['variance'].constrain_bounded(1e-4, 10)
         m1['rbf'].constrain_bounded(1e-4, 10)
-        m1.ensure_default_constraints()
         m1.randomize()

         gauss_distr2 = GPy.likelihoods.Gaussian(variance=initial_var_guess)
-        laplace_inf = GPy.inference.latent_function_inference.LaplaceInference()
+        laplace_inf = GPy.inference.latent_function_inference.Laplace()
         m2 = GPy.core.GP(X, Y.copy(), kernel=kernel2, likelihood=gauss_distr2, inference_method=laplace_inf)
-        m2.ensure_default_constraints()
         m2['white'].constrain_fixed(1e-6)
         m2['rbf'].constrain_bounded(1e-4, 10)
         m2['variance'].constrain_bounded(1e-4, 10)