diff --git a/GPy/core/gp_base.py b/GPy/core/gp_base.py index 71d6991c..653ea63a 100644 --- a/GPy/core/gp_base.py +++ b/GPy/core/gp_base.py @@ -34,16 +34,7 @@ class GPBase(Model): self.add_parameter(self.kern, gradient=lambda:self.kern.dK_dtheta(self.dL_dK, self.X)) self.add_parameter(self.likelihood, gradient=lambda:self.likelihood._gradients(partial=np.diag(self.dL_dK))) - #self.kern.connect_input(self.X) - # Model.__init__(self) - # All leaf nodes should call self._set_params(self._get_params()) at - # the end -# -# def parameters_changed(self): -# self.kern.parameters_changed() -# self.likelihood.parameters_changed() - def getstate(self): """ Get the current state of the class, here we return everything that is needed to recompute the model. diff --git a/GPy/core/parameter.py b/GPy/core/parameter.py index a7ba39d9..ae7061e8 100644 --- a/GPy/core/parameter.py +++ b/GPy/core/parameter.py @@ -6,8 +6,8 @@ Created on 4 Sep 2013 import itertools import numpy from transformations import Logexp, NegativeLogexp, Logistic -from parameterized import Nameable, Pickleable, Observable -from GPy.core.parameterized import _adjust_name_for_printing +from parameterized import Nameable, Pickleable, Observable, Parentable +from parameterized import _adjust_name_for_printing ###### printing __constraints_name__ = "Constraint" @@ -60,8 +60,95 @@ class ObservableArray(ListArray, Observable): def __setslice__(self, start, stop, val): return self.__setitem__(slice(start, stop), val) - -class Param(ObservableArray, Nameable): +class Constrainable(Nameable): + def __init__(self, name): + super(Constrainable,self).__init__(name) + #=========================================================================== + # Constrain operations -> done + #=========================================================================== + def constrain(self, transform, warning=True, update=True): + """ + :param transform: the :py:class:`GPy.core.transformations.Transformation` + to constrain the this parameter 
to. + :param warning: print a warning if re-constraining parameters. + + Constrain the parameter to the given + :py:class:`GPy.core.transformations.Transformation`. + """ + if self.has_parent(): + self._highest_parent_._add_constrain(self, transform, warning) + if update: + self._highest_parent_.parameters_changed() + else: + for p in self._parameters_: + self._add_constrain(p, transform, warning) + if update: + self.parameters_changed() + def constrain_positive(self, warning=True): + """ + :param warning: print a warning if re-constraining parameters. + + Constrain this parameter to the default positive constraint. + """ + self.constrain(Logexp(), warning) + + def constrain_negative(self, warning=True): + """ + :param warning: print a warning if re-constraining parameters. + + Constrain this parameter to the default negative constraint. + """ + self.constrain(NegativeLogexp(), warning) + + def constrain_bounded(self, lower, upper, warning=True): + """ + :param lower, upper: the limits to bound this parameter to + :param warning: print a warning if re-constraining parameters. + + Constrain this parameter to lie within the given range. + """ + self.constrain(Logistic(lower, upper), warning) + + def unconstrain(self, *transforms): + """ + :param transforms: The transformations to unconstrain from. + + Remove all :py:class:`GPy.core.transformations.Transformation` + transformations of this parameter object. + """ + if self.has_parent(): + self._highest_parent_._remove_constrain(self, *transforms) + else: + for p in self._parameters_: + self._remove_constrain(p, *transforms) + + def unconstrain_positive(self): + """ + Remove positive constraint of this parameter. + """ + self.unconstrain(Logexp()) + + def unconstrain_negative(self): + """ + Remove negative constraint of this parameter. 
+ """ + self.unconstrain(NegativeLogexp()) + + def unconstrain_bounded(self, lower, upper): + """ + :param lower, upper: the limits to unbound this parameter from + + Remove (lower, upper) bounded constrain from this parameter/ + """ + self.unconstrain(Logistic(lower, upper)) + +class Float(numpy.float64, Constrainable): + def __init__(self, f, base): + super(Float,self).__init__(f) + self._base = base + + +class Param(ObservableArray, Constrainable): """ Parameter object for GPy models. @@ -82,23 +169,29 @@ class Param(ObservableArray, Nameable): Fixing parameters will fix them to the value they are right now. If you change the fixed value, it will be fixed to the new value! - See :py:class:`GPy.core.parameterized.Parameterized` for more details. + See :py:class:`GPy.core.parameterized.Parameterized` for more details on constraining etc. + + This ndarray can be stored in lists and checked if it is in. + + >>> import numpy as np + >>> x = np.random.normal(size=(10,3)) + >>> x in [[1], x, [3]] + True + + WARNING: This overrides the functionality of x==y!!! + Use numpy.equal(x,y) for element-wise equality testing. 
""" - __array_priority__ = -numpy.inf # Never give back Param + __array_priority__ = 0 # Never give back Param _fixes_ = None def __new__(cls, name, input_array, *args, **kwargs): obj = numpy.atleast_1d(super(Param, cls).__new__(cls, input_array=input_array)) - obj._direct_parent_ = None - #obj.name = name - obj._parent_index_ = None - obj._highest_parent_ = None obj._current_slice_ = (slice(obj.shape[0]),) obj._realshape_ = obj.shape obj._realsize_ = obj.size obj._realndim_ = obj.ndim obj._updated_ = False - from index_operations import ParamDict - obj._tied_to_me_ = ParamDict(set) + from index_operations import SetDict + obj._tied_to_me_ = SetDict() obj._tied_to_ = [] obj._original_ = True return obj @@ -110,11 +203,10 @@ class Param(ObservableArray, Nameable): # see InfoArray.__array_finalize__ for comments if obj is None: return super(Param, self).__array_finalize__(obj) - self.name = getattr(obj, 'name', None) - self._current_slice_ = getattr(obj, '_current_slice_', None) self._direct_parent_ = getattr(obj, '_direct_parent_', None) self._parent_index_ = getattr(obj, '_parent_index_', None) self._highest_parent_ = getattr(obj, '_highest_parent_', None) + self._current_slice_ = getattr(obj, '_current_slice_', None) self._tied_to_me_ = getattr(obj, '_tied_to_me_', None) self._tied_to_ = getattr(obj, '_tied_to_', None) self._realshape_ = getattr(obj, '_realshape_', None) @@ -122,6 +214,7 @@ class Param(ObservableArray, Nameable): self._realndim_ = getattr(obj, '_realndim_', None) self._updated_ = getattr(obj, '_updated_', None) self._original_ = getattr(obj, '_original_', None) + self._name = getattr(obj, 'name', None) def __array_wrap__(self, out_arr, context=None): return out_arr.view(numpy.ndarray) @@ -207,83 +300,7 @@ class Param(ObservableArray, Nameable): self._highest_parent_._unfix(self) unfix = unconstrain_fixed #=========================================================================== - # Constrain operations -> done - 
#=========================================================================== - def constrain(self, transform, warning=True, update=True): - """ - :param transform: the :py:class:`GPy.core.transformations.Transformation` - to constrain the this parameter to. - :param warning: print a warning if re-constraining parameters. - - Constrain the parameter to the given - :py:class:`GPy.core.transformations.Transformation`. - """ - if self._original_: # this happens when indexing created a copy of the array - self.__setitem__(slice(None), transform.initialize(self), update=False) - else: - self._direct_parent_._get_original(self).__setitem__(self._current_slice_, transform.initialize(self), update=False) - self._highest_parent_._add_constrain(self, transform, warning) - for t in self._tied_to_me_.iterkeys(): - if transform not in self._highest_parent_._constraints_for_collect(t, t._raveled_index()): - t._direct_parent_._get_original(t)[t._current_slice_].constrain(transform, warning, update) - if update: - self._highest_parent_.parameters_changed() - - def constrain_positive(self, warning=True): - """ - :param warning: print a warning if re-constraining parameters. - - Constrain this parameter to the default positive constraint. - """ - self.constrain(Logexp(), warning) - - def constrain_negative(self, warning=True): - """ - :param warning: print a warning if re-constraining parameters. - - Constrain this parameter to the default negative constraint. - """ - self.constrain(NegativeLogexp(), warning) - - def constrain_bounded(self, lower, upper, warning=True): - """ - :param lower, upper: the limits to bound this parameter to - :param warning: print a warning if re-constraining parameters. - - Constrain this parameter to lie within the given range. - """ - self.constrain(Logistic(lower, upper), warning) - - def unconstrain(self, *transforms): - """ - :param transforms: The transformations to unconstrain from. 
- - remove all :py:class:`GPy.core.transformations.Transformation` - transformats of this parameter object. - """ - self._highest_parent_._remove_constrain(self, *transforms) - - def unconstrain_positive(self): - """ - Remove positive constraint of this parameter. - """ - self.unconstrain(Logexp()) - - def unconstrain_negative(self): - """ - Remove negative constraint of this parameter. - """ - self.unconstrain(NegativeLogexp()) - - def unconstrain_bounded(self, lower, upper): - """ - :param lower, upper: the limits to unbound this parameter from - - Remove (lower, upper) bounded constrain from this parameter/ - """ - self.unconstrain(Logistic(lower, upper)) - #=========================================================================== - # Tying operations -> done + # Tying operations -> bugged, TODO #=========================================================================== def tie_to(self, param): """ @@ -667,6 +684,8 @@ class ParamConcatenation(object): return "\n".join(map(repr,self.params)) if __name__ == '__main__': + + from GPy.core.parameterized import Parameterized from GPy.core.parameter import Param diff --git a/GPy/core/parameterized.py b/GPy/core/parameterized.py index c3a80650..67bc18f5 100644 --- a/GPy/core/parameterized.py +++ b/GPy/core/parameterized.py @@ -11,17 +11,21 @@ from re import compile, _pattern_type import re class Parentable(object): - _direct_parent_ = None - _parent_index_ = None - + def __init__(self, direct_parent=None, highest_parent=None, parent_index=None): + super(Parentable,self).__init__() + self._direct_parent_ = direct_parent + self._parent_index_ = parent_index + self._highest_parent_ = highest_parent + def has_parent(self): return self._direct_parent_ is not None class Nameable(Parentable): _name = None - def __init__(self, name): + def __init__(self, name, direct_parent=None, highest_parent=None, parent_index=None): + super(Nameable,self).__init__(direct_parent, highest_parent, parent_index) self._name = name or 
self.__class__.__name__ - self.name = name + @property def name(self): return self._name @@ -68,7 +72,7 @@ def _adjust_name_for_printing(name): if name is not None: return name.replace(" ", "_").replace(".", "_").replace("-","").replace("+","").replace("!","").replace("*","").replace("/","") return '' -from parameter import ParamConcatenation, Param +from parameter import ParamConcatenation, Param, Constrainable from index_operations import ParameterIndexOperations,\ index_empty @@ -83,7 +87,7 @@ FIXED = False UNFIXED = True #=============================================================================== -class Parameterized(Nameable, Pickleable, Observable): +class Parameterized(Constrainable, Pickleable, Observable): """ Parameterized class @@ -126,7 +130,7 @@ class Parameterized(Nameable, Pickleable, Observable): and concatenate them. Printing m[''] will result in printing of all parameters in detail. """ def __init__(self, name=None): - super(Parameterized, self).__init__(name) + super(Parameterized, self).__init__(name=name) self._in_init_ = True self._constraints_ = None#ParameterIndexOperations() if not hasattr(self, "_parameters_"): @@ -187,7 +191,7 @@ class Parameterized(Nameable, Pickleable, Observable): Add all parameters to this parameter class, you can insert parameters - at any given index using the :py:func:`list.insert` syntax + at any given index using the :func:`list.insert` syntax """ if parameter in self._parameters_ and index is not None: # make sure fixes and constraints are indexed right @@ -277,10 +281,8 @@ class Parameterized(Nameable, Pickleable, Observable): return i = 0 sizes = [0] - #self.size = sum(p.size for p in self._parameters_) self._param_slices_ = [] for p in self._parameters_: - #if p._parent_ is None: p._direct_parent_ = self p._parent_index_ = i i += 1 @@ -289,10 +291,8 @@ class Parameterized(Nameable, Pickleable, Observable): not_unique = [] sizes.append(p.size+sizes[-1]) self._param_slices_.append(slice(sizes[-2], 
sizes[-1])) -# if p._fixes_ is not None: -# self._fixes_[p._raveled_index_for(p)] = p._fixes_ -# p._fixes_ = None pname = _adjust_name_for_printing(p.name) + # and makes sure to not delete programmatically added parameters if pname in self.__dict__: if isinstance(self.__dict__[pname], (Parameterized, Param)): if not p is self.__dict__[pname]: @@ -301,16 +301,6 @@ class Parameterized(Nameable, Pickleable, Observable): elif not (pname in not_unique): self.__dict__[pname] = p self._added_names_.add(pname) -# for p in self._parameters_: -# if hasattr(p, '_constraints_') and p._constraints_ is not None: -# for t, ind in p._constraints_.iteritems(): -# self.constraints.add(t, ind+self._offset_for(p)) -# p._constraints_.clear() -# if np.all(self._fixes_): # ==UNFIXED -# self._fixes_= None -# else: -# self.constraints.add(__fixed__, np.nonzero(~self._fixes_)[0]) -# self.parameters_changed() #=========================================================================== # Pickling operations #=========================================================================== @@ -375,20 +365,10 @@ class Parameterized(Nameable, Pickleable, Observable): if self.has_parent(): return g x = self._get_params() - #g = g.copy() - #for constraint, index in self.constraints.iteritems(): - # if constraint != __fixed__: - # g[index] = g[index] * constraint.gradfactor(x[index]) [numpy.put(g, i, g[i]*c.gradfactor(x[i])) for c,i in self.constraints.iteritems() if c != __fixed__] - #[np.put(g, i, v) for i, v in [(t[0], np.sum(g[t])) for t in self.tied_indices]] for p in self.flattened_parameters: for t,i in p._tied_to_me_.iteritems(): g[self._offset_for(p) + numpy.array(list(i))] += g[self._raveled_index_for(t)] - #[g[self._offset_for(t) + numpy.array(list(i))].__iadd__(v) for i, v in [[i, g[self._raveled_index_for(p)].sum()] for p in self.flattened_parameters for t,i in p._tied_to_me_.iteritems()]] -# if len(self.tied_indices) or len(self.fixed_indices): -# to_remove = np.hstack((self.fixed_indices + 
[t[1:] for t in self.tied_indices])) -# return np.delete(g, to_remove) -# else: if self._has_fixes(): return g[self._fixes_] return g #=========================================================================== @@ -401,18 +381,20 @@ class Parameterized(Nameable, Pickleable, Observable): return n def _get_params(self): # don't overwrite this anymore! - return numpy.hstack([x._get_params() for x in self._parameters_])#numpy.fromiter(itertools.chain(*itertools.imap(lambda x: x._get_params(), self._parameters_)), dtype=numpy.float64, count=sum(self._parameter_sizes_)) + return numpy.hstack([x._get_params() for x in self._parameters_]) def _set_params(self, params, update=True): # don't overwrite this anymore! [p._set_params(params[s], update=update) for p,s in itertools.izip(self._parameters_,self._param_slices_)] self.parameters_changed() def _get_params_transformed(self): + # transformed parameters (apply transformation rules) p = self._get_params() [numpy.put(p, ind, c.finv(p[ind])) for c,ind in self.constraints.iteritems() if c != __fixed__] if self._has_fixes(): return p[self._fixes_] return p def _set_params_transformed(self, p): + # inverse apply transformations for parameters and set the resulting parameters p = p.copy() if self._has_fixes(): tmp = self._get_params(); tmp[self._fixes_] = p; p = tmp; del tmp [numpy.put(p, ind, c.f(p[ind])) for c,ind in self.constraints.iteritems() if c != __fixed__] @@ -444,9 +426,18 @@ class Parameterized(Nameable, Pickleable, Observable): return 0 def _raveled_index_for(self, param): + """ + get the raveled index for a parameter + that is an int array, containing the indexes for the flattened + parameter inside this parameterized logic. + """ return param._raveled_index() + self._offset_for(param) def _raveled_index(self): + """ + get the raveled index for this object, + this is not in the global view of things! 
+ """ return numpy.r_[:self.size] #=========================================================================== # Handle ties: @@ -515,6 +506,8 @@ class Parameterized(Nameable, Pickleable, Observable): reconstrained = self._remove_constrain(param, index=rav_i) # remove constraints before # if removing constraints before adding new is not wanted, just delete the above line! self.constraints.add(transform, rav_i) + param = self._get_original(param) + param._set_params(transform.initialize(param._get_params())) if warning and any(reconstrained): # if you want to print the whole params object, which was reconstrained use: # m = str(param[self._backtranslate_index(param, reconstrained)]) @@ -606,6 +599,12 @@ class Parameterized(Nameable, Pickleable, Observable): #=========================================================================== # Printing: #=========================================================================== + def _short(self): + # short string to print + if self.has_parent(): + return self._direct_parent_.hirarchy_name() + _adjust_name_for_printing(self.name) + else: + return _adjust_name_for_printing(self.name) def _parameter_names(self, add_name=False): if add_name: return [_adjust_name_for_printing(self.name) + "." + xi for x in self._parameters_ for xi in x._parameter_names(add_name=True)] @@ -635,6 +634,7 @@ class Parameterized(Nameable, Pickleable, Observable): def _ties_str(self): return [','.join(x._ties_str) for x in self.flattened_parameters] def __str__(self, header=True): + name = _adjust_name_for_printing(self.name) + "." 
constrs = self._constraints_str; ts = self._ties_str desc = self._description_str; names = self.parameter_names diff --git a/GPy/core/sparse_gp.py b/GPy/core/sparse_gp.py index 25e1bd3c..ccfc5bb8 100644 --- a/GPy/core/sparse_gp.py +++ b/GPy/core/sparse_gp.py @@ -30,8 +30,8 @@ class SparseGP(GPBase): """ - def __init__(self, X, likelihood, kernel, Z, X_variance=None, normalize_X=False): - GPBase.__init__(self, X, likelihood, kernel, normalize_X=normalize_X, name="sparse GP") + def __init__(self, X, likelihood, kernel, Z, X_variance=None, normalize_X=False, name='sparse gp'): + GPBase.__init__(self, X, likelihood, kernel, normalize_X=normalize_X, name=name) self.Z = Z self.num_inducing = Z.shape[0] diff --git a/GPy/core/variational.py b/GPy/core/variational.py index 74287dcf..510d36da 100644 --- a/GPy/core/variational.py +++ b/GPy/core/variational.py @@ -3,8 +3,10 @@ Created on 6 Nov 2013 @author: maxz ''' +import numpy as np from parameterized import Parameterized from parameter import Param +from ..util.misc import param_to_array class Normal(Parameterized): ''' @@ -12,8 +14,53 @@ class Normal(Parameterized): holds the means and variances for a factorizing multivariate normal distribution ''' - def __init__(self, name, means, variances): + def __init__(self, means, variances, name='latent space'): Parameterized.__init__(self, name=name) self.means = Param("mean", means) self.variances = Param('variance', variances) - self.add_parameters(self.means, self.variances) \ No newline at end of file + self.add_parameters(self.means, self.variances) + + def plot(self, fignum=None, ax=None, colors=None): + """ + Plot latent space X in 1D: + + - if fig is given, create input_dim subplots in fig and plot in these + - if ax is given plot input_dim 1D latent space plots of X into each `axis` + - if neither fig nor ax is given create a figure with fignum and plot in there + + colors: + colors of different latent space dimensions input_dim + + """ + import pylab + if ax is None: + 
fig = pylab.figure(num=fignum, figsize=(8, min(12, (2 * self.means.shape[1])))) + if colors is None: + colors = pylab.gca()._get_lines.color_cycle + pylab.clf() + else: + colors = iter(colors) + plots = [] + means, variances = param_to_array(self.means, self.variances) + x = np.arange(means.shape[0]) + for i in range(means.shape[1]): + if ax is None: + a = fig.add_subplot(means.shape[1], 1, i + 1) + elif isinstance(ax, (tuple, list)): + a = ax[i] + else: + raise ValueError("Need one ax per latent dimension input_dim") + a.plot(means, c='k', alpha=.3) + plots.extend(a.plot(x, means.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i))) + a.fill_between(x, + means.T[i] - 2 * np.sqrt(variances.T[i]), + means.T[i] + 2 * np.sqrt(variances.T[i]), + facecolor=plots[-1].get_color(), + alpha=.3) + a.legend(borderaxespad=0.) + a.set_xlim(x.min(), x.max()) + if i < means.shape[1] - 1: + a.set_xticklabels('') + pylab.draw() + fig.tight_layout(h_pad=.01) # , rect=(0, 0, 1, .95)) + return fig diff --git a/GPy/examples/dimensionality_reduction.py b/GPy/examples/dimensionality_reduction.py index 4c286a9e..badef423 100644 --- a/GPy/examples/dimensionality_reduction.py +++ b/GPy/examples/dimensionality_reduction.py @@ -285,6 +285,6 @@ def bgplvm_simulation(optimize='scg', k = kern.linear(input_dim, ARD=True) + kern.bias(input_dim, np.exp(-2)) + kern.white(input_dim, np.exp(-2)) # + kern.bias(input_dim) m = BayesianGPLVM(Y, input_dim, init="PCA", num_inducing=num_inducing, kernel=k) # m.constrain('variance|noise', LogexpClipped()) m['gaussian'] = Y.var() / 100. 
diff --git a/GPy/kern/parts/linear.py b/GPy/kern/parts/linear.py index c8b2f618..c3bbf837 100644 --- a/GPy/kern/parts/linear.py +++ b/GPy/kern/parts/linear.py @@ -2,12 +2,12 @@ # Licensed under the BSD 3-clause license (see LICENSE.txt) -from kernpart import Kernpart import numpy as np -from ...util.linalg import tdot -from ...util.misc import fast_array_equal from scipy import weave -from GPy.core.parameter import Param +from kernpart import Kernpart +from ...util.linalg import tdot +from ...util.misc import fast_array_equal, param_to_array +from ...core.parameter import Param class Linear(Kernpart): """ @@ -235,8 +235,8 @@ class Linear(Kernpart): weave_options = {'headers' : [''], 'extra_compile_args': ['-fopenmp -O3'], #-march=native'], 'extra_link_args' : ['-lgomp']} - - N,num_inducing,input_dim = mu.shape[0],Z.shape[0],mu.shape[1] + + N,num_inducing,input_dim,mu = mu.shape[0],Z.shape[0],mu.shape[1],param_to_array(mu) weave.inline(code, support_code=support_code, libraries=['gomp'], arg_names=['N','num_inducing','input_dim','mu','AZZA','AZZA_2','target_mu','target_S','dL_dpsi2'], type_converters=weave.converters.blitz,**weave_options) @@ -271,6 +271,7 @@ class Linear(Kernpart): 'extra_link_args' : ['-lgomp']} N,num_inducing,input_dim = mu.shape[0],Z.shape[0],mu.shape[1] + mu, AZA, target, dL_dpsi2 = param_to_array(mu, AZA, target, dL_dpsi2) weave.inline(code, support_code=support_code, libraries=['gomp'], arg_names=['N','num_inducing','input_dim','AZA','target','dL_dpsi2'], type_converters=weave.converters.blitz,**weave_options) diff --git a/GPy/kern/parts/rbf.py b/GPy/kern/parts/rbf.py index 4259f8f7..c6e827b0 100644 --- a/GPy/kern/parts/rbf.py +++ b/GPy/kern/parts/rbf.py @@ -2,12 +2,12 @@ # Licensed under the BSD 3-clause license (see LICENSE.txt) -from kernpart import Kernpart import numpy as np from scipy import weave +from kernpart import Kernpart from ...util.linalg import tdot -from ...util.misc import fast_array_equal -from GPy.core.parameter import 
Param +from ...util.misc import fast_array_equal, param_to_array +from ...core.parameter import Param class RBF(Kernpart): """ @@ -134,7 +134,7 @@ class RBF(Kernpart): } """ num_data, num_inducing, input_dim = X.shape[0], X.shape[0], self.input_dim - X = np.asarray(X) + X = param_to_array(X) weave.inline(code, arg_names=['num_data', 'num_inducing', 'input_dim', 'X', 'target', 'dvardLdK', 'var_len3'], type_converters=weave.converters.blitz, **self.weave_options) else: code = """ @@ -152,7 +152,7 @@ class RBF(Kernpart): """ num_data, num_inducing, input_dim = X.shape[0], X2.shape[0], self.input_dim # [np.add(target[1+q:2+q],var_len3[q]*np.sum(dvardLdK*np.square(X[:,q][:,None]-X2[:,q][None,:])),target[1+q:2+q]) for q in range(self.input_dim)] - X,X2 = np.asarray(X), numpy.asarray(X2) + X, X2 = param_to_array(X, X2) weave.inline(code, arg_names=['num_data', 'num_inducing', 'input_dim', 'X', 'X2', 'target', 'dvardLdK', 'var_len3'], type_converters=weave.converters.blitz, **self.weave_options) else: target[1] += (self.variance / self.lengthscale) * np.sum(self._K_dvar * self._K_dist2 * dL_dK) diff --git a/GPy/models/bayesian_gplvm.py b/GPy/models/bayesian_gplvm.py index 9f19393b..5223397f 100644 --- a/GPy/models/bayesian_gplvm.py +++ b/GPy/models/bayesian_gplvm.py @@ -27,7 +27,7 @@ class BayesianGPLVM(SparseGP, GPLVM): """ def __init__(self, likelihood_or_Y, input_dim, X=None, X_variance=None, init='PCA', num_inducing=10, - Z=None, kernel=None, **kwargs): + Z=None, kernel=None, name='bayesian gplvm', **kwargs): if type(likelihood_or_Y) is np.ndarray: likelihood = Gaussian(likelihood_or_Y) else: @@ -47,9 +47,8 @@ class BayesianGPLVM(SparseGP, GPLVM): if kernel is None: kernel = kern.rbf(input_dim) # + kern.white(input_dim) - SparseGP.__init__(self, X, likelihood, kernel, Z=Z, X_variance=X_variance, **kwargs) - - self.q = Normal('latent space', self.X, self.X_variance) + SparseGP.__init__(self, X=X, likelihood=likelihood, kernel=kernel, Z=Z, X_variance=X_variance, 
name=name, **kwargs) + self.q = Normal(self.X, self.X_variance) self.add_parameter(self.q, gradient=self._dbound_dmuS, index=0) self.ensure_default_constraints() @@ -252,50 +251,6 @@ class BayesianGPLVM(SparseGP, GPLVM): controller.deactivate() return controller.view - def plot_X_1d(self, fignum=None, ax=None, colors=None): - """ - Plot latent space X in 1D: - - - if fig is given, create input_dim subplots in fig and plot in these - - if ax is given plot input_dim 1D latent space plots of X into each `axis` - - if neither fig nor ax is given create a figure with fignum and plot in there - - colors: - colors of different latent space dimensions input_dim - - """ - import pylab - if ax is None: - fig = pylab.figure(num=fignum, figsize=(8, min(12, (2 * self.X.shape[1])))) - if colors is None: - colors = pylab.gca()._get_lines.color_cycle - pylab.clf() - else: - colors = iter(colors) - plots = [] - x = np.arange(self.X.shape[0]) - for i in range(self.X.shape[1]): - if ax is None: - a = fig.add_subplot(self.X.shape[1], 1, i + 1) - elif isinstance(ax, (tuple, list)): - a = ax[i] - else: - raise ValueError("Need one ax per latent dimnesion input_dim") - a.plot(self.X, c='k', alpha=.3) - plots.extend(a.plot(x, self.X.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i))) - a.fill_between(x, - self.X.T[i] - 2 * np.sqrt(self.X_variance.T[i]), - self.X.T[i] + 2 * np.sqrt(self.X_variance.T[i]), - facecolor=plots[-1].get_color(), - alpha=.3) - a.legend(borderaxespad=0.) 
- a.set_xlim(x.min(), x.max()) - if i < self.X.shape[1] - 1: - a.set_xticklabels('') - pylab.draw() - fig.tight_layout(h_pad=.01) # , rect=(0, 0, 1, .95)) - return fig - def latent_cost_and_grad(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2): """ objective function for fitting the latent variables for test points diff --git a/GPy/models/mrd.py b/GPy/models/mrd.py index 8e1471d2..2b9ef805 100644 --- a/GPy/models/mrd.py +++ b/GPy/models/mrd.py @@ -323,9 +323,6 @@ class MRD(Model): else: return pylab.gcf() - def plot_X_1d(self, *a, **kw): - return self.gref.plot_X_1d(*a, **kw) - def plot_X(self, fignum=None, ax=None): fig = self._handle_plotting(fignum, ax, lambda i, g, ax: ax.imshow(g.X)) return fig diff --git a/GPy/util/misc.py b/GPy/util/misc.py index 5866ecf9..09d1178e 100644 --- a/GPy/util/misc.py +++ b/GPy/util/misc.py @@ -4,6 +4,18 @@ import numpy as np from scipy import weave +### make a parameter to its corresponding array: +def param_to_array(*param): + """ + Convert an arbitrary number of parameters to :class:ndarray class objects. This is for + converting parameter objects to numpy arrays, when using scipy.weave.inline routine. + In scipy.weave.blitz there is no automatic array detection (even when the array inherits + from :class:ndarray)""" + assert len(param) > 0, "At least one parameter needed" + if len(param) == 1: + return param[0].view(np.ndarray) + return map(lambda x: x.view(np.ndarray), param) + def opt_wrapper(m, **kwargs): """ This function just wraps the optimization procedure of a GPy @@ -106,6 +118,7 @@ def fast_array_equal(A, B): elif ((A == None) and (B != None)) or ((A != None) and (B == None)): return False elif A.shape == B.shape: + A, B = param_to_array(A, B) if A.ndim == 2: N, D = [int(i) for i in A.shape] value = weave.inline(code2, support_code=support_code, @@ -123,7 +136,6 @@ def fast_array_equal(A, B): return value - if __name__ == '__main__': import pylab as plt X = np.linspace(1,10, 100)[:, None]