From e1bb3e508e75847f57d2a32c18594a6ecdf5eb90 Mon Sep 17 00:00:00 2001
From: Max Zwiessele
Date: Sat, 7 Dec 2013 18:45:24 +0000
Subject: [PATCH] naming and pil changes

---
 GPy/core/gp.py               |  4 ++--
 GPy/core/gp_base.py          |  4 ----
 GPy/core/index_operations.py | 22 ++++++++++++++++------
 GPy/core/parameter.py        |  4 ++--
 GPy/core/parameterized.py    | 31 ++++++++++++++-----------------
 GPy/kern/kern.py             |  4 ++--
 GPy/kern/parts/rbf.py        | 13 ++++++++-----
 GPy/models/gp_regression.py  |  4 ++--
 GPy/models/gplvm.py          |  6 +++---
 GPy/util/plot_latent.py      | 29 ++++++++++++++++-------------
 GPy/util/visualize.py        | 14 +++++++-------
 11 files changed, 72 insertions(+), 63 deletions(-)

diff --git a/GPy/core/gp.py b/GPy/core/gp.py
index acba1f3f..74e2a48f 100644
--- a/GPy/core/gp.py
+++ b/GPy/core/gp.py
@@ -21,8 +21,8 @@ class GP(GPBase):
 
     .. Note:: Multiple independent outputs are allowed using columns of Y
 
     """
-    def __init__(self, X, likelihood, kernel, normalize_X=False):
-        super(GP, self).__init__(X, likelihood, kernel, normalize_X=normalize_X)
+    def __init__(self, X, likelihood, kernel, normalize_X=False, name='gp'):
+        super(GP, self).__init__(X, likelihood, kernel, normalize_X=normalize_X, name=name)
         #self._set_params(self._get_params())
 
     def getstate(self):
diff --git a/GPy/core/gp_base.py b/GPy/core/gp_base.py
index 6e04e1fd..71d6991c 100644
--- a/GPy/core/gp_base.py
+++ b/GPy/core/gp_base.py
@@ -51,8 +51,6 @@ class GPBase(Model):
         return Model.getstate(self) + [self.X,
                 self.num_data,
                 self.input_dim,
-                self.kern,
-                self.likelihood,
                 self.output_dim,
                 self._Xoffset,
                 self._Xscale,
@@ -62,8 +60,6 @@ class GPBase(Model):
         self._Xscale = state.pop()
         self._Xoffset = state.pop()
         self.output_dim = state.pop()
-        self.likelihood = state.pop()
-        self.kern = state.pop()
         self.input_dim = state.pop()
         self.num_data = state.pop()
         self.X = state.pop()
diff --git a/GPy/core/index_operations.py b/GPy/core/index_operations.py
index 2e1fc774..ea7dfe2d 100644
--- a/GPy/core/index_operations.py
+++ b/GPy/core/index_operations.py
@@ -9,9 +9,12 @@ from parameter import Param
 from collections import defaultdict
 
 class ParamDict(defaultdict):
-    def __init__(self, default=lambda: numpy.array([], dtype=int)):
-        defaultdict.__init__(self, default)
-
+    def __init__(self):
+        """
+        The default factory is self.default_factory, unless a subclass overrides it.
+        """
+        defaultdict.__init__(self, self.default_factory)
+
     def __getitem__(self, key):
         try:
             return defaultdict.__getitem__(self, key)
@@ -35,7 +38,14 @@ class ParamDict(defaultdict):
             if numpy.all(a==key) and a._parent_index_==key._parent_index_:
                 return super(ParamDict, self).__setitem__(a, value)
         defaultdict.__setitem__(self, key, value)
-
+
+class SetDict(ParamDict):
+    def default_factory(self):
+        return set()
+
+class IntArrayDict(ParamDict):
+    def default_factory(self):
+        return numpy.int_([])
 
 class ParameterIndexOperations(object):
     '''
@@ -52,11 +62,11 @@ class ParameterIndexOperations(object):
         #self._reverse = collections.defaultdict(list)
 
     def __getstate__(self):
-        return self._properties, self._reverse
+        return (self._properties,)  # self._reverse no longer stored
 
     def __setstate__(self, state):
         self._properties = state[0]
-        self._reverse = state[1]
+        # self._reverse = state[1]
 
     def iteritems(self):
         return self._properties.iteritems()
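
A standalone sketch of the factory-method pattern ParamDict switches to above (illustrative code, not part of the patch): the factory is an ordinary method, so subclasses such as SetDict and IntArrayDict override one method to change the default, and no unpicklable lambda is baked into __init__:

    from collections import defaultdict
    import numpy as np

    class ArrayDict(defaultdict):
        def __init__(self):
            # the bound method becomes defaultdict's default_factory slot
            defaultdict.__init__(self, self.default_factory)

        def default_factory(self):
            # subclasses override this to supply a different default
            return np.array([], dtype=int)

    d = ArrayDict()
    print(d['anything'])  # empty int array, created on first access
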
diff --git a/GPy/core/parameter.py b/GPy/core/parameter.py
index 78e53d5b..a7ba39d9 100644
--- a/GPy/core/parameter.py
+++ b/GPy/core/parameter.py
@@ -61,7 +61,7 @@ class ObservableArray(ListArray, Observable):
         return self.__setitem__(slice(start, stop), val)
 
-class Param(ObservableArray, Nameable, Pickleable):
+class Param(ObservableArray, Nameable):
     """
     Parameter object for GPy models.
 
@@ -128,7 +128,7 @@ class Param(ObservableArray, Nameable, Pickleable):
     #===========================================================================
     # Pickling operations
     #===========================================================================
-    def __reduce__(self):
+    def __reduce_ex__(self, protocol):
         func, args, state = super(Param, self).__reduce__()
         return func, args, (state,
                             (self.name,
diff --git a/GPy/core/parameterized.py b/GPy/core/parameterized.py
index 36ae547e..c3a80650 100644
--- a/GPy/core/parameterized.py
+++ b/GPy/core/parameterized.py
@@ -92,10 +92,9 @@ class Parameterized(Nameable, Pickleable, Observable):
     Printing parameters:
 
         - print m:           prints a nice summary over all parameters
-        - print m.name:      prints details for all the parameters
-                             which start with name
-        - print m['.*name']: prints details for all the parameters
-                             which contain "name"
+        - print m.name:      prints details for the parameter with name 'name'
+        - print m[regexp]:   prints details for all the parameters
+                             which match (!) the regexp
         - print m['']:       prints details for all parameters
 
     Fields:
@@ -108,11 +107,10 @@ class Parameterized(Nameable, Pickleable, Observable):
         Tied_to:    which parameter it is tied to.
 
     Getting and setting parameters:
-
-        Two ways to get parameters:
-
-            - m.name    regular expression matches all parameters beginning with name
-            - m['name'] regular expression matches all parameters with name
+
+        Set all values in a parameter to one:
+
+            m.name.to.parameter = 1
 
     Handling of constraining, fixing and tying parameters:
 
         - m.name[:,1].constrain_positive()
         - m.name[0].tie_to(m.name[1])
-
+
     Fixing parameters will fix them to the value they are right now. If you change
     the parameter's value, the parameter will be fixed to the new value!
-
+
     If you want to operate on all parameters use m[''] to wildcard select all
     parameters and concatenate them. Printing m[''] will result in printing of all
     parameters in detail.
     """
@@ -355,20 +353,18 @@ class Parameterized(Nameable, Pickleable, Observable):
         return [
                 self._fixes_,
                 self._constraints_,
-                self._priors_,
                 self._parameters_,
                 self._name,
-                self.gradient_mapping,
+                #self.gradient_mapping,
                 self._added_names_,
                 ]
 
     def setstate(self, state):
         self._added_names_ = state.pop()
-        self.gradient_mapping = state.pop(),
+        #self.gradient_mapping = state.pop()
         self._name = state.pop()
         self._parameters_ = state.pop()
         self._connect_parameters()
-        self._priors = state.pop()
         self._constraints_ = state.pop()
         self._fixes_ = state.pop()
         self.parameters_changed()
@@ -639,9 +635,10 @@ class Parameterized(Nameable, Pickleable, Observable):
     def _ties_str(self):
         return [','.join(x._ties_str) for x in self.flattened_parameters]
 
     def __str__(self, header=True):
+        name = _adjust_name_for_printing(self.name) + "."
         constrs = self._constraints_str; ts = self._ties_str
         desc = self._description_str; names = self.parameter_names
-        nl = max([len(str(x)) for x in names + [_adjust_name_for_printing(self.name)]])
+        nl = max([len(str(x)) for x in names + [name]])
         sl = max([len(str(x)) for x in desc + ["Value"]])
         cl = max([len(str(x)) if x else 0 for x in constrs + ["Constraint"]])
         tl = max([len(str(x)) if x else 0 for x in ts + ["Tied to"]])
@@ -652,7 +649,7 @@ class Parameterized(Nameable, Pickleable, Observable):
         #to_print = [format_spec.format(p=p, const=c, t=t) if isinstance(p, Param) else p.__str__(header=False) for p, c, t in itertools.izip(self._parameters_, constrs, ts)]
         sep = '-'*(nl+sl+cl+tl+8*2+3)
         if header:
-            header = "  {{0:<{0}s}}  |  {{1:^{1}s}}  |  {{2:^{2}s}}  |  {{3:^{3}s}}".format(nl, sl, cl, tl).format(_adjust_name_for_printing(self.name), "Value", "Constraint", "Tied to")
+            header = "  {{0:<{0}s}}  |  {{1:^{1}s}}  |  {{2:^{2}s}}  |  {{3:^{3}s}}".format(nl, sl, cl, tl).format(name, "Value", "Constraint", "Tied to")
             #header += '\n' + sep
             to_print.insert(0, header)
         return '\n'.format(sep).join(to_print)
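
The getstate/setstate edits above (gp_base.py, parameterized.py) and in kern.py below all follow the same stack discipline: getstate appends state to a list, setstate pops it off in reverse, so both methods must add and remove entries symmetrically. A minimal self-contained sketch of the protocol (illustrative classes, not GPy code):

    class Base(object):
        def getstate(self):
            return [self.a]

        def setstate(self, state):
            self.a = state.pop()

    class Derived(Base):
        def getstate(self):
            # append own state after the parent's
            return Base.getstate(self) + [self.b]

        def setstate(self, state):
            # pop own state first (reverse order), then delegate
            self.b = state.pop()
            Base.setstate(self, state)

    d = Derived(); d.a, d.b = 1, 2
    e = Derived(); e.setstate(d.getstate())
    assert (e.a, e.b) == (1, 2)
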
diff --git a/GPy/kern/kern.py b/GPy/kern/kern.py
index 2d3a78c6..782cfa1b 100644
--- a/GPy/kern/kern.py
+++ b/GPy/kern/kern.py
@@ -59,7 +59,7 @@ class kern(Parameterized):
         Get the current state of the class,
         here just all the indices, rest can get recomputed
         """
-        return Parameterized.getstate(self) + [self._parameters_,
+        return Parameterized.getstate(self) + [#self._parameters_,
                 #self.num_params,
                 self.input_dim,
                 self.input_slices,
@@ -71,7 +71,7 @@ class kern(Parameterized):
         self.input_slices = state.pop()
         self.input_dim = state.pop()
         #self.num_params = state.pop()
-        self._parameters_ = state.pop()
+        #self._parameters_ = state.pop()
         Parameterized.setstate(self, state)
 
diff --git a/GPy/kern/parts/rbf.py b/GPy/kern/parts/rbf.py
index 8bd95dc5..4259f8f7 100644
--- a/GPy/kern/parts/rbf.py
+++ b/GPy/kern/parts/rbf.py
@@ -63,10 +63,11 @@ class RBF(Kernpart):
         #self._X, self._X2, self._params_save = np.empty(shape=(3, 1))
 
         # a set of optional args to pass to weave
-        self.weave_options = {'headers'           : ['<omp.h>'],
-                              'extra_compile_args': ['-fopenmp -O3'], # -march=native'],
-                              'extra_link_args'   : ['-lgomp']}
-
+        # self.weave_options = {'headers'           : ['<omp.h>'],
+        #                       'extra_compile_args': ['-fopenmp -O3'], # -march=native'],
+        #                       'extra_link_args'   : ['-lgomp']}
+        self.weave_options = {}
+
     def on_input_change(self, X):
         #self._K_computations(X, None)
         pass
@@ -133,7 +134,8 @@ class RBF(Kernpart):
             }
             """
             num_data, num_inducing, input_dim = X.shape[0], X.shape[0], self.input_dim
-            weave.inline(code, arg_names=['num_data', 'num_inducing', 'input_dim', 'X', 'X2', 'target', 'dvardLdK', 'var_len3'], type_converters=weave.converters.blitz, **self.weave_options)
+            X = np.asarray(X)
+            weave.inline(code, arg_names=['num_data', 'num_inducing', 'input_dim', 'X', 'target', 'dvardLdK', 'var_len3'], type_converters=weave.converters.blitz, **self.weave_options)
         else:
             code = """
             int q,i,j;
@@ -150,6 +152,7 @@ class RBF(Kernpart):
             """
             num_data, num_inducing, input_dim = X.shape[0], X2.shape[0], self.input_dim
             # [np.add(target[1+q:2+q],var_len3[q]*np.sum(dvardLdK*np.square(X[:,q][:,None]-X2[:,q][None,:])),target[1+q:2+q]) for q in range(self.input_dim)]
+            X, X2 = np.asarray(X), np.asarray(X2)
             weave.inline(code, arg_names=['num_data', 'num_inducing', 'input_dim', 'X', 'X2', 'target', 'dvardLdK', 'var_len3'], type_converters=weave.converters.blitz, **self.weave_options)
         else:
             target[1] += (self.variance / self.lengthscale) * np.sum(self._K_dvar * self._K_dist2 * dL_dK)
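
The np.asarray casts added before the weave.inline calls above strip an ndarray subclass (GPy's Param is one) down to a plain ndarray view without copying the data, which is presumably what scipy.weave's blitz type converters need here. A standalone check of that property (ParamLike is a stand-in, not GPy's Param):

    import numpy as np

    class ParamLike(np.ndarray):
        # stand-in for an ndarray subclass carrying extra behaviour
        pass

    x = np.arange(6.0).reshape(3, 2).view(ParamLike)
    plain = np.asarray(x)

    assert type(plain) is np.ndarray   # the subclass is stripped...
    assert np.shares_memory(plain, x)  # ...but no data is copied
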
diff --git a/GPy/models/gp_regression.py b/GPy/models/gp_regression.py
index cc58fbb1..d2d41c8e 100644
--- a/GPy/models/gp_regression.py
+++ b/GPy/models/gp_regression.py
@@ -25,13 +25,13 @@ class GPRegression(GP):
 
     """
 
-    def __init__(self, X, Y, kernel=None, normalize_X=False, normalize_Y=False):
+    def __init__(self, X, Y, kernel=None, normalize_X=False, normalize_Y=False, name="gp regression"):
 
         if kernel is None:
             kernel = kern.rbf(X.shape[1])
 
         likelihood = likelihoods.Gaussian(Y, normalize=normalize_Y)
 
-        super(GPRegression, self).__init__(X, likelihood, kernel, normalize_X=normalize_X)
+        super(GPRegression, self).__init__(X, likelihood, kernel, normalize_X=normalize_X, name=name)
         self.ensure_default_constraints()
 
     def getstate(self):
diff --git a/GPy/models/gplvm.py b/GPy/models/gplvm.py
index 28367be9..6b5ac07f 100644
--- a/GPy/models/gplvm.py
+++ b/GPy/models/gplvm.py
@@ -28,15 +28,15 @@ class GPLVM(GP):
     :type init: 'PCA'|'random'
 
     """
-    def __init__(self, Y, input_dim, init='PCA', X=None, kernel=None, normalize_Y=False):
+    def __init__(self, Y, input_dim, init='PCA', X=None, kernel=None, normalize_Y=False, name="gplvm"):
         if X is None:
             X = self.initialise_latent(init, input_dim, Y)
         if kernel is None:
             kernel = kern.rbf(input_dim, ARD=input_dim > 1) + kern.bias(input_dim, np.exp(-2))
         likelihood = Gaussian(Y, normalize=normalize_Y, variance=np.exp(-2.))
-        GP.__init__(self, X, likelihood, kernel, normalize_X=False)
+        GP.__init__(self, X, likelihood, kernel, normalize_X=False, name=name)
         self.X = Param('q_mean', self.X)
-        self.add_parameter(self.X, self.dK_dX, 0)
+        self.add_parameter(self.X, gradient=self.dK_dX, index=0)
         #self.set_prior('.*X', Gaussian_prior(0, 1))
         self.ensure_default_constraints()
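
The name keyword threaded through GP, GPRegression and GPLVM above follows a cooperative-constructor pattern: each subclass supplies its own default and forwards an explicit value up the chain, so every model prints under its own name. A minimal sketch with illustrative classes (not GPy's real signatures):

    class Parameterized(object):
        def __init__(self, name='parameterized'):
            self.name = name

    class GP(Parameterized):
        def __init__(self, X, name='gp'):
            super(GP, self).__init__(name=name)
            self.X = X

    class GPRegression(GP):
        def __init__(self, X, name='gp regression'):
            super(GPRegression, self).__init__(X, name=name)

    print(GPRegression(X=[[0.0]]).name)                # -> gp regression
    print(GPRegression(X=[[0.0]], name='my gp').name)  # caller override -> my gp
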
diff --git a/GPy/util/plot_latent.py b/GPy/util/plot_latent.py
index 62442650..cecb811c 100644
--- a/GPy/util/plot_latent.py
+++ b/GPy/util/plot_latent.py
@@ -38,9 +38,11 @@ def plot_latent(model, labels=None, which_indices=None,
 
     input_1, input_2 = most_significant_input_dimensions(model, which_indices)
 
+    X = np.asarray(model.X)
+
     # first, plot the output variance as a function of the latent space
-    Xtest, xx, yy, xmin, xmax = util.plot.x_frame2D(model.X[:, [input_1, input_2]], resolution=resolution)
-    Xtest_full = np.zeros((Xtest.shape[0], model.X.shape[1]))
+    Xtest, xx, yy, xmin, xmax = util.plot.x_frame2D(X[:, [input_1, input_2]], resolution=resolution)
+    Xtest_full = np.zeros((Xtest.shape[0], X.shape[1]))
 
     def plot_function(x):
         Xtest_full[:, [input_1, input_2]] = x
@@ -48,7 +50,7 @@ def plot_latent(model, labels=None, which_indices=None,
         var = var[:, :1]
         return np.log(var)
     view = ImshowController(ax, plot_function,
-                            tuple(model.X.min(0)[:, [input_1, input_2]]) + tuple(model.X.max(0)[:, [input_1, input_2]]),
+                            tuple(X[:, [input_1, input_2]].min(0)) + tuple(X[:, [input_1, input_2]].max(0)),
                             resolution, aspect=aspect, interpolation='bilinear',
                             cmap=pb.cm.binary)
@@ -74,11 +76,11 @@ def plot_latent(model, labels=None, which_indices=None,
         index = np.nonzero(labels == ul)[0]
         if model.input_dim == 1:
-            x = model.X[index, input_1]
+            x = X[index, input_1]
             y = np.zeros(index.size)
         else:
-            x = model.X[index, input_1]
-            y = model.X[index, input_2]
+            x = X[index, input_1]
+            y = X[index, input_2]
         ax.scatter(x, y, marker=m, s=s, color=util.plot.Tango.nextMedium(), label=this_label)
 
     ax.set_xlabel('latent dimension %i' % input_1)
@@ -117,16 +119,17 @@ def plot_magnification(model, labels=None, which_indices=None,
         labels = np.ones(model.num_data)
 
     input_1, input_2 = most_significant_input_dimensions(model, which_indices)
-
+    X = np.asarray(model.X)
+
     # first, plot the output variance as a function of the latent space
-    Xtest, xx, yy, xmin, xmax = util.plot.x_frame2D(model.X[:, [input_1, input_2]], resolution=resolution)
-    Xtest_full = np.zeros((Xtest.shape[0], model.X.shape[1]))
+    Xtest, xx, yy, xmin, xmax = util.plot.x_frame2D(X[:, [input_1, input_2]], resolution=resolution)
+    Xtest_full = np.zeros((Xtest.shape[0], X.shape[1]))
     def plot_function(x):
         Xtest_full[:, [input_1, input_2]] = x
         mf=model.magnification(Xtest_full)
         return mf
     view = ImshowController(ax, plot_function,
-                            tuple(model.X.min(0)[:, [input_1, input_2]]) + tuple(model.X.max(0)[:, [input_1, input_2]]),
+                            tuple(X[:, [input_1, input_2]].min(0)) + tuple(X[:, [input_1, input_2]].max(0)),
                             resolution, aspect=aspect, interpolation='bilinear',
                             cmap=pb.cm.gray)
@@ -149,11 +152,11 @@ def plot_magnification(model, labels=None, which_indices=None,
         index = np.nonzero(labels == ul)[0]
         if model.input_dim == 1:
-            x = model.X[index, input_1]
+            x = X[index, input_1]
             y = np.zeros(index.size)
         else:
-            x = model.X[index, input_1]
-            y = model.X[index, input_2]
+            x = X[index, input_1]
+            y = X[index, input_2]
         ax.scatter(x, y, marker=m, s=s, color=util.plot.Tango.nextMedium(), label=this_label)
 
     ax.set_xlabel('latent dimension %i' % input_1)
diff --git a/GPy/util/visualize.py b/GPy/util/visualize.py
index 683c6c67..b13c100c 100644
--- a/GPy/util/visualize.py
+++ b/GPy/util/visualize.py
@@ -92,7 +92,7 @@ class lvm(matplotlib_show):
         :param latent_axes: the axes where the latent visualization should be plotted.
         """
         if vals == None:
-            vals = model.X[0]
+            vals = np.asarray(model.X[0])
 
         matplotlib_show.__init__(self, vals, axes=latent_axes)
 
@@ -171,21 +171,21 @@ class lvm_subplots(lvm):
     latent_axes is a np array of dimension np.ceil(input_dim/2),
     one for each pair of the latent dimensions.
     """
-    def __init__(self, vals, Model, data_visualize, latent_axes=None, sense_axes=None):
-        self.nplots = int(np.ceil(Model.input_dim/2.))+1
+    def __init__(self, vals, model, data_visualize, latent_axes=None, sense_axes=None):
+        self.nplots = int(np.ceil(model.input_dim/2.))+1
         assert len(latent_axes)==self.nplots
         if vals==None:
-            vals = Model.X[0, :]
+            vals = np.asarray(model.X[0, :])
         self.latent_values = vals
 
         for i, axis in enumerate(latent_axes):
             if i == self.nplots-1:
-                if self.nplots*2!=Model.input_dim:
+                if self.nplots*2!=model.input_dim:
                     latent_index = [i*2, i*2]
-                    lvm.__init__(self, self.latent_vals, Model, data_visualize, axis, sense_axes, latent_index=latent_index)
+                    lvm.__init__(self, self.latent_values, model, data_visualize, axis, sense_axes, latent_index=latent_index)
             else:
                 latent_index = [i*2, i*2+1]
-                lvm.__init__(self, self.latent_vals, Model, data_visualize, axis, latent_index=latent_index)
+                lvm.__init__(self, self.latent_values, model, data_visualize, axis, latent_index=latent_index)
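
The bounds built for ImshowController in plot_latent.py above reduce after selecting columns, X[:, cols].min(0), which yields one bound per selected latent dimension; reducing first, X.min(0)[:, cols], fails because X.min(0) is already 1-D. A standalone numpy check:

    import numpy as np

    X = np.random.randn(100, 5)
    cols = [0, 2]

    bounds = tuple(X[:, cols].min(0)) + tuple(X[:, cols].max(0))
    assert len(bounds) == 4   # (xmin, ymin, xmax, ymax)

    try:
        X.min(0)[:, cols]     # a 1-D result cannot take a 2-D index
    except IndexError:
        pass
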