From 50a68e1a65f95386b5dccee5cd968fec03038356 Mon Sep 17 00:00:00 2001 From: James Hensman Date: Mon, 29 Apr 2013 12:11:36 +0100 Subject: [PATCH 1/6] Yak shaving --- GPy/models/GPLVM.py | 6 +++--- GPy/models/sparse_GP.py | 5 +++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/GPy/models/GPLVM.py b/GPy/models/GPLVM.py index c0d9429a..157fe1c3 100644 --- a/GPy/models/GPLVM.py +++ b/GPy/models/GPLVM.py @@ -1,4 +1,4 @@ -# Copyright (c) 2012, GPy authors (see AUTHORS.txt). +### Copyright (c) 2012, GPy authors (see AUTHORS.txt). # Licensed under the BSD 3-clause license (see LICENSE.txt) @@ -91,8 +91,8 @@ class GPLVM(GP): Xtest_full[:, :2] = Xtest mu, var, low, up = self.predict(Xtest_full) var = var[:, :1] - ax.imshow(var.reshape(resolution, resolution).T[::-1, :], - extent=[xmin[0], xmax[0], xmin[1], xmax[1]], cmap=pb.cm.binary,interpolation='bilinear') + ax.imshow(var.reshape(resolution, resolution).T, + extent=[xmin[0], xmax[0], xmin[1], xmax[1]], cmap=pb.cm.binary,interpolation='bilinear',origin='lower') for i,ul in enumerate(np.unique(labels)): if type(ul) is np.string_: diff --git a/GPy/models/sparse_GP.py b/GPy/models/sparse_GP.py index a085090d..58f02cca 100644 --- a/GPy/models/sparse_GP.py +++ b/GPy/models/sparse_GP.py @@ -102,13 +102,14 @@ class sparse_GP(GP): tmp = linalg.lapack.flapack.dtrtrs(self.Lm,np.asfortranarray(self.Bi),lower=1,trans=1)[0] self.C = linalg.lapack.flapack.dtrtrs(self.Lm,np.asfortranarray(tmp.T),lower=1,trans=1)[0] - #self.Cpsi1V = np.dot(self.C,self.psi1V) #back substutue C into psi1V tmp,info1 = linalg.lapack.flapack.dtrtrs(self.Lm,np.asfortranarray(self.psi1V),lower=1,trans=0) tmp,info2 = linalg.lapack.flapack.dpotrs(self.LB,tmp,lower=1) self.Cpsi1V,info3 = linalg.lapack.flapack.dtrtrs(self.Lm,tmp,lower=1,trans=1) + #self.Cpsi1V = np.dot(self.C,self.psi1V) + + self.Cpsi1VVpsi1 = np.dot(self.Cpsi1V,self.psi1V.T) - self.Cpsi1VVpsi1 = np.dot(self.Cpsi1V,self.psi1V.T) #TODO: stabilize? 
self.E = tdot(self.Cpsi1V/sf) From aaaa1b5251d194dee235944fa6413165bc6f81a5 Mon Sep 17 00:00:00 2001 From: Max Zwiessele Date: Mon, 29 Apr 2013 16:21:38 +0100 Subject: [PATCH 2/6] model re compilation added --- GPy/core/model.py | 188 +++++++++++++++++++++++----------------------- 1 file changed, 95 insertions(+), 93 deletions(-) diff --git a/GPy/core/model.py b/GPy/core/model.py index f3542ce8..dac6d55e 100644 --- a/GPy/core/model.py +++ b/GPy/core/model.py @@ -2,17 +2,19 @@ # Licensed under the BSD 3-clause license (see LICENSE.txt) -import numpy as np -from scipy import optimize -import sys, pdb -import multiprocessing as mp -from GPy.util.misc import opt_wrapper -#import numdifftools as ndt -from parameterised import parameterised, truncate_pad -import priors -from ..util.linalg import jitchol -from ..inference import optimization from .. import likelihoods +from ..inference import optimization +from ..util.linalg import jitchol +from GPy.util.misc import opt_wrapper +from parameterised import parameterised, truncate_pad +from scipy import optimize +import multiprocessing as mp +import numpy as np +import priors +import re +import sys +import pdb +# import numdifftools as ndt class model(parameterised): def __init__(self): @@ -24,14 +26,14 @@ class model(parameterised): self.preferred_optimizer = 'tnc' def _get_params(self): raise NotImplementedError, "this needs to be implemented to use the model class" - def _set_params(self,x): + def _set_params(self, x): raise NotImplementedError, "this needs to be implemented to use the model class" def log_likelihood(self): raise NotImplementedError, "this needs to be implemented to use the model class" def _log_likelihood_gradients(self): raise NotImplementedError, "this needs to be implemented to use the model class" - def set_prior(self,which,what): + def set_prior(self, which, what): """ Sets priors on the model parameters. 
@@ -52,59 +54,59 @@ class model(parameterised): which = self.grep_param_names(which) - #check tied situation - tie_partial_matches = [tie for tie in self.tied_indices if (not set(tie).isdisjoint(set(which))) & (not set(tie)==set(which))] + # check tied situation + tie_partial_matches = [tie for tie in self.tied_indices if (not set(tie).isdisjoint(set(which))) & (not set(tie) == set(which))] if len(tie_partial_matches): raise ValueError, "cannot place prior across partial ties" - tie_matches = [tie for tie in self.tied_indices if set(which)==set(tie) ] - if len(tie_matches)>1: + tie_matches = [tie for tie in self.tied_indices if set(which) == set(tie) ] + if len(tie_matches) > 1: raise ValueError, "cannot place prior across multiple ties" - elif len(tie_matches)==1: - which = which[:1]# just place a prior object on the first parameter + elif len(tie_matches) == 1: + which = which[:1] # just place a prior object on the first parameter - #check constraints are okay + # check constraints are okay if isinstance(what, (priors.gamma, priors.log_Gaussian)): - assert not np.any(which[:,None]==self.constrained_negative_indices), "constraint and prior incompatible" - assert not np.any(which[:,None]==self.constrained_bounded_indices), "constraint and prior incompatible" + assert not np.any(which[:, None] == self.constrained_negative_indices), "constraint and prior incompatible" + assert not np.any(which[:, None] == self.constrained_bounded_indices), "constraint and prior incompatible" unconst = np.setdiff1d(which, self.constrained_positive_indices) if len(unconst): print "Warning: constraining parameters to be positive:" - print '\n'.join([n for i,n in enumerate(self._get_param_names()) if i in unconst]) + print '\n'.join([n for i, n in enumerate(self._get_param_names()) if i in unconst]) print '\n' self.constrain_positive(unconst) - elif isinstance(what,priors.Gaussian): - assert not np.any(which[:,None]==self.all_constrained_indices()), "constraint and prior incompatible" + 
elif isinstance(what, priors.Gaussian): + assert not np.any(which[:, None] == self.all_constrained_indices()), "constraint and prior incompatible" else: raise ValueError, "prior not recognised" - #store the prior in a local list + # store the prior in a local list for w in which: self.priors[w] = what - def get_gradient(self,name, return_names=False): + def get_gradient(self, name, return_names=False): """ Get model gradient(s) by name. The name is applied as a regular expression and all parameters that match that regular expression are returned. """ matches = self.grep_param_names(name) if len(matches): if return_names: - return self._log_likelihood_gradients()[matches], np.asarray(self._get_param_names())[matches].tolist() + return self._log_likelihood_gradients()[matches], np.asarray(self._get_param_names())[matches].tolist() else: return self._log_likelihood_gradients()[matches] else: - raise AttributeError, "no parameter matches %s"%name + raise AttributeError, "no parameter matches %s" % name def log_prior(self): """evaluate the prior""" - return np.sum([p.lnpdf(x) for p, x in zip(self.priors,self._get_params()) if p is not None]) + return np.sum([p.lnpdf(x) for p, x in zip(self.priors, self._get_params()) if p is not None]) def _log_prior_gradients(self): """evaluate the gradients of the priors""" x = self._get_params() ret = np.zeros(x.size) - [np.put(ret,i,p.lnpdf_grad(xx)) for i,(p,xx) in enumerate(zip(self.priors,x)) if not p is None] + [np.put(ret, i, p.lnpdf_grad(xx)) for i, (p, xx) in enumerate(zip(self.priors, x)) if not p is None] return ret def _transform_gradients(self, g): @@ -113,13 +115,13 @@ class model(parameterised): """ x = self._get_params() - g[self.constrained_positive_indices] = g[self.constrained_positive_indices]*x[self.constrained_positive_indices] - g[self.constrained_negative_indices] = g[self.constrained_negative_indices]*x[self.constrained_negative_indices] - [np.put(g,i,g[i]*(x[i]-l)*(h-x[i])/(h-l)) for i,l,h in 
zip(self.constrained_bounded_indices, self.constrained_bounded_lowers, self.constrained_bounded_uppers)] - [np.put(g,i,v) for i,v in [(t[0],np.sum(g[t])) for t in self.tied_indices]] + g[self.constrained_positive_indices] = g[self.constrained_positive_indices] * x[self.constrained_positive_indices] + g[self.constrained_negative_indices] = g[self.constrained_negative_indices] * x[self.constrained_negative_indices] + [np.put(g, i, g[i] * (x[i] - l) * (h - x[i]) / (h - l)) for i, l, h in zip(self.constrained_bounded_indices, self.constrained_bounded_lowers, self.constrained_bounded_uppers)] + [np.put(g, i, v) for i, v in [(t[0], np.sum(g[t])) for t in self.tied_indices]] if len(self.tied_indices) or len(self.constrained_fixed_indices): - to_remove = np.hstack((self.constrained_fixed_indices+[t[1:] for t in self.tied_indices])) - return np.delete(g,to_remove) + to_remove = np.hstack((self.constrained_fixed_indices + [t[1:] for t in self.tied_indices])) + return np.delete(g, to_remove) else: return g @@ -129,15 +131,15 @@ class model(parameterised): Randomize the model. Make this draw from the prior if one exists, else draw from N(0,1) """ - #first take care of all parameters (from N(0,1)) + # first take care of all parameters (from N(0,1)) x = self._get_params_transformed() x = np.random.randn(x.size) self._set_params_transformed(x) - #now draw from prior where possible + # now draw from prior where possible x = self._get_params() - [np.put(x,i,p.rvs(1)) for i,p in enumerate(self.priors) if not p is None] + [np.put(x, i, p.rvs(1)) for i, p in enumerate(self.priors) if not p is None] self._set_params(x) - self._set_params_transformed(self._get_params_transformed())#makes sure all of the tied parameters get the same init (since there's only one prior object...) + self._set_params_transformed(self._get_params_transformed()) # makes sure all of the tied parameters get the same init (since there's only one prior object...) 
def optimize_restarts(self, Nrestarts=10, robust=False, verbose=True, parallel=False, num_processes=None, **kwargs): @@ -171,10 +173,10 @@ class model(parameterised): pool = mp.Pool(processes=num_processes) for i in range(Nrestarts): self.randomize() - job = pool.apply_async(opt_wrapper, args = (self,), kwds = kwargs) + job = pool.apply_async(opt_wrapper, args=(self,), kwds=kwargs) jobs.append(job) - pool.close() # signal that no more data coming in + pool.close() # signal that no more data coming in pool.join() # wait for all the tasks to complete except KeyboardInterrupt: print "Ctrl+c received, terminating and joining pool." @@ -190,10 +192,10 @@ class model(parameterised): self.optimization_runs.append(jobs[i].get()) if verbose: - print("Optimization restart {0}/{1}, f = {2}".format(i+1, Nrestarts, self.optimization_runs[-1].f_opt)) + print("Optimization restart {0}/{1}, f = {2}".format(i + 1, Nrestarts, self.optimization_runs[-1].f_opt)) except Exception as e: if robust: - print("Warning - optimization restart {0}/{1} failed".format(i+1, Nrestarts)) + print("Warning - optimization restart {0}/{1} failed".format(i + 1, Nrestarts)) else: raise e @@ -203,22 +205,22 @@ class model(parameterised): else: self._set_params_transformed(initial_parameters) - def ensure_default_constraints(self,warn=False): + def ensure_default_constraints(self, warn=False): """ Ensure that any variables which should clearly be positive have been constrained somehow. """ - positive_strings = ['variance','lengthscale', 'precision'] + positive_strings = ['variance', 'lengthscale', 'precision'] param_names = self._get_param_names() currently_constrained = self.all_constrained_indices() to_make_positive = [] for s in positive_strings: for i in self.grep_param_names(s): if not (i in currently_constrained): - to_make_positive.append(param_names[i]) + to_make_positive.append(re.escape(param_names[i])) if warn: - print "Warning! constraining %s postive"%name + print "Warning! 
constraining %s positive" % s if len(to_make_positive): - self.constrain_positive('('+'|'.join(to_make_positive)+')') + self.constrain_positive('(' + '|'.join(to_make_positive) + ')') @@ -236,14 +238,14 @@ class model(parameterised): self._set_params_transformed(x) LL_gradients = self._transform_gradients(self._log_likelihood_gradients()) prior_gradients = self._transform_gradients(self._log_prior_gradients()) - return - LL_gradients - prior_gradients + return -LL_gradients - prior_gradients def objective_and_gradients(self, x): self._set_params_transformed(x) - obj_f = -self.log_likelihood() - self.log_prior() + obj_f = -self.log_likelihood() - self.log_prior() LL_gradients = self._transform_gradients(self._log_likelihood_gradients()) prior_gradients = self._transform_gradients(self._log_prior_gradients()) - obj_grads = - LL_gradients - prior_gradients + obj_grads = -LL_gradients - prior_gradients return obj_f, obj_grads def optimize(self, optimizer=None, start=None, **kwargs): @@ -269,7 +271,7 @@ class model(parameterised): self._set_params_transformed(opt.x_opt) - def optimize_SGD(self, momentum = 0.1, learning_rate = 0.01, iterations = 20, **kwargs): + def optimize_SGD(self, momentum=0.1, learning_rate=0.01, iterations=20, **kwargs): # assert self.Y.shape[1] > 1, "SGD only works with D > 1" sgd = SGD.StochasticGD(self, iterations, learning_rate, momentum, **kwargs) sgd.run() @@ -277,8 +279,8 @@ class model(parameterised): def Laplace_covariance(self): """return the covariance matric of a Laplace approximatino at the current (stationary) point""" - #TODO add in the prior contributions for MAP estimation - #TODO fix the hessian for tied, constrained and fixed components + # TODO add in the prior contributions for MAP estimation + # TODO fix the hessian for tied, constrained and fixed components if hasattr(self, 'log_likelihood_hessian'): A = -self.log_likelihood_hessian() @@ -292,8 +294,8 @@ class model(parameterised): A = -h(x) self._set_params(x) # check for 
almost zero components on the diagonal which screw up the cholesky - aa = np.nonzero((np.diag(A)<1e-6) & (np.diag(A)>0.))[0] - A[aa,aa] = 0. + aa = np.nonzero((np.diag(A) < 1e-6) & (np.diag(A) > 0.))[0] + A[aa, aa] = 0. return A def Laplace_evidence(self): @@ -304,11 +306,11 @@ class model(parameterised): hld = np.sum(np.log(np.diag(jitchol(A)[0]))) except: return np.nan - return 0.5*self._get_params().size*np.log(2*np.pi) + self.log_likelihood() - hld + return 0.5 * self._get_params().size * np.log(2 * np.pi) + self.log_likelihood() - hld def __str__(self): s = parameterised.__str__(self).split('\n') - #add priors to the string + # add priors to the string strs = [str(p) if p is not None else '' for p in self.priors] width = np.array(max([len(p) for p in strs] + [5])) + 4 @@ -319,16 +321,16 @@ class model(parameterised): obj_funct += ', Log prior: {0:.3e}, LL+prior = {0:.3e}'.format(log_prior, log_like + log_prior) obj_funct += '\n\n' s[0] = obj_funct + s[0] - s[0] += "|{h:^{col}}".format(h = 'Prior', col = width) - s[1] += '-'*(width + 1) + s[0] += "|{h:^{col}}".format(h='Prior', col=width) + s[1] += '-' * (width + 1) - for p in range(2, len(strs)+2): - s[p] += '|{prior:^{width}}'.format(prior = strs[p-2], width = width) + for p in range(2, len(strs) + 2): + s[p] += '|{prior:^{width}}'.format(prior=strs[p - 2], width=width) return '\n'.join(s) - def checkgrad(self, target_param = None, verbose=False, step=1e-6, tolerance = 1e-3): + def checkgrad(self, target_param=None, verbose=False, step=1e-6, tolerance=1e-3): """ Check the gradient of the model by comparing to a numerical estimate. 
If the verbose flag is passed, invividual components are tested (and printed) @@ -348,27 +350,27 @@ class model(parameterised): x = self._get_params_transformed().copy() if not verbose: - #just check the global ratio - dx = step*np.sign(np.random.uniform(-1,1,x.size)) + # just check the global ratio + dx = step * np.sign(np.random.uniform(-1, 1, x.size)) - #evaulate around the point x - f1, g1 = self.objective_and_gradients(x+dx) - f2, g2 = self.objective_and_gradients(x-dx) + # evaulate around the point x + f1, g1 = self.objective_and_gradients(x + dx) + f2, g2 = self.objective_and_gradients(x - dx) gradient = self.objective_function_gradients(x) - numerical_gradient = (f1-f2)/(2*dx) - global_ratio = (f1-f2)/(2*np.dot(dx,gradient)) + numerical_gradient = (f1 - f2) / (2 * dx) + global_ratio = (f1 - f2) / (2 * np.dot(dx, gradient)) - if (np.abs(1.-global_ratio) Date: Mon, 29 Apr 2013 16:30:18 +0100 Subject: [PATCH 3/6] readded parameterized changes --- GPy/core/parameterised.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/GPy/core/parameterised.py b/GPy/core/parameterised.py index 4d1d6992..9d0dfc78 100644 --- a/GPy/core/parameterised.py +++ b/GPy/core/parameterised.py @@ -171,10 +171,18 @@ class parameterised(object): return expr def Nparam_transformed(self): - ties = 0 - for ar in self.tied_indices: - ties += ar.size - 1 - return self.Nparam - len(self.constrained_fixed_indices) - ties + """ + Compute the number of parameters after ties and fixing have been performed + """ + ties = 0 + for ti in self.tied_indices: + ties += ti.size - 1 + + fixes = 0 + for fi in self.constrained_fixed_indices: + fixes += len(fi) + + return self.Nparam - fixes - ties def constrain_positive(self, which): """ From de57f837f44060d77da2dd810f85781f024838aa Mon Sep 17 00:00:00 2001 From: Alan Saul Date: Mon, 29 Apr 2013 20:09:18 +0100 Subject: [PATCH 4/6] Trying to upgrade numpy version to 1.7.1 as there was an error introduced for weave on 1.7.0 
causing tests to fail --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index e7944d8a..6d188401 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,9 +12,10 @@ before_install: - sudo apt-get install -qq python-matplotlib install: + - pip install --upgrade numpy==1.7.1 - pip install sphinx - pip install nose - pip install . --use-mirrors # command to run tests, e.g. python setup.py test script: - - nosetests GPy/testing \ No newline at end of file + - nosetests GPy/testing From 33521f676393d543465c43858ceeef04063e69b8 Mon Sep 17 00:00:00 2001 From: Neil Lawrence Date: Mon, 29 Apr 2013 21:45:15 +0100 Subject: [PATCH 5/6] Unification of the visualize object hierarchy and standardization of the click and move behaviour of lvm and lvm_dimselect. Set colours of input sensitivity histogram to red for left (port) and green for right (starboard). --- GPy/core/model.py | 2 +- GPy/examples/dimensionality_reduction.py | 44 +++- GPy/util/mocap.py | 1 - GPy/util/visualize.py | 317 +++++++++++++++-------- 4 files changed, 240 insertions(+), 124 deletions(-) diff --git a/GPy/core/model.py b/GPy/core/model.py index f3542ce8..6fa66429 100644 --- a/GPy/core/model.py +++ b/GPy/core/model.py @@ -253,7 +253,7 @@ class model(parameterised): :max_f_eval: maximum number of function evaluations :messages: whether to display during optimisation - :param optimzer: whice optimizer to use (defaults to self.preferred optimizer) + :param optimzer: which optimizer to use (defaults to self.preferred optimizer) :type optimzer: string TODO: valid strings? 
""" if optimizer is None: diff --git a/GPy/examples/dimensionality_reduction.py b/GPy/examples/dimensionality_reduction.py index 9da161f2..75820407 100644 --- a/GPy/examples/dimensionality_reduction.py +++ b/GPy/examples/dimensionality_reduction.py @@ -81,11 +81,19 @@ def BGPLVM_oil(optimize=True, N=100, Q=10, M=15, max_f_eval=300): else: m.ensure_default_constraints() - # plot - print(m) - m.plot_latent(labels=m.data_labels) - pb.figure() - pb.bar(np.arange(m.kern.D), 1. / m.input_sensitivity()) + y = m.likelihood.Y[0, :] + fig,(latent_axes,hist_axes) = plt.subplots(1,2) + plt.sca(latent_axes) + m.plot_latent() + data_show = GPy.util.visualize.vector_show(y) + lvm_visualizer = GPy.util.visualize.lvm_dimselect(m.X[0, :], m, data_show, latent_axes=latent_axes, hist_axes=hist_axes) + raw_input('Press enter to finish') + plt.close('all') + # # plot + # print(m) + # m.plot_latent(labels=m.data_labels) + # pb.figure() + # pb.bar(np.arange(m.kern.D), 1. / m.input_sensitivity()) return m def oil_100(): @@ -348,7 +356,7 @@ def brendan_faces(): ax = m.plot_latent() y = m.likelihood.Y[0, :] data_show = GPy.util.visualize.image_show(y[None, :], dimensions=(20, 28), transpose=True, invert=False, scale=False) - lvm_visualizer = GPy.util.visualize.lvm(m, data_show, ax) + lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :], m, data_show, ax) raw_input('Press enter to finish') plt.close('all') @@ -365,7 +373,29 @@ def stick(): ax = m.plot_latent() y = m.likelihood.Y[0, :] data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect']) - lvm_visualizer = GPy.util.visualize.lvm(m, data_show, ax) + lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :], m, data_show, ax) + raw_input('Press enter to finish') + plt.close('all') + + return m + +def cmu_mocap(subject='35', motion=['01'], in_place=True): + + data = GPy.util.datasets.cmu_mocap(subject, motion) + Y = data['Y'] + if in_place: + # Make figure move in place. 
+ data['Y'][:, 0:3]=0.0 + m = GPy.models.GPLVM(data['Y'], 2, normalize_Y=True) + + # optimize + m.ensure_default_constraints() + m.optimize(messages=1, max_f_eval=10000) + + ax = m.plot_latent() + y = m.likelihood.Y[0, :] + data_show = GPy.util.visualize.skeleton_show(y[None, :], data['skel']) + lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :], m, data_show, ax) raw_input('Press enter to finish') plt.close('all') diff --git a/GPy/util/mocap.py b/GPy/util/mocap.py index 76650086..174728bd 100644 --- a/GPy/util/mocap.py +++ b/GPy/util/mocap.py @@ -532,7 +532,6 @@ class acclaim_skeleton(skeleton): self.vertices[0].meta['orientation'] = [float(parts[1]), float(parts[2]), float(parts[3])] - print self.vertices[0].meta['orientation'] lin = self.read_line(fid) return lin diff --git a/GPy/util/visualize.py b/GPy/util/visualize.py index 9754db63..9475c85b 100644 --- a/GPy/util/visualize.py +++ b/GPy/util/visualize.py @@ -3,121 +3,7 @@ from mpl_toolkits.mplot3d import Axes3D import GPy import numpy as np import matplotlib as mpl - -class lvm: - def __init__(self, model, data_visualize, latent_axes, latent_index=[0,1]): - if isinstance(latent_axes,mpl.axes.Axes): - self.cid = latent_axes.figure.canvas.mpl_connect('button_press_event', self.on_click) - self.cid = latent_axes.figure.canvas.mpl_connect('motion_notify_event', self.on_move) - self.cid = latent_axes.figure.canvas.mpl_connect('axes_leave_event', self.on_leave) - self.cid = latent_axes.figure.canvas.mpl_connect('axes_enter_event', self.on_enter) - else: - self.cid = latent_axes[0].figure.canvas.mpl_connect('button_press_event', self.on_click) - self.cid = latent_axes[0].figure.canvas.mpl_connect('motion_notify_event', self.on_move) - self.cid = latent_axes[0].figure.canvas.mpl_connect('axes_leave_event', self.on_leave) - self.cid = latent_axes[0].figure.canvas.mpl_connect('axes_enter_event', self.on_enter) - self.data_visualize = data_visualize - self.model = model - self.latent_axes = latent_axes - - self.called = 
False - self.move_on = False - self.latent_index = latent_index - self.latent_dim = model.Q - - def on_enter(self,event): - pass - def on_leave(self,event): - pass - - def on_click(self, event): - #print 'click', event.xdata, event.ydata - if event.inaxes!=self.latent_axes: return - self.move_on = not self.move_on - # if self.called: - # self.xs.append(event.xdata) - # self.ys.append(event.ydata) - # self.line.set_data(self.xs, self.ys) - # self.line.figure.canvas.draw() - # else: - # self.xs = [event.xdata] - # self.ys = [event.ydata] - # self.line, = self.latent_axes.plot(event.xdata, event.ydata) - self.called = True - def on_move(self, event): - if event.inaxes!=self.latent_axes: return - if self.called and self.move_on: - # Call modify code on move - #print 'move', event.xdata, event.ydata - latent_values = np.zeros((1,self.latent_dim)) - latent_values[0,self.latent_index] = np.array([event.xdata, event.ydata]) - y = self.model.predict(latent_values)[0] - self.data_visualize.modify(y) - #print 'y', y - -class lvm_subplots(lvm): - """ - latent_axes is a np array of dimension np.ceil(Q/2) + 1, - one for each pair of the axes, and the last one for the sensitiity histogram - """ - def __init__(self, model, data_visualize, latent_axes=None, latent_index=[0,1]): - self.nplots = int(np.ceil(model.Q/2.))+1 - lvm.__init__(self,model,data_visualize,latent_axes,latent_index) - self.latent_values = np.zeros(2*np.ceil(self.model.Q/2.)) # possibly an extra dimension on this - assert latent_axes.size == self.nplots - - -class lvm_dimselect(lvm): - """ - A visualizer for latent variable models - with selection by clicking on the histogram - """ - def __init__(self, model, data_visualize): - self.fig,(latent_axes,self.hist_axes) = plt.subplots(1,2) - - lvm.__init__(self,model,data_visualize,latent_axes,[0,1]) - self.latent_values_clicked = np.zeros(model.Q) - self.clicked_handle = self.latent_axes.plot([0],[0],'rx',mew=2)[0] - print "use left and right mouse butons to select 
dimensions" - - def on_click(self, event): - #print "click" - if event.inaxes==self.hist_axes: - self.hist_axes.cla() - self.hist_axes.bar(np.arange(self.model.Q),1./self.model.input_sensitivity(),color='b') - new_index = max(0,min(int(np.round(event.xdata-0.5)),self.model.Q-1)) - self.latent_index[(0 if event.button==1 else 1)] = new_index - self.hist_axes.bar(np.array(self.latent_index),1./self.model.input_sensitivity()[self.latent_index],color='r') - self.latent_axes.cla() - self.model.plot_latent(which_indices = self.latent_index,ax=self.latent_axes) - self.clicked_handle = self.latent_axes.plot([self.latent_values_clicked[self.latent_index[0]]],self.latent_values_clicked[self.latent_index[1]],'rx',mew=2)[0] - if event.inaxes==self.latent_axes: - self.clicked_handle.set_visible(False) - self.latent_values_clicked[self.latent_index] = np.array([event.xdata,event.ydata]) - self.clicked_handle = self.latent_axes.plot([self.latent_values_clicked[self.latent_index[0]]],self.latent_values_clicked[self.latent_index[1]],'rx',mew=2)[0] - self.fig.canvas.draw() - self.move_on=True - self.called = True - - - def on_move(self, event): - #print "move" - if event.inaxes!=self.latent_axes: return - if self.called and self.move_on: - latent_values = self.latent_values_clicked.copy() - latent_values[self.latent_index] = np.array([event.xdata, event.ydata]) - y = self.model.predict(latent_values[None,:])[0] - self.data_visualize.modify(y) - - def on_leave(self,event): - latent_values = self.latent_values_clicked.copy() - y = self.model.predict(latent_values[None,:])[0] - self.data_visualize.modify(y) - - - - - +import time class data_show: """ @@ -155,6 +41,171 @@ class vector_show(data_show): self.handle.set_data(xdata, self.vals) self.axes.figure.canvas.draw() + +class lvm(data_show): + def __init__(self, vals, model, data_visualize, latent_axes=None, latent_index=[0,1]): + """Visualize a latent variable model + + :param model: the latent variable model to visualize. 
+ :param data_visualize: the object used to visualize the data which has been modelled. + :type data_visualize: visualize.data_show type. + :param latent_axes: the axes where the latent visualization should be plotted. + """ + if vals == None: + vals = model.X[0] + + data_show.__init__(self, vals, axes=latent_axes) + + if isinstance(latent_axes,mpl.axes.Axes): + self.cid = latent_axes.figure.canvas.mpl_connect('button_press_event', self.on_click) + self.cid = latent_axes.figure.canvas.mpl_connect('motion_notify_event', self.on_move) + self.cid = latent_axes.figure.canvas.mpl_connect('axes_leave_event', self.on_leave) + self.cid = latent_axes.figure.canvas.mpl_connect('axes_enter_event', self.on_enter) + else: + self.cid = latent_axes[0].figure.canvas.mpl_connect('button_press_event', self.on_click) + self.cid = latent_axes[0].figure.canvas.mpl_connect('motion_notify_event', self.on_move) + self.cid = latent_axes[0].figure.canvas.mpl_connect('axes_leave_event', self.on_leave) + self.cid = latent_axes[0].figure.canvas.mpl_connect('axes_enter_event', self.on_enter) + + self.data_visualize = data_visualize + self.model = model + self.latent_axes = latent_axes + + self.called = False + self.move_on = False + self.latent_index = latent_index + self.latent_dim = model.Q + + # The red cross which shows current latent point. 
+ self.latent_values = vals + self.latent_handle = self.latent_axes.plot([0],[0],'rx',mew=2)[0] + self.modify(vals) + + def modify(self, vals): + """When latent values are modified update the latent representation and ulso update the output visualization.""" + + y = self.model.predict(vals)[0] + self.data_visualize.modify(y) + self.latent_handle.set_data(vals[self.latent_index[0]], vals[self.latent_index[1]]) + self.axes.figure.canvas.draw() + + + def on_enter(self,event): + pass + def on_leave(self,event): + pass + + def on_click(self, event): + #print 'click', event.xdata, event.ydata + if event.inaxes!=self.latent_axes: return + self.move_on = not self.move_on + # if self.called: + # self.xs.append(event.xdata) + # self.ys.append(event.ydata) + # self.line.set_data(self.xs, self.ys) + # self.line.figure.canvas.draw() + # else: + # self.xs = [event.xdata] + # self.ys = [event.ydata] + # self.line, = self.latent_axes.plot(event.xdata, event.ydata) + self.called = True + def on_move(self, event): + if event.inaxes!=self.latent_axes: return + if self.called and self.move_on: + # Call modify code on move + self.latent_values[self.latent_index[0]]=event.xdata + self.latent_values[self.latent_index[1]]=event.ydata + self.modify(self.latent_values) + +class lvm_subplots(lvm): + """ + latent_axes is a np array of dimension np.ceil(Q/2) + 1, + one for each pair of the axes, and the last one for the sensitiity histogram + """ + def __init__(self, vals, model, data_visualize, latent_axes=None, latent_index=[0,1]): + lvm.__init__(self, vals, model,data_visualize,latent_axes,[0,1]) + self.nplots = int(np.ceil(model.Q/2.))+1 + lvm.__init__(self,model,data_visualize,latent_axes,latent_index) + self.latent_values = np.zeros(2*np.ceil(self.model.Q/2.)) # possibly an extra dimension on this + assert latent_axes.size == self.nplots + + +class lvm_dimselect(lvm): + """ + A visualizer for latent variable models which allows selection of the latent dimensions to use by clicking on a 
histogram of their length scales. + """ + def __init__(self, vals, model, data_visualize, latent_axes=None, hist_axes=None, latent_index=[0, 1]): + if latent_axes==None and hist_axes==None: + self.fig,(latent_axes,self.hist_axes) = plt.subplots(1,2) + elif hist_axes==None: + fig=plt.figure() + self.hist_axes = fig.add_subplot(111) + else: + self.hist_axes = hist_axes + + lvm.__init__(self,vals,model,data_visualize,latent_axes,latent_index) + self.draw_histogram() + print "use left and right mouse butons to select dimensions" + + def draw_histogram(self): + # A click in the histogram axis for selection a dimension. + self.hist_axes.cla() + self.hist_axes.bar(np.arange(self.model.Q),1./self.model.input_sensitivity(),color='b') + + if self.latent_index[1] == self.latent_index[0]: + self.hist_axes.bar(np.array(self.latent_index[0]),1./self.model.input_sensitivity()[self.latent_index[0]],color='y') + self.hist_axes.bar(np.array(self.latent_index[1]),1./self.model.input_sensitivity()[self.latent_index[1]],color='y') + + else: + self.hist_axes.bar(np.array(self.latent_index[0]),1./self.model.input_sensitivity()[self.latent_index[0]],color='g') + self.hist_axes.bar(np.array(self.latent_index[1]),1./self.model.input_sensitivity()[self.latent_index[1]],color='r') + + self.hist_axes.figure.canvas.draw() + + def on_click(self, event): + + if event.inaxes==self.hist_axes: + new_index = max(0,min(int(np.round(event.xdata-0.5)),self.model.Q-1)) + if event.button == 1: + # Make it red if and y-axis (red=port=left) if it is a left button click + self.latent_index[1] = new_index + else: + # Make it green and x-axis (green=starboard=right) if it is a right button click + self.latent_index[0] = new_index + + self.draw_histogram() + + self.latent_axes.cla() + self.model.plot_latent(which_indices=self.latent_index, + ax=self.latent_axes) + self.latent_handle = self.latent_axes.plot([0],[0],'rx',mew=2)[0] + self.modify(self.latent_values) + + elif event.inaxes==self.latent_axes: + 
self.move_on = not self.move_on
+
+ self.called = True
+
+
+ def on_move(self, event):
+ #print "move"
+ if event.inaxes!=self.latent_axes: return
+ if self.called and self.move_on:
+ self.latent_values[self.latent_index[0]]=event.xdata
+ self.latent_values[self.latent_index[1]]=event.ydata
+ self.modify(self.latent_values)
+
+ def on_leave(self,event):
+ latent_values = self.latent_values.copy()
+ y = self.model.predict(latent_values[None,:])[0]
+ self.data_visualize.modify(y)
+
+
+
+
+
+
+
 class image_show(data_show):
 """Show a data vector as an image."""
 def __init__(self, vals, axes=None, dimensions=(16,16), transpose=False, invert=False, scale=False):
@@ -269,12 +320,24 @@ class stick_show(mocap_data_show):
 class skeleton_show(mocap_data_show):
 """data_show class for visualizing motion capture data encoded as a skeleton with angles."""
 def __init__(self, vals, skel, padding=0, axes=None):
+ """data_show class for visualizing motion capture data encoded as a skeleton with angles.
+ :param vals: set of modeled angles to use for printing in the axis when it's first created.
+ :type vals: np.array
+ :param skel: skeleton object that has the parameters of the motion capture skeleton associated with it.
+ :type skel: mocap.skeleton object
+ :param padding:
+ :type padding: int
+ """
 self.skel = skel
 self.padding = padding
 connect = skel.connection_matrix()
 mocap_data_show.__init__(self, vals, axes, connect)

 def process_values(self, vals):
+ """Takes a set of angles and converts them to the x,y,z coordinates in the internal representation of the class, ready for plotting.
+
+ :param vals: the values that are being modelled."""
+
 if self.padding>0:
 channels = np.zeros((vals.shape[0], vals.shape[1]+self.padding))
 channels[:, 0:vals.shape[0]] = vals
@@ -296,3 +359,27 @@ class skeleton_show(mocap_data_show):
 if nVals[i] != nVals[j]:
 connect[i, j] = False
 return vals, connect
+
+
+def data_play(Y, visualizer, frame_rate=30):
+ """Play a data set using the data_show object given.
+
+ :param Y: the data set to be visualized.
+ :param visualizer: the data show object used to display the data
+ :type visualizer: data_show
+
+ Example usage:
+
+ This example loads in the CMU mocap database (http://mocap.cs.cmu.edu) subject number 35 motion number 01. It then plays it using the mocap_show visualize object.
+
+ data = GPy.util.datasets.cmu_mocap(subject='35', train_motions=['01'])
+ Y = data['Y']
+ Y[:, 0:3] = 0. # Make figure walk in place
+ visualize = GPy.util.visualize.skeleton_show(Y[0, :], data['skel'])
+ GPy.util.visualize.data_play(Y, visualize)
+ """
+
+
+ for y in Y:
+ visualizer.modify(y)
+ time.sleep(1./float(frame_rate))

From e941c949e54d11a7e9a00e1eed2ba8479af6007a Mon Sep 17 00:00:00 2001
From: Neil Lawrence
Date: Tue, 30 Apr 2013 07:17:44 +0100
Subject: [PATCH 6/6] Minor tidy up of names in visualize (replace histogram
 with bar chart in lvm_dimselect).

---
 GPy/util/visualize.py | 51 +++++++++++++++++--------------------------
 1 file changed, 20 insertions(+), 31 deletions(-)

diff --git a/GPy/util/visualize.py b/GPy/util/visualize.py
index 9475c85b..bd5f112f 100644
--- a/GPy/util/visualize.py
+++ b/GPy/util/visualize.py
@@ -96,18 +96,8 @@ class lvm(data_show):
 pass

 def on_click(self, event):
- #print 'click', event.xdata, event.ydata
 if event.inaxes!=self.latent_axes: return
 self.move_on = not self.move_on
- # if self.called:
- # self.xs.append(event.xdata)
- # self.ys.append(event.ydata)
- # self.line.set_data(self.xs, self.ys)
- # self.line.figure.canvas.draw()
- # else:
- # self.xs = [event.xdata]
- # self.ys = [event.ydata]
- # self.line, = self.latent_axes.plot(event.xdata, event.ydata)
 self.called = True
 def on_move(self, event):
 if event.inaxes!=self.latent_axes: return
@@ -120,7 +110,7 @@ class lvm(data_show):
 class lvm_subplots(lvm):
 """
 latent_axes is a np array of dimension np.ceil(Q/2) + 1,
- one for each pair of the axes, and the last
one for the sensitiity bar chart """ def __init__(self, vals, model, data_visualize, latent_axes=None, latent_index=[0,1]): lvm.__init__(self, vals, model,data_visualize,latent_axes,[0,1]) @@ -132,39 +122,39 @@ class lvm_subplots(lvm): class lvm_dimselect(lvm): """ - A visualizer for latent variable models which allows selection of the latent dimensions to use by clicking on a histogram of their length scales. + A visualizer for latent variable models which allows selection of the latent dimensions to use by clicking on a bar chart of their length scales. """ - def __init__(self, vals, model, data_visualize, latent_axes=None, hist_axes=None, latent_index=[0, 1]): - if latent_axes==None and hist_axes==None: - self.fig,(latent_axes,self.hist_axes) = plt.subplots(1,2) - elif hist_axes==None: + def __init__(self, vals, model, data_visualize, latent_axes=None, sense_axes=None, latent_index=[0, 1]): + if latent_axes==None and sense_axes==None: + self.fig,(latent_axes,self.sense_axes) = plt.subplots(1,2) + elif sense_axes==None: fig=plt.figure() - self.hist_axes = fig.add_subplot(111) + self.sense_axes = fig.add_subplot(111) else: - self.hist_axes = hist_axes + self.sense_axes = sense_axes lvm.__init__(self,vals,model,data_visualize,latent_axes,latent_index) - self.draw_histogram() + self.show_sensitivities() print "use left and right mouse butons to select dimensions" - def draw_histogram(self): - # A click in the histogram axis for selection a dimension. - self.hist_axes.cla() - self.hist_axes.bar(np.arange(self.model.Q),1./self.model.input_sensitivity(),color='b') + def show_sensitivities(self): + # A click in the bar chart axis for selection a dimension. 
+ self.sense_axes.cla() + self.sense_axes.bar(np.arange(self.model.Q),1./self.model.input_sensitivity(),color='b') if self.latent_index[1] == self.latent_index[0]: - self.hist_axes.bar(np.array(self.latent_index[0]),1./self.model.input_sensitivity()[self.latent_index[0]],color='y') - self.hist_axes.bar(np.array(self.latent_index[1]),1./self.model.input_sensitivity()[self.latent_index[1]],color='y') + self.sense_axes.bar(np.array(self.latent_index[0]),1./self.model.input_sensitivity()[self.latent_index[0]],color='y') + self.sense_axes.bar(np.array(self.latent_index[1]),1./self.model.input_sensitivity()[self.latent_index[1]],color='y') else: - self.hist_axes.bar(np.array(self.latent_index[0]),1./self.model.input_sensitivity()[self.latent_index[0]],color='g') - self.hist_axes.bar(np.array(self.latent_index[1]),1./self.model.input_sensitivity()[self.latent_index[1]],color='r') + self.sense_axes.bar(np.array(self.latent_index[0]),1./self.model.input_sensitivity()[self.latent_index[0]],color='g') + self.sense_axes.bar(np.array(self.latent_index[1]),1./self.model.input_sensitivity()[self.latent_index[1]],color='r') - self.hist_axes.figure.canvas.draw() + self.sense_axes.figure.canvas.draw() def on_click(self, event): - if event.inaxes==self.hist_axes: + if event.inaxes==self.sense_axes: new_index = max(0,min(int(np.round(event.xdata-0.5)),self.model.Q-1)) if event.button == 1: # Make it red if and y-axis (red=port=left) if it is a left button click @@ -173,7 +163,7 @@ class lvm_dimselect(lvm): # Make it green and x-axis (green=starboard=right) if it is a right button click self.latent_index[0] = new_index - self.draw_histogram() + self.show_sensitivities() self.latent_axes.cla() self.model.plot_latent(which_indices=self.latent_index, @@ -188,7 +178,6 @@ class lvm_dimselect(lvm): def on_move(self, event): - #print "move" if event.inaxes!=self.latent_axes: return if self.called and self.move_on: self.latent_values[self.latent_index[0]]=event.xdata