fixed the bug in stick

James Hensman 2013-05-13 10:26:54 +01:00
parent b273c76fd7
commit 529d7534ca
5 changed files with 41 additions and 36 deletions

View file

@@ -92,7 +92,7 @@ def BGPLVM_oil(optimize=True, N=100, Q=10, M=20, max_f_eval=300, plot=False):
plt.sca(latent_axes)
m.plot_latent()
data_show = GPy.util.visualize.vector_show(y)
- lvm_visualizer = GPy.util.visualize.lvm_dimselect(m.X[0, :], m, data_show, latent_axes=latent_axes) # , sense_axes=sense_axes)
+ lvm_visualizer = GPy.util.visualize.lvm_dimselect(m.X[0, :].copy(), m, data_show, latent_axes=latent_axes) # , sense_axes=sense_axes)
raw_input('Press enter to finish')
plt.close('all')
# # plot
@@ -376,7 +376,7 @@ def brendan_faces():
ax = m.plot_latent()
y = m.likelihood.Y[0, :]
data_show = GPy.util.visualize.image_show(y[None, :], dimensions=(20, 28), transpose=True, invert=False, scale=False)
- lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :], m, data_show, ax)
+ lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
raw_input('Press enter to finish')
plt.close('all')
@@ -389,11 +389,12 @@ def stick():
# optimize
m.ensure_default_constraints()
m.optimize(messages=1, max_f_eval=10000)
+ m._set_params(m._get_params())
ax = m.plot_latent()
y = m.likelihood.Y[0, :]
data_show = GPy.util.visualize.stick_show(y[None, :], connect=data['connect'])
- lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :], m, data_show, ax)
+ lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
raw_input('Press enter to finish')
plt.close('all')
@@ -415,7 +416,7 @@ def cmu_mocap(subject='35', motion=['01'], in_place=True):
ax = m.plot_latent()
y = m.likelihood.Y[0, :]
data_show = GPy.util.visualize.skeleton_show(y[None, :], data['skel'])
- lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :], m, data_show, ax)
+ lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
raw_input('Press enter to finish')
plt.close('all')
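
All four .copy() edits in this file are the same fix, and the one in stick() is the bug named in the commit title: m.X[0, :] is a NumPy view into the model's latent matrix, so the interactive visualizers were mutating the model's parameters in place as the cursor moved. A minimal sketch of the aliasing, with X standing in for the latent matrix (illustrative names, not GPy's API):

import numpy as np

X = np.zeros((3, 2))       # stand-in for a model's latent coordinates
point = X[0, :]            # basic slicing returns a view sharing X's memory
point[0] = 1.0
assert X[0, 0] == 1.0      # mutating the view silently modified the model

point = X[0, :].copy()     # the fix: hand the visualizer an independent copy
point[1] = 5.0
assert X[0, 1] == 0.0      # the model's values stay untouched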

View file

@@ -38,7 +38,6 @@ class bias(kernpart):
def dK_dtheta(self,dL_dKdiag,X,X2,target):
target += dL_dKdiag.sum()
def dKdiag_dtheta(self,dL_dKdiag,X,target):
target += dL_dKdiag.sum()

View file

@@ -252,7 +252,7 @@ class kern(parameterised):
which_parts = [True]*self.Nparts
assert X.shape[1] == self.D
target = np.zeros(X.shape[0])
- [p.Kdiag(X[:, i_s], target=target) for p, i_s in zip(self.parts, self.input_slices)]
+ [p.Kdiag(X[:, i_s], target=target) for p, i_s, part_on in zip(self.parts, self.input_slices, which_parts) if part_on]
return target
def dKdiag_dtheta(self, dL_dKdiag, X):
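
The rewritten comprehension above makes Kdiag honor which_parts, which the old line silently ignored: a boolean mask is zipped alongside the additive parts and inactive parts are skipped. A hedged sketch of the idiom with stand-in parts (not GPy's kernpart API):

import numpy as np

def bias_part(X, target):       # mimics a kernpart's Kdiag: accumulate in place
    target += 1.0
def linear_part(X, target):
    target += np.sum(X * X, axis=1)

parts = [bias_part, linear_part]
input_slices = [slice(None), slice(None)]
which_parts = [True, False]     # only the first part is active

X = np.random.randn(5, 2)
target = np.zeros(X.shape[0])
[p(X[:, i_s], target) for p, i_s, part_on in zip(parts, input_slices, which_parts) if part_on]
# target now holds only the active parts' contributions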

View file

@@ -3,10 +3,11 @@
import numpy as np
from scipy import linalg
import pylab as pb
from .. import kern
from ..core import model
- from ..util.linalg import pdinv, mdot
+ from ..util.linalg import pdinv, mdot, tdot
from ..util.plot import gpplot, x_frame1D, x_frame2D, Tango
from ..likelihoods import EP
@@ -58,13 +59,12 @@ class GP(model):
"""
TODO: one day we might like to learn Z by gradient methods?
"""
#FIXME: this doesn't live here.
return np.zeros_like(self.Z)
def _set_params(self, p):
self.kern._set_params_transformed(p[:self.kern.Nparam_transformed()])
- # self.likelihood._set_params(p[self.kern.Nparam:]) # test by Nicolas
- self.likelihood._set_params(p[self.kern.Nparam_transformed():]) # test by Nicolas
+ self.likelihood._set_params(p[self.kern.Nparam_transformed():])
self.K = self.kern.K(self.X)
self.K += self.likelihood.covariance_matrix
@@ -73,10 +73,14 @@ class GP(model):
# the gradient of the likelihood wrt the covariance matrix
if self.likelihood.YYT is None:
- alpha = np.dot(self.Ki, self.likelihood.Y)
- self.dL_dK = 0.5 * (np.dot(alpha, alpha.T) - self.D * self.Ki)
+ #alpha = np.dot(self.Ki, self.likelihood.Y)
+ alpha,_ = linalg.lapack.flapack.dpotrs(self.L, self.likelihood.Y,lower=1)
+ self.dL_dK = 0.5 * (tdot(alpha) - self.D * self.Ki)
else:
- tmp = mdot(self.Ki, self.likelihood.YYT, self.Ki)
+ #tmp = mdot(self.Ki, self.likelihood.YYT, self.Ki)
+ tmp, _ = linalg.lapack.flapack.dpotrs(self.L, np.asfortranarray(self.likelihood.YYT), lower=1)
+ tmp, _ = linalg.lapack.flapack.dpotrs(self.L, np.asfortranarray(tmp.T), lower=1)
self.dL_dK = 0.5 * (tmp - self.D * self.Ki)
def _get_params(self):
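
The dL_dK rewrites above stop multiplying by the explicit inverse self.Ki: given the lower Cholesky factor self.L of K, LAPACK's dpotrs solves K x = B directly, and GPy's tdot(alpha) is alpha.dot(alpha.T). The linalg.lapack.flapack path is SciPy's 2013-era namespace; a sketch of the same solve with the modern scipy.linalg.lapack spelling:

import numpy as np
from scipy import linalg

K = np.array([[2.0, 0.5],
              [0.5, 1.0]])                 # a small SPD covariance
B = np.arange(4.0).reshape(2, 2)

L = linalg.cholesky(K, lower=True)         # K = L L^T
alpha, info = linalg.lapack.dpotrs(L, np.asfortranarray(B), lower=1)
assert info == 0
assert np.allclose(alpha, np.linalg.solve(K, B))   # same as Ki.dot(B), without forming Ki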
@@ -100,7 +104,9 @@ class GP(model):
Computes the model fit using YYT if it's available
"""
if self.likelihood.YYT is None:
- return -0.5 * np.sum(np.square(np.dot(self.Li, self.likelihood.Y)))
+ tmp, _ = linalg.lapack.flapack.dtrtrs(self.L, np.asfortranarray(self.likelihood.Y), lower=1)
+ return -0.5 * np.sum(np.square(tmp))
+ #return -0.5 * np.sum(np.square(np.dot(self.Li, self.likelihood.Y)))
else:
return -0.5 * np.sum(np.multiply(self.Ki, self.likelihood.YYT))
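
Similarly, the fit term now uses a triangular solve instead of the stored inverse factor self.Li: with K = L L^T, -0.5 * sum((L^{-1} Y)**2) equals -0.5 * trace(K^{-1} Y Y^T). A sketch under the same modern-namespace assumption:

import numpy as np
from scipy import linalg

K = np.array([[2.0, 0.5],
              [0.5, 1.0]])
Y = np.random.randn(2, 3)

L = linalg.cholesky(K, lower=True)
tmp, info = linalg.lapack.dtrtrs(L, Y, lower=1)    # solves L tmp = Y
fit = -0.5 * np.sum(np.square(tmp))
assert np.isclose(fit, -0.5 * np.trace(np.linalg.solve(K, Y.dot(Y.T))))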
@@ -123,18 +129,15 @@ class GP(model):
"""
return np.hstack((self.kern.dK_dtheta(dL_dK=self.dL_dK, X=self.X), self.likelihood._gradients(partial=np.diag(self.dL_dK))))
- def _raw_predict(self, _Xnew, which_parts='all', full_cov=False):
+ def _raw_predict(self, _Xnew, which_parts='all', full_cov=False,stop=False):
"""
Internal helper function for making predictions, does not account
for normalization or likelihood
#TODO: which_parts does nothing
"""
- Kx = self.kern.K(self.X, _Xnew,which_parts=which_parts)
- mu = np.dot(np.dot(Kx.T, self.Ki), self.likelihood.Y)
- KiKx = np.dot(self.Ki, Kx)
+ Kx = self.kern.K(_Xnew,self.X,which_parts=which_parts).T
+ #KiKx = np.dot(self.Ki, Kx)
+ KiKx, _ = linalg.lapack.flapack.dpotrs(self.L, np.asfortranarray(Kx), lower=1)
+ mu = np.dot(KiKx.T, self.likelihood.Y)
if full_cov:
Kxx = self.kern.K(_Xnew, which_parts=which_parts)
var = Kxx - np.dot(KiKx.T, Kx)
@@ -142,6 +145,8 @@ class GP(model):
Kxx = self.kern.Kdiag(_Xnew, which_parts=which_parts)
var = Kxx - np.sum(np.multiply(KiKx, Kx), 0)
var = var[:, None]
+ if stop:
+     debug_this
return mu, var
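
The reshuffled _raw_predict computes the usual GP predictive equations, mu = Kx^T K^{-1} Y and var = Kxx - Kx^T K^{-1} Kx, with a single dpotrs solve in place of the cached inverse. A small numeric check of the rearrangement (toy values, modern lapack namespace):

import numpy as np
from scipy import linalg

K = np.array([[2.0, 0.5],
              [0.5, 1.0]])                # train-train covariance
Kx = np.array([[0.3], [0.7]])             # train-test covariance
Y = np.array([[1.0], [-1.0]])
Kxx_diag = np.array([2.0])                # test-test prior variance

L = linalg.cholesky(K, lower=True)
KiKx, _ = linalg.lapack.dpotrs(L, np.asfortranarray(Kx), lower=1)
mu = np.dot(KiKx.T, Y)                    # Kx^T K^{-1} Y
var = Kxx_diag - np.sum(KiKx * Kx, 0)     # diag(Kxx - Kx^T K^{-1} Kx)

assert np.allclose(mu, Kx.T.dot(np.linalg.solve(K, Y)))
assert np.allclose(var, Kxx_diag - np.diag(Kx.T.dot(np.linalg.solve(K, Kx))))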
@@ -178,7 +183,8 @@ class GP(model):
def plot_f(self, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, full_cov=False):
"""
- Plot the GP's view of the world, where the data is normalized and the likelihood is Gaussian
+ Plot the GP's view of the world, where the data is normalized and the
+ likelihood is Gaussian.
:param samples: the number of a posteriori samples to plot
:param which_data: which of the training data to plot (default all)
@@ -193,8 +199,8 @@ class GP(model):
- In two dimensions, a contour-plot shows the mean predicted function
- In higher dimensions, we've not implemented this yet !TODO!
- Can plot only part of the data and part of the posterior functions using which_data and which_functions
- Plot the data's view of the world, with non-normalized values and GP predictions passed through the likelihood
+ Can plot only part of the data and part of the posterior functions
+ using which_data and which_functions
"""
if which_data == 'all':
which_data = slice(None)

View file

@@ -14,7 +14,7 @@ class data_show:
"""
def __init__(self, vals, axes=None):
- self.vals = vals
+ self.vals = vals.copy()
# If no axes are defined, create some.
if axes==None:
fig = plt.figure()
@@ -32,12 +32,12 @@ class vector_show(data_show):
"""
def __init__(self, vals, axes=None):
data_show.__init__(self, vals, axes)
- self.vals = vals.T
+ self.vals = vals.T.copy()
self.handle = self.axes.plot(np.arange(0, len(vals))[:, None], self.vals)[0]
def modify(self, vals):
xdata, ydata = self.handle.get_data()
- self.vals = vals.T
+ self.vals = vals.T.copy()
self.handle.set_data(xdata, self.vals)
self.axes.figure.canvas.draw()
@@ -52,7 +52,7 @@ class lvm(data_show):
:param latent_axes: the axes where the latent visualization should be plotted.
"""
if vals == None:
- vals = model.X[0]
+ vals = model.X[0].copy()
data_show.__init__(self, vals, axes=latent_axes)
@@ -76,14 +76,13 @@ class lvm(data_show):
self.latent_index = latent_index
self.latent_dim = model.Q
- # The red cross which shows current latent point.
# The red cross which shows current latent point.
self.latent_values = vals
self.latent_handle = self.latent_axes.plot([0],[0],'rx',mew=2)[0]
self.modify(vals)
def modify(self, vals):
"""When latent values are modified update the latent representation and ulso update the output visualization."""
y = self.model.predict(vals)[0]
self.data_visualize.modify(y)
self.latent_handle.set_data(vals[self.latent_index[0]], vals[self.latent_index[1]])
@@ -104,7 +103,7 @@ class lvm(data_show):
if self.called and self.move_on:
# Call modify code on move
self.latent_values[self.latent_index[0]]=event.xdata
- self.latent_values[self.latent_index[1]]=event.ydata
+ self.latent_values[self.latent_index[1]]=event.ydata
self.modify(self.latent_values)
class lvm_subplots(lvm):
@@ -216,11 +215,11 @@ class image_show(data_show):
plt.show()
def set_image(self, vals):
- self.vals = np.reshape(vals, self.dimensions, order='F')
+ self.vals = np.reshape(vals, self.dimensions, order='F').copy()
if self.transpose:
- self.vals = self.vals.T
+ self.vals = self.vals.T.copy()
if not self.scale:
- self.vals = self.vals
+ self.vals = self.vals.copy()
#if self.invert:
# self.vals = -self.vals
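
set_image above reshapes a flat frame column-major, then transposes it for display; the added .copy() calls keep the stored array decoupled from the caller's buffer and from the intermediate views. A small sketch using the 20x28 dimensions from the brendan_faces example:

import numpy as np

vals = np.arange(20 * 28, dtype=float)              # one flattened frame
img = np.reshape(vals, (20, 28), order='F').copy()  # column-major, own memory
img = img.T.copy()                                  # the transpose is a view until copied
assert img.shape == (28, 20)
assert img.base is None                             # no aliasing back to vals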
@@ -304,7 +303,7 @@ class stick_show(mocap_data_show):
mocap_data_show.__init__(self, vals, axes, connect)
def process_values(self, vals):
- self.vals = vals.reshape((3, vals.shape[1]/3)).T
+ self.vals = vals.reshape((3, vals.shape[1]/3)).T.copy()
class skeleton_show(mocap_data_show):
"""data_show class for visualizing motion capture data encoded as a skeleton with angles."""