BGPLVM MRD Examples and plotting adjustments

Max Zwiessele 2013-05-08 15:37:21 +01:00
parent 2d9ecaa042
commit 65ead17ff5
3 changed files with 90 additions and 53 deletions

GPy/examples/dimensionality_reduction.py

@@ -8,6 +8,7 @@ from matplotlib import pyplot as plt, pyplot
import GPy
from GPy.models.Bayesian_GPLVM import Bayesian_GPLVM
from GPy.util.datasets import simulation_BGPLVM
+from GPy.core.transformations import square, logexp_clipped
default_seed = np.random.seed(123344)
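logexp_clipped, imported above and used for the 'variance' constraints below, is a positivity transform: a parameter is optimized unconstrained as x and mapped through a softplus that is clipped away from zero for numerical stability. A minimal sketch of the mapping, not GPy's exact implementation:

    import numpy as np

    def logexp_clipped_sketch(x, lim=1e-10):
        f = np.logaddexp(0., x)       # softplus: log(1 + exp(x)), overflow-safe
        return np.clip(f, lim, None)  # keep the constrained parameter strictly positive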
@@ -62,17 +63,21 @@ def GPLVM_oil_100(optimize=True):
m.plot_latent(labels=m.data_labels)
return m
-def BGPLVM_oil(optimize=True, N=100, Q=10, M=15, max_f_eval=300):
+def BGPLVM_oil(optimize=True, N=100, Q=10, M=15, max_f_eval=300, plot=False):
data = GPy.util.datasets.oil()
# create simple GP model
-kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q) + GPy.kern.white(Q, 0.001)
-m = GPy.models.Bayesian_GPLVM(data['X'][:N], Q, kernel=kernel, M=M)
+kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q, np.exp(-2)) + GPy.kern.white(Q, np.exp(-2))
+Y = data['X'][:N]
+m = GPy.models.Bayesian_GPLVM(Y, Q, kernel=kernel, M=M)
m.data_labels = data['Y'][:N].argmax(axis=1)
# optimize
if optimize:
-m.constrain_fixed('noise', 0.05)
+m.constrain_fixed('noise', 1. / Y.var())
+m.constrain('variance', logexp_clipped())
+m['lengt'] = 1000
m.ensure_default_constraints()
m.optimize('scg', messages=1, max_f_eval=max(80, max_f_eval))
m.unconstrain('noise')
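The m['lengt'] = 1000 assignment above is not a typo: old-style GPy addresses parameters by regular-expression match on their names, so the partial key 'lengt' sets every lengthscale at once. A sketch of that lookup idea, with hypothetical parameter names (not GPy's actual code):

    import re

    names = ['rbf_variance', 'rbf_lengthscale_0', 'rbf_lengthscale_1', 'bias_variance']
    hits = [n for n in names if re.search('lengt', n)]
    # hits == ['rbf_lengthscale_0', 'rbf_lengthscale_1'] -- both get set to 1000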
@@ -81,12 +86,13 @@ def BGPLVM_oil(optimize=True, N=100, Q=10, M=15, max_f_eval=300):
else:
m.ensure_default_constraints()
+if plot:
y = m.likelihood.Y[0, :]
fig, (latent_axes, hist_axes) = plt.subplots(1, 2)
plt.sca(latent_axes)
m.plot_latent()
data_show = GPy.util.visualize.vector_show(y)
-lvm_visualizer = GPy.util.visualize.lvm_dimselect(m.X[0, :], m, data_show, latent_axes=latent_axes, sense_axes=sense_axes)
+lvm_visualizer = GPy.util.visualize.lvm_dimselect(m.X[0, :], m, data_show, latent_axes=latent_axes) # , sense_axes=sense_axes)
raw_input('Press enter to finish')
plt.close('all')
# # plot
@@ -221,6 +227,8 @@ def bgplvm_simulation(burnin='scg', plot_sim=False,
# k = kern.white(Q, .00001) + kern.bias(Q)
m = Bayesian_GPLVM(Y, Q, init="PCA", M=M, kernel=k, _debug=True)
# m.set('noise',)
+m.constrain('variance', logexp_clipped())
m.ensure_default_constraints()
m['noise'] = Y.var() / 100.
m['linear_variance'] = .001
@@ -304,7 +312,7 @@ def mrd_simulation(plot_sim=False):
# Y2 = np.random.multivariate_normal(np.zeros(N), k.K(X), D2).T
# Y2 -= Y2.mean(0)
# make_params = lambda ard: np.hstack([[1], ard, [1, .3]])
-D1, D2, D3, N, M, Q = 2000, 34, 8, 500, 3, 6
+D1, D2, D3, N, M, Q = 150, 250, 300, 700, 10, 7
slist, Slist, Ylist = _simulate_sincos(D1, D2, D3, N, M, Q, plot_sim)
from GPy.models import mrd
@@ -316,18 +324,19 @@ def mrd_simulation(plot_sim=False):
# Y2 = np.random.multivariate_normal(np.zeros(N), k.K(S2), D2).T
# Y3 = np.random.multivariate_normal(np.zeros(N), k.K(S3), D3).T
-Ylist = Ylist[0:2]
+# Ylist = Ylist[0:2]
# k = kern.rbf(Q, ARD=True) + kern.bias(Q) + kern.white(Q)
-k = kern.linear(Q, ARD=True) + kern.bias(Q, .01) + kern.white(Q, .001)
+k = kern.linear(Q, ARD=True) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2))
m = mrd.MRD(*Ylist, Q=Q, M=M, kernel=k, initx="concat", initz='permute', _debug=False)
for i, Y in enumerate(Ylist):
-m.set('{}_noise'.format(i + 1), Y.var() / 100.)
+m['{}_noise'.format(i + 1)] = Y.var() / 100.
+m.constrain('variance', logexp_clipped())
m.ensure_default_constraints()
-m.auto_scale_factor = True
+# m.auto_scale_factor = True
# cstr = 'variance'
# m.unconstrain(cstr), m.constrain_bounded(cstr, 1e-12, 1.)
@@ -335,19 +344,19 @@ def mrd_simulation(plot_sim=False):
# cstr = 'linear_variance'
# m.unconstrain(cstr), m.constrain_positive(cstr)
# print "initializing beta"
# cstr = "noise"
# m.unconstrain(cstr); m.constrain_fixed(cstr)
# m.optimize('scg', messages=1, max_f_eval=100)
print "initializing beta"
cstr = "noise"
m.unconstrain(cstr); m.constrain_fixed(cstr)
m.optimize('scg', messages=1, max_f_eval=200, gtol=11)
# print "releasing beta"
# cstr = "noise"
# m.unconstrain(cstr); m.constrain_positive(cstr)
print "releasing beta"
cstr = "noise"
m.unconstrain(cstr); m.constrain(cstr, logexp_clipped())
-np.seterr(all='call')
-def ipdbonerr(errtype, flags):
-import ipdb; ipdb.set_trace()
-np.seterrcall(ipdbonerr)
+# np.seterr(all='call')
+# def ipdbonerr(errtype, flags):
+# import ipdb; ipdb.set_trace()
+# np.seterrcall(ipdbonerr)
return m # , mtest
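The uncommenting above enables a two-stage schedule: the noise ('beta') parameters are first held fixed so the initial SCG run fits the latent space and kernel rather than inflating the noise, and only then released, now under the clipped-softplus constraint instead of a plain positivity constraint. In outline:

    print "initializing beta"
    m.unconstrain('noise'); m.constrain_fixed('noise')   # hold noise fixed
    m.optimize('scg', messages=1, max_f_eval=200, gtol=11)
    print "releasing beta"
    m.unconstrain('noise'); m.constrain('noise', logexp_clipped())  # now learn it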

GPy/models/Bayesian_GPLVM.py

@@ -54,6 +54,7 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
self._savedgradients = []
self._savederrors = []
self._savedpsiKmm = []
+self._savedABCD = []
sparse_GP.__init__(self, X, Gaussian(Y), kernel, Z=Z, X_variance=X_variance, **kwargs)
@@ -135,6 +136,17 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
self._savedparams.append([self.f_call, self._get_params()])
self._savedgradients.append([self.f_call, self._log_likelihood_gradients()])
self._savedpsiKmm.append([self.f_call, [self.Kmm, self.dL_dKmm]])
+sf2 = self.scale_factor ** 2
+if self.likelihood.is_heteroscedastic:
+A = -0.5 * self.N * self.D * np.log(2.*np.pi) + 0.5 * np.sum(np.log(self.likelihood.precision)) - 0.5 * np.sum(self.V * self.likelihood.Y)
+B = -0.5 * self.D * (np.sum(self.likelihood.precision.flatten() * self.psi0) - np.trace(self.A) * sf2)
+else:
+A = -0.5 * self.N * self.D * (np.log(2.*np.pi) + np.log(self.likelihood._variance)) - 0.5 * self.likelihood.precision * self.likelihood.trYYT
+B = -0.5 * self.D * (np.sum(self.likelihood.precision * self.psi0) - np.trace(self.A) * sf2)
+C = -self.D * (np.sum(np.log(np.diag(self.LB))) + 0.5 * self.M * np.log(sf2))
+D = 0.5 * np.sum(np.square(self._LBi_Lmi_psi1V))
+self._savedABCD.append([self.f_call, A, B, C, D])
# print "\nkl:", kl, "ll:", ll
return ll - kl
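The quantities logged into _savedABCD mirror the terms of the collapsed variational lower bound of the sparse GP (Titsias-style). Ignoring the scale factor (sf2 = 1) and writing beta = 1/sigma^2, the homoscedastic branch reads, roughly:

    \begin{align*}
    A &= -\tfrac{ND}{2}\log(2\pi\sigma^2) - \tfrac{\beta}{2}\operatorname{tr}(YY^\top) \\
    B &= -\tfrac{\beta D}{2}\left(\psi_0 - \operatorname{tr}(K_{MM}^{-1}\Psi_2)\right) \\
    C &= -\tfrac{D}{2}\log\left|I + \beta L_m^{-1}\Psi_2 L_m^{-\top}\right| \\
    D &= \tfrac{1}{2}\left\|L_B^{-1}L_m^{-1}\Psi_1^\top V\right\|_F^2, \qquad V = \beta Y,
    \end{align*}

where L_m is the Cholesky factor of K_{MM} and L_B that of the matrix inside the determinant (self.LB); ll is A + B + C + D, and the KL term is subtracted separately in "return ll - kl".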
@@ -169,7 +181,7 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
ax.plot(self.Z[:, input_1], self.Z[:, input_2], '^w')
return ax
def plot_X_1d(self, fig=None, axes=None, fig_num="MRD X 1d", colors=None):
def plot_X_1d(self, fig=None, axes=None, fig_num="LVM mu S 1d", colors=None):
"""
Plot latent space X in 1D:
@@ -196,7 +208,7 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
else:
ax = axes[i]
ax.plot(self.X, c='k', alpha=.3)
-plots.extend(ax.plot(self.X.T[i], c=colors.next(), label=r"$\mathbf{{X_{}}}$".format(i)))
+plots.extend(ax.plot(self.X.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i)))
ax.fill_between(np.arange(self.X.shape[0]),
self.X.T[i] - 2 * np.sqrt(self.X_variance.T[i]),
self.X.T[i] + 2 * np.sqrt(self.X_variance.T[i]),
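The label change above fixes str.format brace escaping: the old pattern leaves the index outside a TeX group, so only its first digit is subscripted once there are ten or more latent dimensions. For example:

    r"$\mathbf{{X_{}}}$".format(10)      # -> '$\mathbf{X_10}$'  (TeX subscripts only the 1)
    r"$\mathbf{{X_{{{}}}}}$".format(10)  # -> '$\mathbf{X_{10}}$' (the whole index is subscripted)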
@@ -251,6 +263,7 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
gradient_dict = dict(self._savedgradients)
kmm_dict = dict(self._savedpsiKmm)
iters = np.array(param_dict.keys())
+ABCD_dict = np.array(self._savedABCD)
self.showing = 0
# ax2 = pylab.subplot2grid(splotshape, (1, 0), 2, 4)
@@ -281,14 +294,26 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
ha='center', va='center')
figs[-1].canvas.draw()
figs[-1].tight_layout(rect=(.15, 0, 1, .86))
-# figs.append(pylab.figure("BGPLVM DEBUG Kmm", figsize=(12, 6)))
-# fig = figs[-1]
-# ax6 = fig.add_subplot(121)
-# ax6.text(.5, .5, r"${\mathbf{K}_{mm}}$", color='magenta', alpha=.5, transform=ax6.transAxes,
-# ha='center', va='center')
-# ax7 = fig.add_subplot(122)
-# ax7.text(.5, .5, r"${\frac{dL}{dK_{mm}}}$", color='magenta', alpha=.5, transform=ax7.transAxes,
-# ha='center', va='center')
+figs.append(pylab.figure("BGPLVM DEBUG Kmm", figsize=(12, 6)))
+fig = figs[-1]
+ax6 = fig.add_subplot(121)
+ax6.text(.5, .5, r"${\mathbf{K}_{mm}}$", color='magenta', alpha=.5, transform=ax6.transAxes,
+ha='center', va='center')
+ax7 = fig.add_subplot(122)
+ax7.text(.5, .5, r"${\frac{dL}{dK_{mm}}}$", color='magenta', alpha=.5, transform=ax7.transAxes,
+ha='center', va='center')
+ax8 = fig.add_subplot(121)
+ax8.text(.5, .5, r"${\mathbf{A,B,C,D}}$", color='k', alpha=.5, transform=ax8.transAxes,
+ha='center', va='center')
+ax8.plot(ABCD_dict[:, 0], ABCD_dict[:, 1], label='A')
+ax8.plot(ABCD_dict[:, 0], ABCD_dict[:, 2], label='B')
+ax8.plot(ABCD_dict[:, 0], ABCD_dict[:, 3], label='C')
+ax8.plot(ABCD_dict[:, 0], ABCD_dict[:, 4], label='D')
+ax8.legend()
+figs[-1].canvas.draw()
+figs[-1].tight_layout(rect=(.15, 0, 1, .86))
X, S, Z, theta = self._debug_filter_params(param_dict[self.showing])
Xg, Sg, Zg, thetag = self._debug_filter_params(gradient_dict[self.showing])
@@ -330,16 +355,16 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
ax5.set_xticks(np.arange(len(theta)))
ax5.set_xticklabels(self._get_param_names()[-len(theta):], rotation=17)
-imkmm = ax6.imshow(kmm_dict[self.showing][0])
-from mpl_toolkits.axes_grid1 import make_axes_locatable
-divider = make_axes_locatable(ax6)
-caxkmm = divider.append_axes("right", "5%", pad="1%")
-cbarkmm = pylab.colorbar(imkmm, cax=caxkmm)
-imkmmdl = ax7.imshow(kmm_dict[self.showing][1])
-divider = make_axes_locatable(ax7)
-caxkmmdl = divider.append_axes("right", "5%", pad="1%")
-cbarkmmdl = pylab.colorbar(imkmmdl, cax=caxkmmdl)
+# imkmm = ax6.imshow(kmm_dict[self.showing][0])
+# from mpl_toolkits.axes_grid1 import make_axes_locatable
+# divider = make_axes_locatable(ax6)
+# caxkmm = divider.append_axes("right", "5%", pad="1%")
+# cbarkmm = pylab.colorbar(imkmm, cax=caxkmm)
+#
+# imkmmdl = ax7.imshow(kmm_dict[self.showing][1])
+# divider = make_axes_locatable(ax7)
+# caxkmmdl = divider.append_axes("right", "5%", pad="1%")
+# cbarkmmdl = pylab.colorbar(imkmmdl, cax=caxkmmdl)
# Qleg = ax1.legend(Xlatentplts, [r"$Q_{}$".format(i + 1) for i in range(self.Q)],
# loc=3, ncol=self.Q, bbox_to_anchor=(0, 1.15, 1, 1.15),
@@ -420,13 +445,13 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
thetagrads.set_offsets(np.array([xtheta.ravel(), theta.ravel()]).T)
thetagrads.set_UVC(Utheta, thetag)
-imkmm.set_data(kmm_dict[self.showing][0])
-imkmm.autoscale()
-cbarkmm.update_normal(imkmm)
-imkmmdl.set_data(kmm_dict[self.showing][1])
-imkmmdl.autoscale()
-cbarkmmdl.update_normal(imkmmdl)
+# imkmm.set_data(kmm_dict[self.showing][0])
+# imkmm.autoscale()
+# cbarkmm.update_normal(imkmm)
+#
+# imkmmdl.set_data(kmm_dict[self.showing][1])
+# imkmmdl.autoscale()
+# cbarkmmdl.update_normal(imkmmdl)
ax2.relim()
# ax3.relim()
@@ -452,4 +477,4 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
cidp = figs[0].canvas.mpl_connect('button_press_event', onclick)
cidd = figs[0].canvas.mpl_connect('motion_notify_event', motion)
-return ax1, ax2, ax3, ax4, ax5, ax6, ax7
+return ax1, ax2, ax3, ax4, ax5 # , ax6, ax7

GPy/models/mrd.py

@@ -287,6 +287,9 @@ class MRD(model):
else:
return pylab.gcf()
+def plot_X_1d(self):
+return self.gref.plot_X_1d()
def plot_X(self, fig_num="MRD Predictions", axes=None):
fig = self._handle_plotting(fig_num, axes, lambda i, g, ax: ax.imshow(g.X))
return fig
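The new MRD.plot_X_1d hook simply forwards to the reference model gref. A usage sketch, assuming the model returned by mrd_simulation above:

    m = mrd_simulation()
    m.plot_X_1d()  # per-dimension latent means with +/- 2 std bands, drawn by the reference model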