Mirror of https://github.com/SheffieldML/GPy.git, synced 2026-05-11 13:02:38 +02:00
dimensionality reduction plotting adjusted to new syntax
commit ffb6eb414b, parent 312cfebcb1
3 changed files with 17 additions and 20 deletions

@@ -263,7 +263,7 @@ def bgplvm_simulation(optimize='scg',
     # m.constrain('variance|noise', logexp_clipped())
     m.ensure_default_constraints()
     m['noise'] = Y.var() / 100.
-    m['linear_variance'] = .001
+    m['linear_variance'] = .01
 
     if optimize:
         print "Optimizing model:"

@@ -271,11 +271,8 @@ def bgplvm_simulation(optimize='scg',
                    max_f_eval=max_f_eval,
                    messages=True, gtol=1e-6)
     if plot:
-        import pylab
-        m.plot_X_1d()
-        pylab.figure('BGPLVM Simulation ARD Parameters');
-        pylab.axis();
-        m.kern.plot_ARD()
+        m.plot_X_1d("BGPLVM Latent Space 1D")
+        m.kern.plot_ARD('BGPLVM Simulation ARD Parameters')
     return m
 
 def mrd_simulation(optimize=True, plot=True, plot_sim=True, **kw):
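The demo no longer builds its own pylab figure; the figure titles are passed straight to the model's plotting helpers. A rough usage sketch of the updated demo follows. The GPy.examples.dimensionality_reduction import path and the plot=False keyword are assumptions; the plotting calls themselves mirror the hunk above.

import pylab
import GPy

# Usage sketch only; module path is assumed, bgplvm_simulation returns the
# fitted model (see `return m` in the diff above).
m = GPy.examples.dimensionality_reduction.bgplvm_simulation(optimize='scg', plot=False)
m.plot_X_1d("BGPLVM Latent Space 1D")                # named figure, no manual pylab.figure() call
m.kern.plot_ARD('BGPLVM Simulation ARD Parameters')  # ARD scales in their own named figure
pylab.show()
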
@@ -241,22 +241,22 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
         x = np.arange(self.X.shape[0])
         for i in range(self.X.shape[1]):
             if ax is None:
-                ax = fig.add_subplot(self.X.shape[1], 1, i + 1)
+                a = fig.add_subplot(self.X.shape[1], 1, i + 1)
             elif isinstance(ax, (tuple, list)):
-                ax = ax[i]
+                a = ax[i]
             else:
                 raise ValueError("Need one ax per latent dimnesion Q")
-            ax.plot(self.X, c='k', alpha=.3)
-            plots.extend(ax.plot(x, self.X.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i)))
-            ax.fill_between(x,
+            a.plot(self.X, c='k', alpha=.3)
+            plots.extend(a.plot(x, self.X.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i)))
+            a.fill_between(x,
                             self.X.T[i] - 2 * np.sqrt(self.X_variance.T[i]),
                             self.X.T[i] + 2 * np.sqrt(self.X_variance.T[i]),
                             facecolor=plots[-1].get_color(),
                             alpha=.3)
-            ax.legend(borderaxespad=0.)
-            ax.set_xlim(x.min(), x.max())
+            a.legend(borderaxespad=0.)
+            a.set_xlim(x.min(), x.max())
             if i < self.X.shape[1] - 1:
-                ax.set_xticklabels('')
+                a.set_xticklabels('')
         pylab.draw()
         fig.tight_layout(h_pad=.01) # , rect=(0, 0, 1, .95))
         return fig
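In plot_X_1d the per-dimension subplot now lives in a local name `a`, so a caller-supplied list of axes in `ax` is no longer clobbered on the first loop iteration: previously `ax = ax[i]` rebound the parameter to a single Axes, which broke the `ax[i]` lookup on the next pass. The following is a minimal, generic matplotlib sketch of the same pattern, not the GPy method itself.

import numpy as np
import matplotlib.pyplot as plt

def plot_columns(X, ax=None):
    # keep the caller's `ax` argument intact; work on a local `a` per column
    fig = plt.figure("columns")
    for i in range(X.shape[1]):
        if ax is None:
            a = fig.add_subplot(X.shape[1], 1, i + 1)
        elif isinstance(ax, (tuple, list)):
            a = ax[i]  # rebinding `ax` here would break the next iteration
        else:
            raise ValueError("Need one axis per column")
        a.plot(X[:, i])
    return fig

plot_columns(np.random.randn(50, 3))
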
@@ -261,12 +261,12 @@ class MRD(model):
         fig = pylab.figure(num=fignum, figsize=(4 * len(self.bgplvms), 3))
         for i, g in enumerate(self.bgplvms):
             if axes is None:
-                axes = fig.add_subplot(1, len(self.bgplvms), i + 1)
+                ax = fig.add_subplot(1, len(self.bgplvms), i + 1)
             elif isinstance(axes, (tuple, list)):
-                axes = axes[i]
+                ax = axes[i]
             else:
                 raise ValueError("Need one axes per latent dimension Q")
-            plotf(i, g, axes)
+            plotf(i, g, ax)
             pylab.draw()
         if axes is None:
             fig.tight_layout()
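_handle_plotting centralises the per-model subplot handling: each public plot_* method only supplies a `plotf(i, g, ax)` callback, and the helper decides whether to create subplots or reuse the axes passed in. Here it also switches to a local `ax` so the `axes` argument is not overwritten inside the loop. Below is a generic sketch of that dispatch pattern with stand-in data, not the MRD internals.

import matplotlib.pyplot as plt

def handle_plotting(models, axes, plotf, fignum="demo"):
    # create one subplot per model unless the caller supplied its own axes
    fig = plt.figure(num=fignum, figsize=(4 * len(models), 3))
    for i, g in enumerate(models):
        if axes is None:
            ax = fig.add_subplot(1, len(models), i + 1)
        elif isinstance(axes, (tuple, list)):
            ax = axes[i]
        else:
            raise ValueError("Need one axes per model")
        plotf(i, g, ax)  # the only part each plot_* method has to provide
    if axes is None:
        fig.tight_layout()
    return fig

# e.g. a "scales"-style bar plot becomes a one-line callback:
handle_plotting([[1, 2, 3], [3, 1, 2]], None,
                lambda i, g, ax: ax.bar(range(len(g)), g))
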
@@ -282,15 +282,15 @@ class MRD(model):
         return fig
 
     def plot_predict(self, fignum="MRD Predictions", ax=None, **kwargs):
-        fig = self._handle_plotting(fignum, ax, lambda i, g, ax: ax.imshow(g.predict(g.X)[0], **kwargs))
+        fig = self._handle_plotting(fignum, ax, lambda i, g, ax: ax.imshow(g. predict(g.X)[0], **kwargs))
         return fig
 
     def plot_scales(self, fignum="MRD Scales", ax=None, *args, **kwargs):
-        fig = self._handle_plotting(fignum, ax, lambda i, g, ax: g.kern.plot_ARD(axes=ax, *args, **kwargs))
+        fig = self._handle_plotting(fignum, ax, lambda i, g, ax: g.kern.plot_ARD(ax=ax, *args, **kwargs))
         return fig
 
     def plot_latent(self, fignum="MRD Latent Spaces", ax=None, *args, **kwargs):
-        fig = self._handle_plotting(fignum, ax, lambda i, g, ax: g.plot_latent(axes=ax, *args, **kwargs))
+        fig = self._handle_plotting(fignum, ax, lambda i, g, ax: g.plot_latent(ax=ax, *args, **kwargs))
        return fig
 
     def _debug_plot(self):
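With the keyword change, the MRD convenience plots forward each sub-model's subplot as `ax=...` to kern.plot_ARD and plot_latent. A hedged usage sketch follows; the GPy.examples.dimensionality_reduction path, the plot=False keyword, and the assumption that mrd_simulation returns the fitted model are not taken from this commit, while the plot_* calls and their default figure names come from the hunk above.

import pylab
import GPy

# Usage sketch; assumes mrd_simulation returns the fitted MRD model.
m = GPy.examples.dimensionality_reduction.mrd_simulation(optimize=True, plot=False)
m.plot_scales()    # per sub-model: g.kern.plot_ARD(ax=ax)
m.plot_latent()    # per sub-model: g.plot_latent(ax=ax)
m.plot_predict()   # per sub-model: ax.imshow(g.predict(g.X)[0])
pylab.show()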