merge branch 'params' of github.com:SheffieldML/GPy into params

This commit is contained in:
James Hensman 2014-01-28 14:29:11 +00:00
commit dc085c8c68
33 changed files with 734 additions and 634 deletions

View file

@ -2,10 +2,9 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt) # Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np import numpy as np
import pylab as pb import sys
import warnings import warnings
from .. import kern from .. import kern
from ..util.plot import gpplot, Tango, x_frame1D, x_frame2D
from ..util.linalg import dtrtrs from ..util.linalg import dtrtrs
from model import Model from model import Model
from parameterization import ObservableArray from parameterization import ObservableArray
@ -122,9 +121,9 @@ class GP(Model):
:param X: The points at which to take the samples. :param X: The points at which to take the samples.
:type X: np.ndarray, Nnew x self.input_dim. :type X: np.ndarray, Nnew x self.input_dim.
:param size: the number of a posteriori samples to plot. :param size: the number of a posteriori samples.
:type size: int. :type size: int.
:param which_parts: which of the kernel functions to plot (additively). :param which_parts: which of the kernel functions to use (additively).
:type which_parts: 'all', or list of bools. :type which_parts: 'all', or list of bools.
:param full_cov: whether to return the full covariance matrix, or just the diagonal. :param full_cov: whether to return the full covariance matrix, or just the diagonal.
:type full_cov: bool. :type full_cov: bool.
@ -145,9 +144,9 @@ class GP(Model):
:param X: the points at which to take the samples. :param X: the points at which to take the samples.
:type X: np.ndarray, Nnew x self.input_dim. :type X: np.ndarray, Nnew x self.input_dim.
:param size: the number of a posteriori samples to plot. :param size: the number of a posteriori samples.
:type size: int. :type size: int.
:param which_parts: which of the kernel functions to plot (additively). :param which_parts: which of the kernel functions to use (additively).
:type which_parts: 'all', or list of bools. :type which_parts: 'all', or list of bools.
:param full_cov: whether to return the full covariance matrix, or just the diagonal. :param full_cov: whether to return the full covariance matrix, or just the diagonal.
:type full_cov: bool. :type full_cov: bool.
@ -172,20 +171,13 @@ class GP(Model):
""" """
Plot the GP's view of the world, where the data is normalized and before applying a likelihood. Plot the GP's view of the world, where the data is normalized and before applying a likelihood.
This is a convenience function: we simply call self.plot with the This is a convenience function: arguments are passed to GPy.plotting.matplot_dep.models_plots.plot_f_fit
argument use_raw_predict set True. All args and kwargs are passed on to
plot.
see also: gp.plot
""" """
kwargs['plot_raw'] = True assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
self.plot(*args, **kwargs) from ..plotting.matplot_dep import models_plots
models_plots.plot_fit_f(self,*args,**kwargs)
def plot(self, plot_limits=None, which_data_rows='all', def plot(self, *args):
which_data_ycols='all', which_parts='all', fixed_inputs=[],
levels=20, samples=0, fignum=None, ax=None, resolution=None,
plot_raw=False,
linecol=Tango.colorsHex['darkBlue'],fillcol=Tango.colorsHex['lightBlue']):
""" """
Plot the posterior of the GP. Plot the posterior of the GP.
- In one dimension, the function is plotted with a shaded region identifying two standard deviations. - In one dimension, the function is plotted with a shaded region identifying two standard deviations.
@ -193,121 +185,13 @@ class GP(Model):
- In higher dimensions, use fixed_inputs to plot the GP with some of the inputs fixed. - In higher dimensions, use fixed_inputs to plot the GP with some of the inputs fixed.
Can plot only part of the data and part of the posterior functions Can plot only part of the data and part of the posterior functions
using which_data_rowsm which_data_ycols and which_parts using which_data_rows which_data_ycols and which_parts
:param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaluts to data limits This is a convenience function: arguments are passed to GPy.plotting.matplot_dep.models_plots.plot_fit
:type plot_limits: np.array
:param which_data_rows: which of the training data to plot (default all)
:type which_data_rows: 'all' or a slice object to slice self.X, self.Y
:param which_data_ycols: when the data has several columns (independant outputs), only plot these
:type which_data_rows: 'all' or a list of integers
:param which_parts: which of the kernel functions to plot (additively)
:type which_parts: 'all', or list of bools
:param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v.
:type fixed_inputs: a list of tuples
:param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D
:type resolution: int
:param levels: number of levels to plot in a contour plot.
:type levels: int
:param samples: the number of a posteriori samples to plot
:type samples: int
:param fignum: figure to plot on.
:type fignum: figure number
:param ax: axes to plot on.
:type ax: axes handle
:type output: integer (first output is 0)
:param linecol: color of line to plot.
:type linecol:
:param fillcol: color of fill
:param levels: for 2D plotting, the number of contour levels to use is ax is None, create a new figure
""" """
#deal with optional arguments assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
if which_data_rows == 'all': from ..plotting.matplot_dep import models_plots
which_data_rows = slice(None) models_plots.plot_fit(self,*args)
if which_data_ycols == 'all':
which_data_ycols = np.arange(self.output_dim)
if len(which_data_ycols)==0:
raise ValueError('No data selected for plotting')
if ax is None:
fig = pb.figure(num=fignum)
ax = fig.add_subplot(111)
#work out what the inputs are for plotting (1D or 2D)
fixed_dims = np.array([i for i,v in fixed_inputs])
free_dims = np.setdiff1d(np.arange(self.input_dim),fixed_dims)
#one dimensional plotting
if len(free_dims) == 1:
#define the frame on which to plot
resolution = resolution or 200
Xnew, xmin, xmax = x_frame1D(self.X[:,free_dims], plot_limits=plot_limits)
Xgrid = np.empty((Xnew.shape[0],self.input_dim))
Xgrid[:,free_dims] = Xnew
for i,v in fixed_inputs:
Xgrid[:,i] = v
#make a prediction on the frame and plot it
if plot_raw:
m, v = self._raw_predict(Xgrid, which_parts=which_parts)
lower = m - 2*np.sqrt(v)
upper = m + 2*np.sqrt(v)
Y = self.Y
else:
m, v, lower, upper = self.predict(Xgrid, which_parts=which_parts)
Y = self.Y
for d in which_data_ycols:
gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], axes=ax, edgecol=linecol, fillcol=fillcol)
ax.plot(self.X[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', mew=1.5)
#optionally plot some samples
if samples: #NOTE not tested with fixed_inputs
Ysim = self.posterior_samples(Xgrid, samples, which_parts=which_parts)
for yi in Ysim.T:
ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25)
#ax.plot(Xnew, yi[:,None], marker='x', linestyle='--',color=Tango.colorsHex['darkBlue']) #TODO apply this line for discrete outputs.
#set the limits of the plot to some sensible values
ymin, ymax = min(np.append(Y[which_data_rows, which_data_ycols].flatten(), lower)), max(np.append(Y[which_data_rows, which_data_ycols].flatten(), upper))
ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin)
ax.set_xlim(xmin, xmax)
ax.set_ylim(ymin, ymax)
#2D plotting
elif len(free_dims) == 2:
#define the frame for plotting on
resolution = resolution or 50
Xnew, _, _, xmin, xmax = x_frame2D(self.X[:,free_dims], plot_limits, resolution)
Xgrid = np.empty((Xnew.shape[0],self.input_dim))
Xgrid[:,free_dims] = Xnew
for i,v in fixed_inputs:
Xgrid[:,i] = v
x, y = np.linspace(xmin[0], xmax[0], resolution), np.linspace(xmin[1], xmax[1], resolution)
#predict on the frame and plot
if plot_raw:
m, _ = self._raw_predict(Xgrid, which_parts=which_parts)
Y = self.likelihood.Y
else:
m, _, _, _ = self.predict(Xgrid, which_parts=which_parts,sampling=False)
Y = self.likelihood.data
for d in which_data_ycols:
m_d = m[:,d].reshape(resolution, resolution).T
ax.contour(x, y, m_d, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet)
ax.scatter(self.X[which_data_rows, free_dims[0]], self.X[which_data_rows, free_dims[1]], 40, Y[which_data_rows, d], cmap=pb.cm.jet, vmin=m.min(), vmax=m.max(), linewidth=0.)
#set the limits of the plot to some sensible values
ax.set_xlim(xmin[0], xmax[0])
ax.set_ylim(xmin[1], xmax[1])
if samples:
warnings.warn("Samples are rather difficult to plot for 2D inputs...")
else:
raise NotImplementedError, "Cannot define a frame with more than two input dimensions"
def _getstate(self): def _getstate(self):
""" """
@ -333,5 +217,3 @@ class GP(Model):
self.num_data = state.pop() self.num_data = state.pop()
self.X = state.pop() self.X = state.pop()
Model._setstate(self, state) Model._setstate(self, state)

View file

@ -1,10 +1,9 @@
# Copyright (c) 2013, GPy authors (see AUTHORS.txt). # Copyright (c) 2013, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt) # Licensed under the BSD 3-clause license (see LICENSE.txt)
from ..util.plot import Tango, x_frame1D, x_frame2D import sys
from parameterization import Parameterized from parameterization import Parameterized
import numpy as np import numpy as np
import pylab as pb
class Mapping(Parameterized): class Mapping(Parameterized):
""" """
@ -47,11 +46,8 @@ class Mapping(Parameterized):
raise NotImplementedError raise NotImplementedError
def plot(self, plot_limits=None, which_data='all', which_parts='all', resolution=None, levels=20, samples=0, fignum=None, ax=None, fixed_inputs=[], linecol=Tango.colorsHex['darkBlue']): def plot(self, *args):
""" """
Plot the mapping.
Plots the mapping associated with the model. Plots the mapping associated with the model.
- In one dimension, the function is plotted. - In one dimension, the function is plotted.
- In two dimsensions, a contour-plot shows the function - In two dimsensions, a contour-plot shows the function
@ -60,68 +56,15 @@ class Mapping(Parameterized):
Can plot only part of the data and part of the posterior functions Can plot only part of the data and part of the posterior functions
using which_data and which_functions using which_data and which_functions
:param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaluts to data limits This is a convenience function: arguments are passed to GPy.plotting.matplot_dep.models_plots.plot_mapping
:type plot_limits: np.array
:param which_data: which if the training data to plot (default all)
:type which_data: 'all' or a slice object to slice self.X, self.Y
:param which_parts: which of the kernel functions to plot (additively)
:type which_parts: 'all', or list of bools
:param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D
:type resolution: int
:param levels: number of levels to plot in a contour plot.
:type levels: int
:param samples: the number of a posteriori samples to plot
:type samples: int
:param fignum: figure to plot on.
:type fignum: figure number
:param ax: axes to plot on.
:type ax: axes handle
:param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v.
:type fixed_inputs: a list of tuples
:param linecol: color of line to plot.
:type linecol:
:param levels: for 2D plotting, the number of contour levels to use is ax is None, create a new figure
""" """
# TODO include samples
if which_data == 'all':
which_data = slice(None)
if ax is None:
fig = pb.figure(num=fignum)
ax = fig.add_subplot(111)
plotdims = self.input_dim - len(fixed_inputs)
if plotdims == 1:
Xu = self.X * self._Xscale + self._Xoffset # NOTE self.X are the normalized values now
fixed_dims = np.array([i for i,v in fixed_inputs])
freedim = np.setdiff1d(np.arange(self.input_dim),fixed_dims)
Xnew, xmin, xmax = x_frame1D(Xu[:,freedim], plot_limits=plot_limits)
Xgrid = np.empty((Xnew.shape[0],self.input_dim))
Xgrid[:,freedim] = Xnew
for i,v in fixed_inputs:
Xgrid[:,i] = v
f = self.predict(Xgrid, which_parts=which_parts)
for d in range(y.shape[1]):
ax.plot(Xnew, f[:, d], edgecol=linecol)
elif self.X.shape[1] == 2:
resolution = resolution or 50
Xnew, _, _, xmin, xmax = x_frame2D(self.X, plot_limits, resolution)
x, y = np.linspace(xmin[0], xmax[0], resolution), np.linspace(xmin[1], xmax[1], resolution)
f = self.predict(Xnew, which_parts=which_parts)
m = m.reshape(resolution, resolution).T
ax.contour(x, y, f, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet) # @UndefinedVariable
ax.set_xlim(xmin[0], xmax[0])
ax.set_ylim(xmin[1], xmax[1])
if "matplotlib" in sys.modules:
from ..plotting.matplot_dep import models_plots
mapping_plots.plot_mapping(self,*args)
else: else:
raise NotImplementedError, "Cannot define a frame with more than two input dimensions" raise NameError, "matplotlib package has not been imported."
from model import Model from model import Model
@ -135,14 +78,14 @@ class Mapping_check_model(Model):
X = np.random.randn(num_samples, mapping.input_dim) X = np.random.randn(num_samples, mapping.input_dim)
if dL_df==None: if dL_df==None:
dL_df = np.ones((num_samples, mapping.output_dim)) dL_df = np.ones((num_samples, mapping.output_dim))
self.mapping=mapping self.mapping=mapping
self.X = X self.X = X
self.dL_df = dL_df self.dL_df = dL_df
self.num_params = self.mapping.num_params self.num_params = self.mapping.num_params
Model.__init__(self) Model.__init__(self)
def _get_params(self): def _get_params(self):
return self.mapping._get_params() return self.mapping._get_params()
@ -157,7 +100,7 @@ class Mapping_check_model(Model):
def _log_likelihood_gradients(self): def _log_likelihood_gradients(self):
raise NotImplementedError, "This needs to be implemented to use the Mapping_check_model class." raise NotImplementedError, "This needs to be implemented to use the Mapping_check_model class."
class Mapping_check_df_dtheta(Mapping_check_model): class Mapping_check_df_dtheta(Mapping_check_model):
"""This class allows gradient checks for the gradient of a mapping with respect to parameters. """ """This class allows gradient checks for the gradient of a mapping with respect to parameters. """
def __init__(self, mapping=None, dL_df=None, X=None): def __init__(self, mapping=None, dL_df=None, X=None):
@ -175,13 +118,13 @@ class Mapping_check_df_dX(Mapping_check_model):
if dL_df==None: if dL_df==None:
dL_df = np.ones((self.X.shape[0],self.mapping.output_dim)) dL_df = np.ones((self.X.shape[0],self.mapping.output_dim))
self.num_params = self.X.shape[0]*self.mapping.input_dim self.num_params = self.X.shape[0]*self.mapping.input_dim
def _log_likelihood_gradients(self): def _log_likelihood_gradients(self):
return self.mapping.df_dX(self.dL_df, self.X).flatten() return self.mapping.df_dX(self.dL_df, self.X).flatten()
def _get_param_names(self): def _get_param_names(self):
return ['X_' +str(i) + ','+str(j) for j in range(self.X.shape[1]) for i in range(self.X.shape[0])] return ['X_' +str(i) + ','+str(j) for j in range(self.X.shape[1]) for i in range(self.X.shape[0])]
def _get_params(self): def _get_params(self):
return self.X.flatten() return self.X.flatten()

View file

@ -3,7 +3,6 @@
import numpy as np import numpy as np
import pylab as pb
from scipy.special import gammaln, digamma from scipy.special import gammaln, digamma
from ...util.linalg import pdinv from ...util.linalg import pdinv
from domains import _REAL, _POSITIVE from domains import _REAL, _POSITIVE
@ -12,16 +11,14 @@ import weakref
class Prior: class Prior:
domain = None domain = None
def pdf(self, x): def pdf(self, x):
return np.exp(self.lnpdf(x)) return np.exp(self.lnpdf(x))
def plot(self): def plot(self):
rvs = self.rvs(1000) assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
pb.hist(rvs, 100, normed=True) from ..plotting.matplot_dep import priors_plots
xmin, xmax = pb.xlim() priors_plots.univariate_plot(self)
xx = np.linspace(xmin, xmax, 1000)
pb.plot(xx, self.pdf(xx), 'r', linewidth=2)
class Gaussian(Prior): class Gaussian(Prior):
@ -153,16 +150,9 @@ class MultivariateGaussian:
return np.random.multivariate_normal(self.mu, self.var, n) return np.random.multivariate_normal(self.mu, self.var, n)
def plot(self): def plot(self):
if self.input_dim == 2: assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
rvs = self.rvs(200) from ..plotting.matplot_dep import priors_plots
pb.plot(rvs[:, 0], rvs[:, 1], 'kx', mew=1.5) priors_plots.multivariate_plot(self)
xmin, xmax = pb.xlim()
ymin, ymax = pb.ylim()
xx, yy = np.mgrid[xmin:xmax:100j, ymin:ymax:100j]
xflat = np.vstack((xx.flatten(), yy.flatten())).T
zz = self.pdf(xflat).reshape(100, 100)
pb.contour(xx, yy, zz, linewidths=2)
def gamma_from_EV(E, V): def gamma_from_EV(E, V):
warnings.warn("use Gamma.from_EV to create Gamma Prior", FutureWarning) warnings.warn("use Gamma.from_EV to create Gamma Prior", FutureWarning)

View file

@ -11,7 +11,7 @@ from ...util.misc import param_to_array
class Normal(Parameterized): class Normal(Parameterized):
''' '''
Normal distribution for variational approximations. Normal distribution for variational approximations.
holds the means and variances for a factorizing multivariate normal distribution holds the means and variances for a factorizing multivariate normal distribution
''' '''
def __init__(self, means, variances, name='latent space'): def __init__(self, means, variances, name='latent space'):
@ -20,47 +20,12 @@ class Normal(Parameterized):
self.variances = Param('variance', variances) self.variances = Param('variance', variances)
self.add_parameters(self.means, self.variances) self.add_parameters(self.means, self.variances)
def plot(self, fignum=None, ax=None, colors=None): def plot(self, *args):
""" """
Plot latent space X in 1D: Plot latent space X in 1D:
- if fig is given, create input_dim subplots in fig and plot in these See GPy.plotting.matplot_dep.variational_plots
- if ax is given plot input_dim 1D latent space plots of X into each `axis`
- if neither fig nor ax is given create a figure with fignum and plot in there
colors:
colors of different latent space dimensions input_dim
""" """
import pylab assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
if ax is None: from ..plotting.matplot_dep import variational_plots
fig = pylab.figure(num=fignum, figsize=(8, min(12, (2 * self.means.shape[1])))) return variational_plots.plot(self,*args)
if colors is None:
colors = pylab.gca()._get_lines.color_cycle
pylab.clf()
else:
colors = iter(colors)
plots = []
means, variances = param_to_array(self.means, self.variances)
x = np.arange(means.shape[0])
for i in range(means.shape[1]):
if ax is None:
a = fig.add_subplot(means.shape[1], 1, i + 1)
elif isinstance(ax, (tuple, list)):
a = ax[i]
else:
raise ValueError("Need one ax per latent dimension input_dim")
a.plot(means, c='k', alpha=.3)
plots.extend(a.plot(x, means.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i)))
a.fill_between(x,
means.T[i] - 2 * np.sqrt(variances.T[i]),
means.T[i] + 2 * np.sqrt(variances.T[i]),
facecolor=plots[-1].get_color(),
alpha=.3)
a.legend(borderaxespad=0.)
a.set_xlim(x.min(), x.max())
if i < means.shape[1] - 1:
a.set_xticklabels('')
pylab.draw()
fig.tight_layout(h_pad=.01) # , rect=(0, 0, 1, .95))
return fig

View file

@ -2,7 +2,7 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt) # Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np import numpy as np
import pylab as pb from ..util.linalg import mdot, tdot, symmetrify, backsub_both_sides, chol_inv, dtrtrs, dpotrs, dpotri
from gp import GP from gp import GP
from parameterization.param import Param from parameterization.param import Param
from ..inference.latent_function_inference import varDTC from ..inference.latent_function_inference import varDTC
@ -73,83 +73,6 @@ class SparseGP(GP):
#TODO!!! #TODO!!!
def plot_f(self, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, full_cov=False, fignum=None, ax=None):
"""
Plot the belief in the latent function, the "GP's view of the world"
- In one dimension, the function is plotted with a shaded region identifying two standard deviations.
- In two dimsensions, a contour-plot shows the mean predicted function
- Not implemented in higher dimensions
:param samples: the number of a posteriori samples to plot
:param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaluts to data limits
:param which_data: which if the training data to plot (default all)
:type which_data: 'all' or a slice object to slice self.X, self.Y
:param which_parts: which of the kernel functions to plot (additively)
:type which_parts: 'all', or list of bools
:param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D
:type resolution: int
:param full_cov:
:type full_cov: bool
:param fignum: figure to plot on.
:type fignum: figure number
:param ax: axes to plot on.
:type ax: axes handle
:param output: which output to plot (for multiple output models only)
:type output: integer (first output is 0)
"""
if ax is None:
fig = pb.figure(num=fignum)
ax = fig.add_subplot(111)
if fignum is None and ax is None:
fignum = fig.num
if which_data is 'all':
which_data = slice(None)
GP.plot_f(self, samples=samples, plot_limits=plot_limits, which_data='all', which_parts='all', resolution=resolution, full_cov=full_cov, fignum=fignum, ax=ax)
if self.X.shape[1] == 1:
if self.has_uncertain_inputs:
ax.errorbar(self.X[which_data, 0], self.likelihood.data[which_data, 0],
xerr=2 * np.sqrt(self.X_variance[which_data, 0]),
ecolor='k', fmt=None, elinewidth=.5, alpha=.5)
Zu = self.Z * self._Xscale + self._Xoffset
ax.plot(Zu, np.zeros_like(Zu) + ax.get_ylim()[0], 'r|', mew=1.5, markersize=12)
elif self.X.shape[1] == 2:
Zu = self.Z * self._Xscale + self._Xoffset
ax.plot(Zu[:, 0], Zu[:, 1], 'wo')
else:
raise NotImplementedError, "Cannot define a frame with more than two input dimensions"
def plot(self, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, levels=20, fignum=None, ax=None):
if ax is None:
fig = pb.figure(num=fignum)
ax = fig.add_subplot(111)
if fignum is None and ax is None:
fignum = fig.num
if which_data is 'all':
which_data = slice(None)
GP.plot(self, samples=samples, plot_limits=plot_limits, which_data='all', which_parts='all', resolution=resolution, levels=20, fignum=fignum, ax=ax)
if self.X.shape[1] == 1:
if self.has_uncertain_inputs:
ax.errorbar(self.X[which_data, 0], self.likelihood.data[which_data, 0],
xerr=2 * np.sqrt(self.X_variance[which_data, 0]),
ecolor='k', fmt=None, elinewidth=.5, alpha=.5)
Zu = self.Z * self._Xscale + self._Xoffset
ax.plot(Zu, np.zeros_like(Zu) + ax.get_ylim()[0], 'r|', mew=1.5, markersize=12)
elif self.X.shape[1] == 2:
Zu = self.Z * self._Xscale + self._Xoffset
ax.plot(Zu[:, 0], Zu[:, 1], 'wo')
else:
raise NotImplementedError, "Cannot define a frame with more than two input dimensions"
def _getstate(self): def _getstate(self):
""" """
Get the current state of the class, Get the current state of the class,
@ -166,4 +89,3 @@ class SparseGP(GP):
self.num_inducing = state.pop() self.num_inducing = state.pop()
self.Z = state.pop() self.Z = state.pop()
GP._setstate(self, state) GP._setstate(self, state)

View file

@ -2,7 +2,6 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt) # Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np import numpy as np
import pylab as pb
from ..util.linalg import pdinv, mdot, tdot, dpotrs, dtrtrs, jitchol, backsub_both_sides from ..util.linalg import pdinv, mdot, tdot, dpotrs, dtrtrs, jitchol, backsub_both_sides
from gp import GP from gp import GP
import time import time
@ -480,38 +479,19 @@ class SVIGP(GP):
return self.q_u_canonical_flat return self.q_u_canonical_flat
def plot(self, ax=None, fignum=None, Z_height=None, **kwargs): def plot(self, *args, **kwargs):
"""
See GPy.plotting.matplot_dep.svig_plots.plot
"""
assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
from ..plotting.matplot_dep import svig_plots
svig_plots.plot(self,*args,**kwargs)
if ax is None:
fig = pb.figure(num=fignum)
ax = fig.add_subplot(111)
#horrible hack here:
data = self.likelihood.data.copy()
self.likelihood.data = self.Y
GP.plot(self, ax=ax, **kwargs)
self.likelihood.data = data
Zu = self.Z * self._Xscale + self._Xoffset
if self.input_dim==1:
ax.plot(self.X_batch, self.likelihood.data, 'gx',mew=2)
if Z_height is None:
Z_height = ax.get_ylim()[0]
ax.plot(Zu, np.zeros_like(Zu) + Z_height, 'r|', mew=1.5, markersize=12)
if self.input_dim==2:
ax.scatter(self.X[:,0], self.X[:,1], 20., self.Y[:,0], linewidth=0, cmap=pb.cm.jet) # @UndefinedVariable
ax.plot(Zu[:,0], Zu[:,1], 'w^')
def plot_traces(self): def plot_traces(self):
pb.figure() """
t = np.array(self._param_trace) See GPy.plotting.matplot_dep.svig_plots.plot_traces
pb.subplot(2,1,1) """
for l,ti in zip(self._get_param_names(),t.T): assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
if not l[:3]=='iip': from ..plotting.matplot_dep import svig_plots
pb.plot(ti,label=l) svig_plots.plot_traces(self)
pb.legend(loc=0)
pb.subplot(2,1,2)
pb.plot(np.asarray(self._ll_trace),label='stochastic likelihood')
pb.legend(loc=0)

View file

@ -1,7 +1,6 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt). # Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt) # Licensed under the BSD 3-clause license (see LICENSE.txt)
import pylab as pb
import datetime as dt import datetime as dt
from scipy import optimize from scipy import optimize
from warnings import warn from warnings import warn
@ -57,13 +56,13 @@ class Optimizer():
raise NotImplementedError, "this needs to be implemented to use the optimizer class" raise NotImplementedError, "this needs to be implemented to use the optimizer class"
def plot(self): def plot(self):
if self.trace == None: """
print "No trace present so I can't plot it. Please check that the optimizer actually supplies a trace." See GPy.plotting.matplot_dep.inference_plots
else: """
pb.figure() assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
pb.plot(self.trace) from ..plotting.matplot_dep import inference_plots
pb.xlabel('Iteration') inference_plots.plot_optimizer(self)
pb.ylabel('f(x)')
def __str__(self): def __str__(self):
diagnostics = "Optimizer: \t\t\t\t %s\n" % self.opt_name diagnostics = "Optimizer: \t\t\t\t %s\n" % self.opt_name

View file

@ -4,7 +4,6 @@
import numpy as np import numpy as np
from scipy import linalg, optimize from scipy import linalg, optimize
import pylab as pb
import Tango import Tango
import sys import sys
import re import re
@ -80,6 +79,3 @@ class Metropolis_Hastings:
fs.append(function(*args)) fs.append(function(*args))
self.model._set_params(param)# reset model to starting state self.model._set_params(param)# reset model to starting state
return fs return fs

View file

@ -3,7 +3,6 @@ import scipy as sp
import scipy.sparse import scipy.sparse
from optimization import Optimizer from optimization import Optimizer
from scipy import linalg, optimize from scipy import linalg, optimize
import pylab as plt
import copy, sys, pickle import copy, sys, pickle
class opt_SGD(Optimizer): class opt_SGD(Optimizer):
@ -68,16 +67,12 @@ class opt_SGD(Optimizer):
return status return status
def plot_traces(self): def plot_traces(self):
plt.figure() """
plt.subplot(211) See GPy.plotting.matplot_dep.inference_plots
plt.title('Parameters') """
for k in self.param_traces.keys(): assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
plt.plot(self.param_traces[k], label=k) from ..plotting.matplot_dep import inference_plots
plt.legend(loc=0) inference_plots.plot_sgd_traces(self)
plt.subplot(212)
plt.title('Objective function')
plt.plot(self.fopt_trace)
def non_null_samples(self, data): def non_null_samples(self, data):
return (np.isnan(data).sum(axis=1) == 0) return (np.isnan(data).sum(axis=1) == 0)
@ -289,7 +284,6 @@ class opt_SGD(Optimizer):
b = len(features)/self.batch_size b = len(features)/self.batch_size
features = [features[i::b] for i in range(b)] features = [features[i::b] for i in range(b)]
NLL = [] NLL = []
import pylab as plt
for count, j in enumerate(features): for count, j in enumerate(features):
self.Model.input_dim = len(j) self.Model.input_dim = len(j)
self.Model.likelihood.input_dim = len(j) self.Model.likelihood.input_dim = len(j)
@ -322,9 +316,6 @@ class opt_SGD(Optimizer):
self.adapt_learning_rate(it+count, D) self.adapt_learning_rate(it+count, D)
NLL.append(f) NLL.append(f)
self.fopt_trace.append(NLL[-1]) self.fopt_trace.append(NLL[-1])
# fig = plt.figure('traces')
# plt.clf()
# plt.plot(self.param_traces['noise'])
# for k in self.param_traces.keys(): # for k in self.param_traces.keys():
# self.param_traces[k].append(self.Model.get(k)[0]) # self.param_traces[k].append(self.Model.get(k)[0])

View file

@ -3,9 +3,7 @@
import sys import sys
import numpy as np import numpy as np
import pylab as pb
import itertools import itertools
from matplotlib.transforms import offset_copy
from parts.prod import Prod as prod from parts.prod import Prod as prod
from parts.linear import Linear from parts.linear import Linear
from parts.kernpart import Kernpart from parts.kernpart import Kernpart
@ -71,77 +69,14 @@ class kern(Parameterized):
Parameterized._setstate(self, state) Parameterized._setstate(self, state)
def plot_ARD(self, fignum=None, ax=None, title='', legend=False): def plot_ARD(self, *args):
"""If an ARD kernel is present, plot a bar representation using matplotlib """If an ARD kernel is present, plot a bar representation using matplotlib
:param fignum: figure number of the plot See GPy.plotting.matplot_dep.plot_ARD
:param ax: matplotlib axis to plot on
:param title:
title of the plot,
pass '' to not print a title
pass None for a generic title
""" """
if ax is None: assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
fig = pb.figure(fignum) from ..plotting.matplot_dep import kernel_plots
ax = fig.add_subplot(111) return kernel_plots.plot_ARD(self,*args)
else:
fig = ax.figure
from GPy.util import Tango
from matplotlib.textpath import TextPath
Tango.reset()
xticklabels = []
bars = []
x0 = 0
for p in self._parameters_:
c = Tango.nextMedium()
if hasattr(p, 'ARD') and p.ARD:
if title is None:
ax.set_title('ARD parameters, %s kernel' % p.name)
else:
ax.set_title(title)
if isinstance(p, Linear):
ard_params = p.variances
else:
ard_params = 1. / p.lengthscale
x = np.arange(x0, x0 + len(ard_params))
bars.append(ax.bar(x, ard_params, align='center', color=c, edgecolor='k', linewidth=1.2, label=p.name.replace("_"," ")))
xticklabels.extend([r"$\mathrm{{{name}}}\ {x}$".format(name=p.name, x=i) for i in np.arange(len(ard_params))])
x0 += len(ard_params)
x = np.arange(x0)
transOffset = offset_copy(ax.transData, fig=fig,
x=0., y= -2., units='points')
transOffsetUp = offset_copy(ax.transData, fig=fig,
x=0., y=1., units='points')
for bar in bars:
for patch, num in zip(bar.patches, np.arange(len(bar.patches))):
height = patch.get_height()
xi = patch.get_x() + patch.get_width() / 2.
va = 'top'
c = 'w'
t = TextPath((0, 0), "${xi}$".format(xi=xi), rotation=0, usetex=True, ha='center')
transform = transOffset
if patch.get_extents().height <= t.get_extents().height + 3:
va = 'bottom'
c = 'k'
transform = transOffsetUp
ax.text(xi, height, "${xi}$".format(xi=int(num)), color=c, rotation=0, ha='center', va=va, transform=transform)
# for xi, t in zip(x, xticklabels):
# ax.text(xi, maxi / 2, t, rotation=90, ha='center', va='center')
# ax.set_xticklabels(xticklabels, rotation=17)
ax.set_xticks([])
ax.set_xlim(-.5, x0 - .5)
if legend:
if title is '':
mode = 'expand'
if len(bars) > 1:
mode = 'expand'
ax.legend(bbox_to_anchor=(0., 1.02, 1., 1.02), loc=3,
ncol=len(bars), mode=mode, borderaxespad=0.)
fig.tight_layout(rect=(0, 0, 1, .9))
else:
ax.legend()
return ax
# def _transform_gradients(self, g): # def _transform_gradients(self, g):
# """ # """
@ -530,61 +465,13 @@ class kern(Parameterized):
return target_mu, target_S return target_mu, target_S
def plot(self, x=None, plot_limits=None, which_parts='all', resolution=None, *args, **kwargs): def plot(self, *args, **kwargs):
if which_parts == 'all': """
which_parts = [True] * self.size See GPy.plotting.matplot_dep.plot
if self.input_dim == 1: """
if x is None: assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
x = np.zeros((1, 1)) from ..plotting.matplot_dep import kernel_plots
else: kernel_plots.plot(self,*args)
x = np.asarray(x)
assert x.size == 1, "The size of the fixed variable x is not 1"
x = x.reshape((1, 1))
if plot_limits == None:
xmin, xmax = (x - 5).flatten(), (x + 5).flatten()
elif len(plot_limits) == 2:
xmin, xmax = plot_limits
else:
raise ValueError, "Bad limits for plotting"
Xnew = np.linspace(xmin, xmax, resolution or 201)[:, None]
Kx = self.K(Xnew, x, which_parts)
pb.plot(Xnew, Kx, *args, **kwargs)
pb.xlim(xmin, xmax)
pb.xlabel("x")
pb.ylabel("k(x,%0.1f)" % x)
elif self.input_dim == 2:
if x is None:
x = np.zeros((1, 2))
else:
x = np.asarray(x)
assert x.size == 2, "The size of the fixed variable x is not 2"
x = x.reshape((1, 2))
if plot_limits == None:
xmin, xmax = (x - 5).flatten(), (x + 5).flatten()
elif len(plot_limits) == 2:
xmin, xmax = plot_limits
else:
raise ValueError, "Bad limits for plotting"
resolution = resolution or 51
xx, yy = np.mgrid[xmin[0]:xmax[0]:1j * resolution, xmin[1]:xmax[1]:1j * resolution]
xg = np.linspace(xmin[0], xmax[0], resolution)
yg = np.linspace(xmin[1], xmax[1], resolution)
Xnew = np.vstack((xx.flatten(), yy.flatten())).T
Kx = self.K(Xnew, x, which_parts)
Kx = Kx.reshape(resolution, resolution).T
pb.contour(xg, yg, Kx, vmin=Kx.min(), vmax=Kx.max(), cmap=pb.cm.jet, *args, **kwargs) # @UndefinedVariable
pb.xlim(xmin[0], xmax[0])
pb.ylim(xmin[1], xmax[1])
pb.xlabel("x1")
pb.ylabel("x2")
pb.title("k(x1,x2 ; %0.1f,%0.1f)" % (x[0, 0], x[0, 1]))
else:
raise NotImplementedError, "Cannot plot a kernel with more than two input dimensions"
from GPy.core.model import Model from GPy.core.model import Model

View file

@ -4,8 +4,6 @@
import numpy as np import numpy as np
from scipy import stats,special from scipy import stats,special
import scipy as sp import scipy as sp
import pylab as pb
from ..util.plot import gpplot
from ..util.univariate_Gaussian import std_norm_pdf,std_norm_cdf from ..util.univariate_Gaussian import std_norm_pdf,std_norm_cdf
import link_functions import link_functions
from ..util.misc import chain_1, chain_2, chain_3 from ..util.misc import chain_1, chain_2, chain_3

View file

@ -4,7 +4,6 @@
import numpy as np import numpy as np
from scipy import stats from scipy import stats
import scipy as sp import scipy as sp
import pylab as pb
from GPy.util.univariate_Gaussian import std_norm_pdf,std_norm_cdf,inv_std_norm_cdf from GPy.util.univariate_Gaussian import std_norm_pdf,std_norm_cdf,inv_std_norm_cdf
class GPTransformation(object): class GPTransformation(object):

View file

@ -3,14 +3,12 @@
import numpy as np import numpy as np
import itertools import itertools
from matplotlib import pyplot
from gplvm import GPLVM from gplvm import GPLVM
from .. import kern from .. import kern
from ..core import SparseGP from ..core import SparseGP
from ..likelihoods import Gaussian from ..likelihoods import Gaussian
from ..inference.optimization import SCG from ..inference.optimization import SCG
from ..util import plot_latent, linalg from ..util import linalg
from ..util.plot_latent import most_significant_input_dimensions
from ..core.parameterization.variational import Normal from ..core.parameterization.variational import Normal
class BayesianGPLVM(SparseGP, GPLVM): class BayesianGPLVM(SparseGP, GPLVM):
@ -75,11 +73,11 @@ class BayesianGPLVM(SparseGP, GPLVM):
# """ # """
# Horizontally stacks the parameters in order to present them to the optimizer. # Horizontally stacks the parameters in order to present them to the optimizer.
# The resulting 1-input_dim array has this structure: # The resulting 1-input_dim array has this structure:
# #
# =============================================================== # ===============================================================
# | mu | S | Z | theta | beta | # | mu | S | Z | theta | beta |
# =============================================================== # ===============================================================
# #
# """ # """
# x = np.hstack((self.X.flatten(), self.X_variance.flatten(), SparseGP._get_params(self))) # x = np.hstack((self.X.flatten(), self.X_variance.flatten(), SparseGP._get_params(self)))
# return x # return x
@ -131,7 +129,13 @@ class BayesianGPLVM(SparseGP, GPLVM):
# return np.hstack((self.dbound_dmuS.flatten(), self.dbound_dZtheta)) # return np.hstack((self.dbound_dmuS.flatten(), self.dbound_dZtheta))
def plot_latent(self, plot_inducing=True, *args, **kwargs): def plot_latent(self, plot_inducing=True, *args, **kwargs):
return plot_latent.plot_latent(self, plot_inducing=plot_inducing, *args, **kwargs) """
See GPy.plotting.matplot_dep.dim_reduction_plots.plot_latent
"""
assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
from ..plotting.matplot_dep import dim_reduction_plots
return dim_reduction_plots.plot_latent(self, plot_inducing=plot_inducing, *args, **kwargs)
def do_test_latents(self, Y): def do_test_latents(self, Y):
""" """
@ -190,65 +194,14 @@ class BayesianGPLVM(SparseGP, GPLVM):
dK_dX[:, i] = self.kern.dK_dX(ones, Xnew, self.Z[i:i + 1, :]).sum(-1) dK_dX[:, i] = self.kern.dK_dX(ones, Xnew, self.Z[i:i + 1, :]).sum(-1)
return np.dot(dK_dX, self.Cpsi1Vf) return np.dot(dK_dX, self.Cpsi1Vf)
def plot_steepest_gradient_map(self, fignum=None, ax=None, which_indices=None, labels=None, data_labels=None, data_marker='o', data_s=40, resolution=20, aspect='auto', updates=False, ** kwargs): def plot_steepest_gradient_map(self, *args, ** kwargs):
input_1, input_2 = significant_dims = most_significant_input_dimensions(self, which_indices) """
See GPy.plotting.matplot_dep.dim_reduction_plots.plot_steepest_gradient_map
"""
assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
from ..plotting.matplot_dep import dim_reduction_plots
X = np.zeros((resolution ** 2, self.input_dim)) return dim_reduction_plots.plot_steepest_gradient_map(model,*args,**kwargs)
indices = np.r_[:X.shape[0]]
if labels is None:
labels = range(self.output_dim)
def plot_function(x):
X[:, significant_dims] = x
dmu_dX = self.dmu_dXnew(X)
argmax = np.argmax(dmu_dX, 1)
return dmu_dX[indices, argmax], np.array(labels)[argmax]
if ax is None:
fig = pyplot.figure(num=fignum)
ax = fig.add_subplot(111)
if data_labels is None:
data_labels = np.ones(self.num_data)
ulabels = []
for lab in data_labels:
if not lab in ulabels:
ulabels.append(lab)
marker = itertools.cycle(list(data_marker))
from GPy.util import Tango
for i, ul in enumerate(ulabels):
if type(ul) is np.string_:
this_label = ul
elif type(ul) is np.int64:
this_label = 'class %i' % ul
else:
this_label = 'class %i' % i
m = marker.next()
index = np.nonzero(data_labels == ul)[0]
x = self.X[index, input_1]
y = self.X[index, input_2]
ax.scatter(x, y, marker=m, s=data_s, color=Tango.nextMedium(), label=this_label)
ax.set_xlabel('latent dimension %i' % input_1)
ax.set_ylabel('latent dimension %i' % input_2)
from matplotlib.cm import get_cmap
from GPy.util.latent_space_visualizations.controllers.imshow_controller import ImAnnotateController
controller = ImAnnotateController(ax,
plot_function,
tuple(self.X.min(0)[:, significant_dims]) + tuple(self.X.max(0)[:, significant_dims]),
resolution=resolution,
aspect=aspect,
cmap=get_cmap('jet'),
**kwargs)
ax.legend()
ax.figure.tight_layout()
if updates:
pyplot.show()
clear = raw_input('Enter to continue')
if clear.lower() in 'yes' or clear == '':
controller.deactivate()
return controller.view
def latent_cost_and_grad(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2): def latent_cost_and_grad(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2):
""" """
@ -304,5 +257,3 @@ def latent_grad(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2):
dlnS = S * (S0 + S1 + S2 - 0.5) + .5 dlnS = S * (S0 + S1 + S2 - 0.5) + .5
return -np.hstack((dmu.flatten(), dlnS.flatten())) return -np.hstack((dmu.flatten(), dlnS.flatten()))

View file

@ -0,0 +1,16 @@
# Copyright (c) 2014, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import base_plots
import models_plots
import priors_plots
import variational_plots
import kernel_plots
import svig_plots
import dim_reduction_plots
import mapping_plots
import Tango
import visualize
import latent_space_visualizations
import netpbmfile
import inference_plots

View file

@ -71,8 +71,8 @@ def align_subplots(N,M,xlim=None, ylim=None):
removeUpperTicks() removeUpperTicks()
def align_subplot_array(axes,xlim=None, ylim=None): def align_subplot_array(axes,xlim=None, ylim=None):
"""make all of the axes in the array hae the same limits, turn off unnecessary ticks """
Make all of the axes in the array have the same limits, turn off unnecessary ticks
use pb.subplots() to get an array of axes use pb.subplots() to get an array of axes
""" """
#find sensible xlim,ylim #find sensible xlim,ylim

View file

@ -1,11 +1,16 @@
import pylab as pb import pylab as pb
import numpy as np import numpy as np
from .. import util from ... import util
from GPy.util.latent_space_visualizations.controllers.imshow_controller import ImshowController from latent_space_visualizations.controllers.imshow_controller import ImshowController,ImAnnotateController
from misc import param_to_array from GPy.util.misc import param_to_array
import itertools import itertools
import Tango
from matplotlib.cm import get_cmap
def most_significant_input_dimensions(model, which_indices): def most_significant_input_dimensions(model, which_indices):
"""
Determine which dimensions should be plotted
"""
if which_indices is None: if which_indices is None:
if model.input_dim == 1: if model.input_dim == 1:
input_1 = 0 input_1 = 0
@ -39,7 +44,7 @@ def plot_latent(model, labels=None, which_indices=None,
input_1, input_2 = most_significant_input_dimensions(model, which_indices) input_1, input_2 = most_significant_input_dimensions(model, which_indices)
X = param_to_array(model.X) X = param_to_array(model.X)
# first, plot the output variance as a function of the latent space # first, plot the output variance as a function of the latent space
Xtest, xx, yy, xmin, xmax = util.plot.x_frame2D(X[:, [input_1, input_2]], resolution=resolution) Xtest, xx, yy, xmin, xmax = util.plot.x_frame2D(X[:, [input_1, input_2]], resolution=resolution)
Xtest_full = np.zeros((Xtest.shape[0], model.X.shape[1])) Xtest_full = np.zeros((Xtest.shape[0], model.X.shape[1]))
@ -49,6 +54,7 @@ def plot_latent(model, labels=None, which_indices=None,
mu, var, low, up = model.predict(Xtest_full) mu, var, low, up = model.predict(Xtest_full)
var = var[:, :1] var = var[:, :1]
return np.log(var) return np.log(var)
view = ImshowController(ax, plot_function, view = ImshowController(ax, plot_function,
tuple(X[:, [input_1, input_2]].min(0)) + tuple(X[:, [input_1, input_2]].max(0)), tuple(X[:, [input_1, input_2]].min(0)) + tuple(X[:, [input_1, input_2]].max(0)),
resolution, aspect=aspect, interpolation='bilinear', resolution, aspect=aspect, interpolation='bilinear',
@ -124,10 +130,12 @@ def plot_magnification(model, labels=None, which_indices=None,
# first, plot the output variance as a function of the latent space # first, plot the output variance as a function of the latent space
Xtest, xx, yy, xmin, xmax = util.plot.x_frame2D(model.X[:, [input_1, input_2]], resolution=resolution) Xtest, xx, yy, xmin, xmax = util.plot.x_frame2D(model.X[:, [input_1, input_2]], resolution=resolution)
Xtest_full = np.zeros((Xtest.shape[0], model.X.shape[1])) Xtest_full = np.zeros((Xtest.shape[0], model.X.shape[1]))
def plot_function(x): def plot_function(x):
Xtest_full[:, [input_1, input_2]] = x Xtest_full[:, [input_1, input_2]] = x
mf=model.magnification(Xtest_full) mf=model.magnification(Xtest_full)
return mf return mf
view = ImshowController(ax, plot_function, view = ImshowController(ax, plot_function,
tuple(model.X.min(0)[:, [input_1, input_2]]) + tuple(model.X.max(0)[:, [input_1, input_2]]), tuple(model.X.min(0)[:, [input_1, input_2]]) + tuple(model.X.max(0)[:, [input_1, input_2]]),
resolution, aspect=aspect, interpolation='bilinear', resolution, aspect=aspect, interpolation='bilinear',
@ -179,3 +187,62 @@ def plot_magnification(model, labels=None, which_indices=None,
pb.title('Magnification Factor') pb.title('Magnification Factor')
return ax return ax
def plot_steepest_gradient_map(model, fignum=None, ax=None, which_indices=None, labels=None, data_labels=None, data_marker='o', data_s=40, resolution=20, aspect='auto', updates=False, ** kwargs):
    """Annotated image showing, over the two most significant latent
    dimensions of `model`, which output dimension changes fastest at each
    point of the latent space, with the training points scattered on top.

    :param model: the latent-variable model to plot; must supply
        `input_dim`, `output_dim`, `num_data`, `X` and `dmu_dXnew`.
    :param fignum: figure number to use if `ax` is None
    :param ax: matplotlib axis to plot on (a new one is created if None)
    :param which_indices: pair of latent dimensions to plot, or None to pick
        the most significant pair automatically
    :param labels: per-output labels for the annotations (defaults to the
        output indices 0..output_dim-1)
    :param data_labels: class label per training point, used for the scatter
        legend (defaults to a single class)
    :param data_marker: markers cycled through for the classes
    :param data_s: scatter marker size
    :param resolution: grid resolution per axis for the annotated image
    :param aspect: aspect ratio passed on to the image controller
    :param updates: if True, show the figure and keep the controller live
        until the user presses Enter
    :param kwargs: forwarded to ImAnnotateController
    :returns: the controller's view
    """
    input_1, input_2 = significant_dims = most_significant_input_dimensions(model, which_indices)

    X = np.zeros((resolution ** 2, model.input_dim))
    indices = np.r_[:X.shape[0]]
    if labels is None:
        labels = range(model.output_dim)

    def plot_function(x):
        # gradient of the predicted mean on the grid; per grid point report
        # the steepest-changing output dimension and its label
        X[:, significant_dims] = x
        dmu_dX = model.dmu_dXnew(X)
        argmax = np.argmax(dmu_dX, 1)
        return dmu_dX[indices, argmax], np.array(labels)[argmax]

    if ax is None:
        # fixed: this module imports pylab as pb; `pyplot` was an undefined
        # name here and raised NameError
        fig = pb.figure(num=fignum)
        ax = fig.add_subplot(111)

    if data_labels is None:
        data_labels = np.ones(model.num_data)
    ulabels = []
    for lab in data_labels:
        if not lab in ulabels:
            ulabels.append(lab)
    marker = itertools.cycle(list(data_marker))
    for i, ul in enumerate(ulabels):
        if type(ul) is np.string_:
            this_label = ul
        elif type(ul) is np.int64:
            this_label = 'class %i' % ul
        else:
            this_label = 'class %i' % i
        m = marker.next()
        index = np.nonzero(data_labels == ul)[0]
        x = model.X[index, input_1]
        y = model.X[index, input_2]
        ax.scatter(x, y, marker=m, s=data_s, color=Tango.nextMedium(), label=this_label)

    ax.set_xlabel('latent dimension %i' % input_1)
    ax.set_ylabel('latent dimension %i' % input_2)

    # NOTE(review): X.min(0)/X.max(0) are 1-D, so the [:, significant_dims]
    # index looks wrong -- confirm against the ImshowController plot_limits
    # convention used elsewhere in this module.
    controller = ImAnnotateController(ax,
                                      plot_function,
                                      tuple(model.X.min(0)[:, significant_dims]) + tuple(model.X.max(0)[:, significant_dims]),
                                      resolution=resolution,
                                      aspect=aspect,
                                      cmap=get_cmap('jet'),
                                      **kwargs)
    ax.legend()
    ax.figure.tight_layout()
    if updates:
        # fixed: `pyplot` was undefined -- use the module-level pylab import
        pb.show()
        clear = raw_input('Enter to continue')
        if clear.lower() in 'yes' or clear == '':
            controller.deactivate()
    return controller.view

View file

@ -0,0 +1,28 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import pylab as pb
#import numpy as np
#import Tango
#from base_plots import gpplot, x_frame1D, x_frame2D
def plot_optimizer(optimizer):
    """Plot the objective-function trace of an optimizer run.

    :param optimizer: an optimizer object; its `trace` attribute (a sequence
        of objective values, one per iteration) is plotted if present.
    """
    # `is None` rather than `== None`: identity test is the correct idiom and
    # avoids an ambiguous elementwise comparison when the trace is an ndarray
    if optimizer.trace is None:
        print("No trace present so I can't plot it. Please check that the optimizer actually supplies a trace.")
    else:
        pb.figure()
        pb.plot(optimizer.trace)
        pb.xlabel('Iteration')
        pb.ylabel('f(x)')
def plot_sgd_traces(optimizer):
    """Two-panel figure of an SGD run: parameter traces on top, the
    objective-function trace below.

    :param optimizer: an optimizer exposing `param_traces` (dict of name ->
        trace) and `fopt_trace`.
    """
    pb.figure()
    pb.subplot(211)
    pb.title('Parameters')
    for name, trace in optimizer.param_traces.items():
        pb.plot(trace, label=name)
    pb.legend(loc=0)
    pb.subplot(212)
    pb.title('Objective function')
    pb.plot(optimizer.fopt_trace)

View file

@ -0,0 +1,137 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import sys
import numpy as np
import pylab as pb
import Tango
from matplotlib.textpath import TextPath
from matplotlib.transforms import offset_copy
def plot_ARD(kernel, fignum=None, ax=None, title='', legend=False):
    """If an ARD kernel is present, plot a bar representation using matplotlib.

    One bar group is drawn per ARD part of the kernel: the variances for
    linear parts and the inverse lengthscales otherwise, so in both cases a
    taller bar means a more relevant input dimension.

    :param kernel: the kernel whose ARD parameters to plot
    :param fignum: figure number of the plot (used only if ax is None)
    :param ax: matplotlib axis to plot on
    :param title:
        title of the plot,
        pass '' to not print a title
        pass None for a generic title
    :param legend: whether to draw a legend for the bar groups
    :returns: the matplotlib axis the bars were drawn on
    """
    if ax is None:
        fig = pb.figure(fignum)
        ax = fig.add_subplot(111)
    else:
        fig = ax.figure
    Tango.reset()
    xticklabels = []
    bars = []
    x0 = 0  # running offset along the x axis, one slot per ARD parameter
    for p in kernel._parameters_:
        c = Tango.nextMedium()
        if hasattr(p, 'ARD') and p.ARD:
            if title is None:
                ax.set_title('ARD parameters, %s kernel' % p.name)
            else:
                ax.set_title(title)
            # NOTE(review): `Linear` is not imported in this module, so this
            # isinstance test raises NameError when reached -- confirm the
            # intended import (presumably the linear kernel part class).
            if isinstance(p, Linear):
                ard_params = p.variances
            else:
                # lengthscales are inverted so larger bars == more relevant
                ard_params = 1. / p.lengthscale
            x = np.arange(x0, x0 + len(ard_params))
            bars.append(ax.bar(x, ard_params, align='center', color=c, edgecolor='k', linewidth=1.2, label=p.name.replace("_"," ")))
            xticklabels.extend([r"$\mathrm{{{name}}}\ {x}$".format(name=p.name, x=i) for i in np.arange(len(ard_params))])
            x0 += len(ard_params)
    x = np.arange(x0)
    # two text transforms: one nudged down (label inside the bar), one nudged
    # up (label above a bar too short to contain it)
    transOffset = offset_copy(ax.transData, fig=fig,
                              x=0., y= -2., units='points')
    transOffsetUp = offset_copy(ax.transData, fig=fig,
                                x=0., y=1., units='points')
    for bar in bars:
        for patch, num in zip(bar.patches, np.arange(len(bar.patches))):
            height = patch.get_height()
            xi = patch.get_x() + patch.get_width() / 2.
            va = 'top'
            c = 'w'
            # TextPath is only used to measure the label's extent against the
            # bar height, to decide inside-vs-above placement
            t = TextPath((0, 0), "${xi}$".format(xi=xi), rotation=0, usetex=True, ha='center')
            transform = transOffset
            if patch.get_extents().height <= t.get_extents().height + 3:
                va = 'bottom'
                c = 'k'
                transform = transOffsetUp
            ax.text(xi, height, "${xi}$".format(xi=int(num)), color=c, rotation=0, ha='center', va=va, transform=transform)
    # for xi, t in zip(x, xticklabels):
    #     ax.text(xi, maxi / 2, t, rotation=90, ha='center', va='center')
    # ax.set_xticklabels(xticklabels, rotation=17)
    ax.set_xticks([])
    ax.set_xlim(-.5, x0 - .5)
    if legend:
        # NOTE(review): `title is ''` is an identity comparison with a literal;
        # it happens to work on CPython but `title == ''` is the safe test.
        if title is '':
            mode = 'expand'
            if len(bars) > 1:
                mode = 'expand'
            ax.legend(bbox_to_anchor=(0., 1.02, 1., 1.02), loc=3,
                      ncol=len(bars), mode=mode, borderaxespad=0.)
            fig.tight_layout(rect=(0, 0, 1, .9))
        else:
            ax.legend()
    return ax
def plot(kernel, x=None, plot_limits=None, which_parts='all', resolution=None, *args, **kwargs):
    """Plot the covariance of `kernel` against one fixed input `x`.

    For 1-D kernels, plots k(x', x) along a line of inputs x'; for 2-D
    kernels, draws a contour plot of k(x', x) over a grid.  Extra *args and
    **kwargs are passed through to pylab's plot/contour call.

    :param kernel: the kernel to plot; only input_dim 1 or 2 is supported
    :param x: the fixed second argument of the covariance (defaults to zero)
    :param plot_limits: [xmin, xmax] limits of the plot (defaults to x +- 5)
    :param which_parts: which additive kernel parts to include ('all' or a
        list of booleans, one per part)
    :param resolution: number of grid points (defaults to 201 in 1-D and to
        a 51x51 grid in 2-D)
    :raises ValueError: if plot_limits does not have length 2
    :raises NotImplementedError: for kernels with more than two input dims

    NOTE(review): the `== None` comparisons and the Python-2 `raise X, msg`
    statements below are py2-era idioms kept as-is.
    """
    if which_parts == 'all':
        which_parts = [True] * kernel.size
    if kernel.input_dim == 1:
        if x is None:
            x = np.zeros((1, 1))
        else:
            x = np.asarray(x)
            assert x.size == 1, "The size of the fixed variable x is not 1"
            x = x.reshape((1, 1))
        if plot_limits == None:
            xmin, xmax = (x - 5).flatten(), (x + 5).flatten()
        elif len(plot_limits) == 2:
            xmin, xmax = plot_limits
        else:
            raise ValueError, "Bad limits for plotting"
        Xnew = np.linspace(xmin, xmax, resolution or 201)[:, None]
        Kx = kernel.K(Xnew, x, which_parts)
        pb.plot(Xnew, Kx, *args, **kwargs)
        pb.xlim(xmin, xmax)
        pb.xlabel("x")
        pb.ylabel("k(x,%0.1f)" % x)
    elif kernel.input_dim == 2:
        if x is None:
            x = np.zeros((1, 2))
        else:
            x = np.asarray(x)
            assert x.size == 2, "The size of the fixed variable x is not 2"
            x = x.reshape((1, 2))
        if plot_limits == None:
            xmin, xmax = (x - 5).flatten(), (x + 5).flatten()
        elif len(plot_limits) == 2:
            xmin, xmax = plot_limits
        else:
            raise ValueError, "Bad limits for plotting"
        resolution = resolution or 51
        # complex step in mgrid means "this many points" rather than a stride
        xx, yy = np.mgrid[xmin[0]:xmax[0]:1j * resolution, xmin[1]:xmax[1]:1j * resolution]
        xg = np.linspace(xmin[0], xmax[0], resolution)
        yg = np.linspace(xmin[1], xmax[1], resolution)
        Xnew = np.vstack((xx.flatten(), yy.flatten())).T
        Kx = kernel.K(Xnew, x, which_parts)
        Kx = Kx.reshape(resolution, resolution).T
        pb.contour(xg, yg, Kx, vmin=Kx.min(), vmax=Kx.max(), cmap=pb.cm.jet, *args, **kwargs) # @UndefinedVariable
        pb.xlim(xmin[0], xmax[0])
        pb.ylim(xmin[1], xmax[1])
        pb.xlabel("x1")
        pb.ylabel("x2")
        pb.title("k(x1,x2 ; %0.1f,%0.1f)" % (x[0, 0], x[0, 1]))
    else:
        raise NotImplementedError, "Cannot plot a kernel with more than two input dimensions"

View file

@ -80,13 +80,13 @@ class AxisChangedController(AxisEventController):
class BufferedAxisChangedController(AxisChangedController): class BufferedAxisChangedController(AxisChangedController):
def __init__(self, ax, plot_function, plot_limits, resolution=50, update_lim=None, **kwargs): def __init__(self, ax, plot_function, plot_limits, resolution=50, update_lim=None, **kwargs):
""" """
:param plot_function: :param plot_function:
function to use for creating image for plotting (return ndarray-like) function to use for creating image for plotting (return ndarray-like)
plot_function gets called with (2D!) Xtest grid if replotting required plot_function gets called with (2D!) Xtest grid if replotting required
:type plot_function: function :type plot_function: function
:param plot_limits: :param plot_limits:
beginning plot limits [xmin, ymin, xmax, ymax] beginning plot limits [xmin, ymin, xmax, ymax]
:param kwargs: additional kwargs are for pyplot.imshow(**kwargs) :param kwargs: additional kwargs are for pyplot.imshow(**kwargs)
""" """
super(BufferedAxisChangedController, self).__init__(ax, update_lim=update_lim) super(BufferedAxisChangedController, self).__init__(ax, update_lim=update_lim)
@ -137,6 +137,3 @@ class BufferedAxisChangedController(AxisChangedController):
except: except:
buffersize = .4 buffersize = .4
return buffersize return buffersize

View file

@ -3,7 +3,7 @@ Created on 24 Jul 2013
@author: maxz @author: maxz
''' '''
from GPy.util.latent_space_visualizations.controllers.axis_event_controller import BufferedAxisChangedController from axis_event_controller import BufferedAxisChangedController
import itertools import itertools
import numpy import numpy
@ -11,13 +11,13 @@ import numpy
class ImshowController(BufferedAxisChangedController): class ImshowController(BufferedAxisChangedController):
def __init__(self, ax, plot_function, plot_limits, resolution=50, update_lim=.5, **kwargs): def __init__(self, ax, plot_function, plot_limits, resolution=50, update_lim=.5, **kwargs):
""" """
:param plot_function: :param plot_function:
function to use for creating image for plotting (return ndarray-like) function to use for creating image for plotting (return ndarray-like)
plot_function gets called with (2D!) Xtest grid if replotting required plot_function gets called with (2D!) Xtest grid if replotting required
:type plot_function: function :type plot_function: function
:param plot_limits: :param plot_limits:
beginning plot limits [xmin, ymin, xmax, ymax] beginning plot limits [xmin, ymin, xmax, ymax]
:param kwargs: additional kwargs are for pyplot.imshow(**kwargs) :param kwargs: additional kwargs are for pyplot.imshow(**kwargs)
""" """
super(ImshowController, self).__init__(ax, plot_function, plot_limits, resolution, update_lim, **kwargs) super(ImshowController, self).__init__(ax, plot_function, plot_limits, resolution, update_lim, **kwargs)
@ -36,7 +36,7 @@ class ImshowController(BufferedAxisChangedController):
class ImAnnotateController(ImshowController): class ImAnnotateController(ImshowController):
def __init__(self, ax, plot_function, plot_limits, resolution=20, update_lim=.99, **kwargs): def __init__(self, ax, plot_function, plot_limits, resolution=20, update_lim=.99, **kwargs):
""" """
:param plot_function: :param plot_function:
function to use for creating image for plotting (return ndarray-like) function to use for creating image for plotting (return ndarray-like)
plot_function gets called with (2D!) Xtest grid if replotting required plot_function gets called with (2D!) Xtest grid if replotting required
:type plot_function: function :type plot_function: function

View file

@ -0,0 +1,81 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import pylab as pb
import numpy as np
import Tango
from base_plots import x_frame1D, x_frame2D
def plot_mapping(self, plot_limits=None, which_data='all', which_parts='all', resolution=None, levels=20, samples=0, fignum=None, ax=None, fixed_inputs=[], linecol=Tango.colorsHex['darkBlue']):
    """
    Plots the mapping associated with the model.
      - In one dimension, the function is plotted.
      - In two dimensions, a contour-plot shows the function
      - In higher dimensions, we've not implemented this yet !TODO!

    Can plot only part of the data and part of the posterior functions
    using which_data and which_functions

    :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaults to data limits
    :type plot_limits: np.array
    :param which_data: which of the training data to plot (default all)
    :type which_data: 'all' or a slice object to slice self.X, self.Y
    :param which_parts: which of the kernel functions to plot (additively)
    :type which_parts: 'all', or list of bools
    :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D
    :type resolution: int
    :param levels: number of levels to plot in a contour plot.
    :type levels: int
    :param samples: the number of a posteriori samples to plot
    :type samples: int
    :param fignum: figure to plot on.
    :type fignum: figure number
    :param ax: axes to plot on; if None, a new figure is created.
    :type ax: axes handle
    :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v.
    :type fixed_inputs: a list of tuples
    :param linecol: color of line to plot.
    :type linecol:
    """
    # TODO include samples
    if which_data == 'all':
        which_data = slice(None)

    if ax is None:
        fig = pb.figure(num=fignum)
        ax = fig.add_subplot(111)

    plotdims = self.input_dim - len(fixed_inputs)

    if plotdims == 1:
        Xu = self.X * self._Xscale + self._Xoffset # NOTE self.X are the normalized values now

        fixed_dims = np.array([i for i, v in fixed_inputs])
        freedim = np.setdiff1d(np.arange(self.input_dim), fixed_dims)

        Xnew, xmin, xmax = x_frame1D(Xu[:, freedim], plot_limits=plot_limits)
        Xgrid = np.empty((Xnew.shape[0], self.input_dim))
        Xgrid[:, freedim] = Xnew
        for i, v in fixed_inputs:
            Xgrid[:, i] = v

        # NOTE(review): assumes self.predict returns a (N, output_dim) array
        # here -- confirm; other plotting code unpacks a (mean, var, lower,
        # upper) tuple from model.predict.
        f = self.predict(Xgrid, which_parts=which_parts)
        # fixed: loop previously iterated over the undefined name `y`, and
        # `edgecol` is not a valid Line2D keyword -- use `color`
        for d in range(f.shape[1]):
            ax.plot(Xnew, f[:, d], color=linecol)

    elif self.X.shape[1] == 2:
        resolution = resolution or 50
        Xnew, _, _, xmin, xmax = x_frame2D(self.X, plot_limits, resolution)
        x, y = np.linspace(xmin[0], xmax[0], resolution), np.linspace(xmin[1], xmax[1], resolution)
        f = self.predict(Xnew, which_parts=which_parts)
        # fixed: `m` was used before assignment and the un-reshaped `f` was
        # passed to contour -- reshape the prediction onto the grid first
        m = f.reshape(resolution, resolution).T
        ax.contour(x, y, m, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet) # @UndefinedVariable
        ax.set_xlim(xmin[0], xmax[0])
        ax.set_ylim(xmin[1], xmax[1])

    else:
        raise NotImplementedError("Cannot define a frame with more than two input dimensions")

View file

@ -0,0 +1,161 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import pylab as pb
import numpy as np
import Tango
from base_plots import gpplot, x_frame1D, x_frame2D
def plot_fit(model, plot_limits=None, which_data_rows='all',
        which_data_ycols='all', which_parts='all', fixed_inputs=[],
        levels=20, samples=0, fignum=None, ax=None, resolution=None,
        plot_raw=False,
        linecol=Tango.colorsHex['darkBlue'],fillcol=Tango.colorsHex['lightBlue']):
    """
    Plot the posterior of the GP.
      - In one dimension, the function is plotted with a shaded region identifying two standard deviations.
      - In two dimensions, a contour-plot shows the mean predicted function
      - In higher dimensions, use fixed_inputs to plot the GP with some of the inputs fixed.

    Can plot only part of the data and part of the posterior functions
    using which_data_rows, which_data_ycols and which_parts

    :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaults to data limits
    :type plot_limits: np.array
    :param which_data_rows: which of the training data to plot (default all)
    :type which_data_rows: 'all' or a slice object to slice model.X, model.Y
    :param which_data_ycols: when the data has several columns (independant outputs), only plot these
    :type which_data_ycols: 'all' or a list of integers
    :param which_parts: which of the kernel functions to plot (additively)
    :type which_parts: 'all', or list of bools
    :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v.
    :type fixed_inputs: a list of tuples
    :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D
    :type resolution: int
    :param levels: number of levels to plot in a contour plot.
    :type levels: int
    :param samples: the number of a posteriori samples to plot
    :type samples: int
    :param fignum: figure to plot on.
    :type fignum: figure number
    :param ax: axes to plot on.
    :type ax: axes handle
    :param plot_raw: plot the latent (normalized, pre-likelihood) function instead of the predictive
    :type plot_raw: bool
    :param linecol: color of line to plot.
    :type linecol:
    :param fillcol: color of the two-standard-deviation fill
    """
    #deal with optional arguments
    if which_data_rows == 'all':
        which_data_rows = slice(None)
    if which_data_ycols == 'all':
        which_data_ycols = np.arange(model.output_dim)
    if len(which_data_ycols)==0:
        raise ValueError('No data selected for plotting')
    if ax is None:
        fig = pb.figure(num=fignum)
        ax = fig.add_subplot(111)

    #work out what the inputs are for plotting (1D or 2D)
    fixed_dims = np.array([i for i,v in fixed_inputs])
    free_dims = np.setdiff1d(np.arange(model.input_dim),fixed_dims)

    #one dimensional plotting
    if len(free_dims) == 1:
        #define the frame on which to plot
        resolution = resolution or 200
        Xnew, xmin, xmax = x_frame1D(model.X[:,free_dims], plot_limits=plot_limits)
        Xgrid = np.empty((Xnew.shape[0],model.input_dim))
        Xgrid[:,free_dims] = Xnew
        for i,v in fixed_inputs:
            Xgrid[:,i] = v

        #make a prediction on the frame and plot it
        if plot_raw:
            m, v = model._raw_predict(Xgrid, which_parts=which_parts)
            lower = m - 2*np.sqrt(v)
            upper = m + 2*np.sqrt(v)
            Y = model.Y
        else:
            m, v, lower, upper = model.predict(Xgrid, which_parts=which_parts)
            Y = model.Y
        for d in which_data_ycols:
            gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], axes=ax, edgecol=linecol, fillcol=fillcol)
            ax.plot(model.X[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', mew=1.5)

        #optionally plot some samples
        if samples: #NOTE not tested with fixed_inputs
            Ysim = model.posterior_samples(Xgrid, samples, which_parts=which_parts)
            for yi in Ysim.T:
                ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25)
                #ax.plot(Xnew, yi[:,None], marker='x', linestyle='--',color=Tango.colorsHex['darkBlue']) #TODO apply this line for discrete outputs.

        #add inducing inputs (if a sparse model is used)
        if hasattr(model,"Z"):
            Zu = model.Z[:,free_dims] * model._Xscale[:,free_dims] + model._Xoffset[:,free_dims]
            ax.plot(Zu, np.zeros_like(Zu) + ax.get_ylim()[0], 'r|', mew=1.5, markersize=12)

        #add error bars for uncertain inputs (if input uncertainty is being modelled)
        if hasattr(model,"has_uncertain_inputs"):
            # fixed: `which_data` was an undefined name here; this function's
            # row selector is `which_data_rows`
            ax.errorbar(model.X[which_data_rows, free_dims], model.likelihood.data[which_data_rows, 0],
                        xerr=2 * np.sqrt(model.X_variance[which_data_rows, free_dims]),
                        ecolor='k', fmt=None, elinewidth=.5, alpha=.5)

        #set the limits of the plot to some sensible values
        ymin, ymax = min(np.append(Y[which_data_rows, which_data_ycols].flatten(), lower)), max(np.append(Y[which_data_rows, which_data_ycols].flatten(), upper))
        ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin)
        ax.set_xlim(xmin, xmax)
        ax.set_ylim(ymin, ymax)

    #2D plotting
    elif len(free_dims) == 2:
        #define the frame for plotting on
        resolution = resolution or 50
        Xnew, _, _, xmin, xmax = x_frame2D(model.X[:,free_dims], plot_limits, resolution)
        Xgrid = np.empty((Xnew.shape[0],model.input_dim))
        Xgrid[:,free_dims] = Xnew
        for i,v in fixed_inputs:
            Xgrid[:,i] = v
        x, y = np.linspace(xmin[0], xmax[0], resolution), np.linspace(xmin[1], xmax[1], resolution)

        #predict on the frame and plot it
        if plot_raw:
            m, _ = model._raw_predict(Xgrid, which_parts=which_parts)
            Y = model.likelihood.Y
        else:
            m, _, _, _ = model.predict(Xgrid, which_parts=which_parts,sampling=False)
            Y = model.likelihood.data
        for d in which_data_ycols:
            m_d = m[:,d].reshape(resolution, resolution).T
            ax.contour(x, y, m_d, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet)
            ax.scatter(model.X[which_data_rows, free_dims[0]], model.X[which_data_rows, free_dims[1]], 40, Y[which_data_rows, d], cmap=pb.cm.jet, vmin=m.min(), vmax=m.max(), linewidth=0.)

        #set the limits of the plot to some sensible values
        ax.set_xlim(xmin[0], xmax[0])
        ax.set_ylim(xmin[1], xmax[1])

        if samples:
            # fixed: `warnings` was used without being imported in this module
            import warnings
            warnings.warn("Samples are rather difficult to plot for 2D inputs...")

        #add inducing inputs (if a sparse model is used)
        if hasattr(model,"Z"):
            Zu = model.Z[:,free_dims] * model._Xscale[:,free_dims] + model._Xoffset[:,free_dims]
            ax.plot(Zu[:,free_dims[0]], Zu[:,free_dims[1]], 'wo')

    else:
        raise NotImplementedError("Cannot define a frame with more than two input dimensions")
def plot_f_fit(model, *args, **kwargs):
    """
    Plot the GP's view of the world: the normalized data, before the
    likelihood is applied.

    All positional and keyword arguments are forwarded to
    models_plots.plot; ``plot_raw`` is forced to True.
    """
    forwarded = dict(kwargs, plot_raw=True)
    plot(model, *args, **forwarded)

View file

@ -0,0 +1,29 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import pylab as pb
def univariate_plot(prior):
    """
    Visualize a univariate prior: histogram 1000 random draws and overlay
    the prior's pdf as a red line over the histogram's x-range.
    """
    samples = prior.rvs(1000)
    pb.hist(samples, 100, normed=True)
    lo, hi = pb.xlim()
    grid = np.linspace(lo, hi, 1000)
    pb.plot(grid, prior.pdf(grid), 'r', linewidth=2)
def plot(prior):
    """
    Visualize a two-dimensional prior: scatter 200 random draws and overlay
    contour lines of the pdf on a 100x100 grid spanning the sampled region.

    Only priors with input_dim == 2 are supported.
    """
    if prior.input_dim != 2:
        raise NotImplementedError("Cannot define a frame with more than two input dimensions")
    draws = prior.rvs(200)
    pb.plot(draws[:, 0], draws[:, 1], 'kx', mew=1.5)
    x0, x1 = pb.xlim()
    y0, y1 = pb.ylim()
    gx, gy = np.mgrid[x0:x1:100j, y0:y1:100j]
    flat = np.vstack((gx.flatten(), gy.flatten())).T
    density = prior.pdf(flat).reshape(100, 100)
    pb.contour(gx, gy, density, linewidths=2)

View file

@ -0,0 +1,43 @@
# Copyright (c) 2012, James Hensman and Nicolo' Fusi
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
import pylab as pb
def plot(model, ax=None, fignum=None, Z_height=None, **kwargs):
    """
    Plot a sparse GP model: the underlying GP fit plus the inducing inputs.

    :param model: the sparse GP model to plot.
    :param ax: matplotlib axis to draw into; a new figure and axis are
        created when None.
    :param fignum: figure number used when a new figure is created.
    :param Z_height: y-coordinate for the 1D inducing-input markers;
        defaults to the bottom of the y-axis.
    :param kwargs: forwarded to GP.plot.
    """
    if ax is None:
        fig = pb.figure(num=fignum)
        ax = fig.add_subplot(111)
    # Horrible hack: GP.plot draws model.likelihood.data, but for a sparse
    # model we want the targets model.Y shown instead. Swap the data in,
    # plot, then restore -- the finally guarantees the model is left
    # untouched even if GP.plot raises.
    data = model.likelihood.data.copy()
    model.likelihood.data = model.Y
    try:
        GP.plot(model, ax=ax, **kwargs)
    finally:
        model.likelihood.data = data
    # Inducing inputs are stored in the normalized space; map them back to
    # the original data space for plotting.
    Zu = model.Z * model._Xscale + model._Xoffset
    if model.input_dim == 1:
        ax.plot(model.X_batch, model.likelihood.data, 'gx', mew=2)
        if Z_height is None:
            Z_height = ax.get_ylim()[0]
        ax.plot(Zu, np.zeros_like(Zu) + Z_height, 'r|', mew=1.5, markersize=12)
    if model.input_dim == 2:
        ax.scatter(model.X[:, 0], model.X[:, 1], 20., model.Y[:, 0], linewidth=0, cmap=pb.cm.jet)  # @UndefinedVariable
        ax.plot(Zu[:, 0], Zu[:, 1], 'w^')
def plot_traces(model):
    """
    Plot the traces recorded on *model* during optimisation.

    Top panel: one line per parameter trace, skipping parameters whose
    names start with 'iip'. Bottom panel: the stochastic likelihood trace.
    """
    pb.figure()
    trace = np.array(model._param_trace)
    pb.subplot(2, 1, 1)
    for name, values in zip(model._get_param_names(), trace.T):
        if not name.startswith('iip'):
            pb.plot(values, label=name)
    pb.legend(loc=0)
    pb.subplot(2, 1, 2)
    pb.plot(np.asarray(model._ll_trace), label='stochastic likelihood')
    pb.legend(loc=0)

View file

@ -0,0 +1,45 @@
import numpy as np
import pylab as pb
def plot(parameterized, fignum=None, ax=None, colors=None):
    """
    Plot latent space X in 1D: one subplot per latent dimension, each
    showing the dimension's mean with a 2-sigma band.

    :param parameterized: object with `.means` and `.variances` arrays
        (rows index data points, columns index latent dimensions).
    :param fignum: figure number used when a new figure is created.
    :param ax: list/tuple of axes, one per latent dimension, to plot into;
        a new figure with the required subplots is created when None.
    :param colors: iterable of colors for the latent dimensions; defaults
        to matplotlib's color cycle.
    :returns: the created figure, or None when plotting into caller axes.
    """
    if ax is None:
        fig = pb.figure(num=fignum, figsize=(8, min(12, (2 * parameterized.means.shape[1]))))
    if colors is None:
        colors = pb.gca()._get_lines.color_cycle
        pb.clf()
    else:
        colors = iter(colors)
    plots = []
    # NOTE(review): param_to_array is not imported in this module -- it
    # presumably comes from the parameterization utilities; confirm the
    # import exists.
    means, variances = param_to_array(parameterized.means, parameterized.variances)
    x = np.arange(means.shape[0])
    for i in range(means.shape[1]):
        if ax is None:
            a = fig.add_subplot(means.shape[1], 1, i + 1)
        elif isinstance(ax, (tuple, list)):
            a = ax[i]
        else:
            raise ValueError("Need one ax per latent dimension input_dim")
        # all dimensions as a faint background for context
        a.plot(means, c='k', alpha=.3)
        plots.extend(a.plot(x, means.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i)))
        # shade mean +/- 2 standard deviations
        a.fill_between(x,
                       means.T[i] - 2 * np.sqrt(variances.T[i]),
                       means.T[i] + 2 * np.sqrt(variances.T[i]),
                       facecolor=plots[-1].get_color(),
                       alpha=.3)
        a.legend(borderaxespad=0.)
        a.set_xlim(x.min(), x.max())
        if i < means.shape[1] - 1:
            a.set_xticklabels('')
    pb.draw()
    if ax is None:
        # Only lay out / return the figure we created ourselves; the
        # original referenced `fig` unconditionally, raising NameError
        # whenever caller-supplied axes were used.
        fig.tight_layout(h_pad=.01)  # , rect=(0, 0, 1, .95))
        return fig

View file

@ -177,7 +177,7 @@ class lvm_subplots(lvm):
assert len(latent_axes)==self.nplots assert len(latent_axes)==self.nplots
if vals==None: if vals==None:
vals = Model.X[0, :] vals = Model.X[0, :]
self.latent_values = vals self.latent_values = vals
for i, axis in enumerate(latent_axes): for i, axis in enumerate(latent_axes):
if i == self.nplots-1: if i == self.nplots-1:
@ -195,7 +195,7 @@ class lvm_dimselect(lvm):
A visualizer for latent variable models which allows selection of the latent dimensions to use by clicking on a bar chart of their length scales. A visualizer for latent variable models which allows selection of the latent dimensions to use by clicking on a bar chart of their length scales.
For an example of the visualizer's use try: For an example of the visualizer's use try:
GPy.examples.dimensionality_reduction.BGPVLM_oil() GPy.examples.dimensionality_reduction.BGPVLM_oil()
""" """
@ -219,11 +219,11 @@ class lvm_dimselect(lvm):
new_index = max(0,min(int(np.round(event.xdata-0.5)),self.model.input_dim-1)) new_index = max(0,min(int(np.round(event.xdata-0.5)),self.model.input_dim-1))
if event.button == 1: if event.button == 1:
# Make it red if and y-axis (red=port=left) if it is a left button click # Make it red if and y-axis (red=port=left) if it is a left button click
self.latent_index[1] = new_index self.latent_index[1] = new_index
else: else:
# Make it green and x-axis (green=starboard=right) if it is a right button click # Make it green and x-axis (green=starboard=right) if it is a right button click
self.latent_index[0] = new_index self.latent_index[0] = new_index
self.show_sensitivities() self.show_sensitivities()
self.latent_axes.cla() self.latent_axes.cla()
@ -288,7 +288,7 @@ class image_show(matplotlib_show):
def modify(self, vals): def modify(self, vals):
self.set_image(vals.copy()) self.set_image(vals.copy())
self.handle.set_array(self.vals) self.handle.set_array(self.vals)
self.axes.figure.canvas.draw() self.axes.figure.canvas.draw()
def set_image(self, vals): def set_image(self, vals):
dim = self.dimensions[0] * self.dimensions[1] dim = self.dimensions[0] * self.dimensions[1]
@ -306,7 +306,7 @@ class image_show(matplotlib_show):
last_col = (iC+1)*self.dimensions[1] last_col = (iC+1)*self.dimensions[1]
self.vals[first_row:last_row, first_col:last_col] = cur_img self.vals[first_row:last_row, first_col:last_col] = cur_img
else: else:
self.vals = np.reshape(vals[0,dim*self.select_image+np.array(range(dim))], self.dimensions, order=self.order) self.vals = np.reshape(vals[0,dim*self.select_image+np.array(range(dim))], self.dimensions, order=self.order)
if self.transpose: if self.transpose:
self.vals = self.vals.T self.vals = self.vals.T
@ -359,7 +359,7 @@ class mocap_data_show_vpython(vpython_show):
def modify_edges(self): def modify_edges(self):
self.line_handle = [] self.line_handle = []
if not self.connect==None: if not self.connect==None:
self.I, self.J = np.nonzero(self.connect) self.I, self.J = np.nonzero(self.connect)
for rod, i, j in zip(self.rods, self.I, self.J): for rod, i, j in zip(self.rods, self.I, self.J):
rod.pos, rod.axis = self.pos_axis(i, j) rod.pos, rod.axis = self.pos_axis(i, j)
@ -404,7 +404,7 @@ class mocap_data_show(matplotlib_show):
def draw_vertices(self): def draw_vertices(self):
self.points_handle = self.axes.scatter(self.vals[:, 0], self.vals[:, 1], self.vals[:, 2]) self.points_handle = self.axes.scatter(self.vals[:, 0], self.vals[:, 1], self.vals[:, 2])
def draw_edges(self): def draw_edges(self):
self.line_handle = [] self.line_handle = []
if not self.connect==None: if not self.connect==None:
@ -423,7 +423,7 @@ class mocap_data_show(matplotlib_show):
z.append(self.vals[j, 2]) z.append(self.vals[j, 2])
z.append(np.NaN) z.append(np.NaN)
self.line_handle = self.axes.plot(np.array(x), np.array(y), np.array(z), 'b-') self.line_handle = self.axes.plot(np.array(x), np.array(y), np.array(z), 'b-')
def modify(self, vals): def modify(self, vals):
self.vals = vals.copy() self.vals = vals.copy()
self.process_values() self.process_values()

View file

@ -4,21 +4,12 @@
import linalg import linalg
import misc import misc
import plot
import squashers import squashers
import Tango
import warping_functions import warping_functions
import datasets import datasets
import mocap import mocap
import visualize
import decorators import decorators
import classification import classification
import latent_space_visualizations
try:
import maps
except:
pass
maps = "warning: the maps module requires pyshp (shapefile). Install it to remove this message"
try: try:
import sympy import sympy
@ -29,5 +20,3 @@ except ImportError as e:
if _sympy_available: if _sympy_available:
import symbolic import symbolic
import netpbmfile

View file

@ -279,14 +279,14 @@ def ppca(Y, Q, iterations=100):
def ppca_missing_data_at_random(Y, Q, iters=100): def ppca_missing_data_at_random(Y, Q, iters=100):
""" """
EM implementation of Probabilistic pca for when there is missing data. EM implementation of Probabilistic pca for when there is missing data.
Taken from <SheffieldML, https://github.com/SheffieldML> Taken from <SheffieldML, https://github.com/SheffieldML>
.. math: .. math:
\\mathbf{Y} = \mathbf{XW} + \\epsilon \\text{, where} \\mathbf{Y} = \mathbf{XW} + \\epsilon \\text{, where}
\\epsilon = \\mathcal{N}(0, \\sigma^2 \mathbf{I}) \\epsilon = \\mathcal{N}(0, \\sigma^2 \mathbf{I})
:returns: X, W, sigma^2 :returns: X, W, sigma^2
""" """
from numpy.ma import dot as madot from numpy.ma import dot as madot
import diag import diag
@ -300,19 +300,21 @@ def ppca_missing_data_at_random(Y, Q, iters=100):
nu = 1. nu = 1.
#num_obs_i = 1./Y.count() #num_obs_i = 1./Y.count()
Ycentered = Y - Y.mean(0) Ycentered = Y - Y.mean(0)
X = np.zeros((N,Q)) X = np.zeros((N,Q))
cs = common_subarrays(Y.mask) cs = common_subarrays(Y.mask)
cr = common_subarrays(Y.mask, 1) cr = common_subarrays(Y.mask, 1)
Sigma = np.zeros((N, Q, Q)) Sigma = np.zeros((N, Q, Q))
Sigma2 = np.zeros((N, Q, Q)) Sigma2 = np.zeros((N, Q, Q))
mu = np.zeros(D) mu = np.zeros(D)
"""
if debug: if debug:
import matplotlib.pyplot as pylab import matplotlib.pyplot as pylab
fig = pylab.figure("FIT MISSING DATA"); fig = pylab.figure("FIT MISSING DATA");
ax = fig.gca() ax = fig.gca()
ax.cla() ax.cla()
lines = pylab.plot(np.zeros((N,Q)).dot(W)) lines = pylab.plot(np.zeros((N,Q)).dot(W))
"""
W2 = np.zeros((Q,D)) W2 = np.zeros((Q,D))
for i in range(iters): for i in range(iters):
@ -358,6 +360,7 @@ def ppca_missing_data_at_random(Y, Q, iters=100):
nu2 /= N nu2 /= N
nu4 = (((Ycentered - X.dot(W))**2).sum(0) + W.T.dot(Sigma.sum(0).dot(W)).sum(0)).sum()/N nu4 = (((Ycentered - X.dot(W))**2).sum(0) + W.T.dot(Sigma.sum(0).dot(W)).sum(0)).sum()/N
import ipdb;ipdb.set_trace() import ipdb;ipdb.set_trace()
"""
if debug: if debug:
#print Sigma[0] #print Sigma[0]
print "nu:", nu, "sum(X):", X.sum() print "nu:", nu, "sum(X):", X.sum()
@ -368,6 +371,7 @@ def ppca_missing_data_at_random(Y, Q, iters=100):
ax.set_ylim(pred_y.min(), pred_y.max()) ax.set_ylim(pred_y.min(), pred_y.max())
fig.canvas.draw() fig.canvas.draw()
time.sleep(.3) time.sleep(.3)
"""
return np.asarray_chkfinite(X), np.asarray_chkfinite(W), nu return np.asarray_chkfinite(X), np.asarray_chkfinite(W), nu