diff --git a/GPy/core/gp.py b/GPy/core/gp.py index 6f02d7df..f54f5721 100644 --- a/GPy/core/gp.py +++ b/GPy/core/gp.py @@ -2,10 +2,9 @@ # Licensed under the BSD 3-clause license (see LICENSE.txt) import numpy as np -import pylab as pb +import sys import warnings from .. import kern -from ..util.plot import gpplot, Tango, x_frame1D, x_frame2D from ..util.linalg import dtrtrs from model import Model from parameterization import ObservableArray @@ -122,9 +121,9 @@ class GP(Model): :param X: The points at which to take the samples. :type X: np.ndarray, Nnew x self.input_dim. - :param size: the number of a posteriori samples to plot. + :param size: the number of a posteriori samples. :type size: int. - :param which_parts: which of the kernel functions to plot (additively). + :param which_parts: which of the kernel functions to use (additively). :type which_parts: 'all', or list of bools. :param full_cov: whether to return the full covariance matrix, or just the diagonal. :type full_cov: bool. @@ -145,9 +144,9 @@ class GP(Model): :param X: the points at which to take the samples. :type X: np.ndarray, Nnew x self.input_dim. - :param size: the number of a posteriori samples to plot. + :param size: the number of a posteriori samples. :type size: int. - :param which_parts: which of the kernel functions to plot (additively). + :param which_parts: which of the kernel functions to use (additively). :type which_parts: 'all', or list of bools. :param full_cov: whether to return the full covariance matrix, or just the diagonal. :type full_cov: bool. @@ -172,20 +171,13 @@ class GP(Model): """ Plot the GP's view of the world, where the data is normalized and before applying a likelihood. - This is a convenience function: we simply call self.plot with the - argument use_raw_predict set True. All args and kwargs are passed on to - plot. 
- - see also: gp.plot + This is a convenience function: arguments are passed to GPy.plotting.matplot_dep.models_plots.plot_f_fit """ - kwargs['plot_raw'] = True - self.plot(*args, **kwargs) + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." + from ..plotting.matplot_dep import models_plots + models_plots.plot_fit_f(self,*args,**kwargs) - def plot(self, plot_limits=None, which_data_rows='all', - which_data_ycols='all', which_parts='all', fixed_inputs=[], - levels=20, samples=0, fignum=None, ax=None, resolution=None, - plot_raw=False, - linecol=Tango.colorsHex['darkBlue'],fillcol=Tango.colorsHex['lightBlue']): + def plot(self, *args): """ Plot the posterior of the GP. - In one dimension, the function is plotted with a shaded region identifying two standard deviations. @@ -193,121 +185,13 @@ class GP(Model): - In higher dimensions, use fixed_inputs to plot the GP with some of the inputs fixed. Can plot only part of the data and part of the posterior functions - using which_data_rowsm which_data_ycols and which_parts + using which_data_rows which_data_ycols and which_parts - :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaluts to data limits - :type plot_limits: np.array - :param which_data_rows: which of the training data to plot (default all) - :type which_data_rows: 'all' or a slice object to slice self.X, self.Y - :param which_data_ycols: when the data has several columns (independant outputs), only plot these - :type which_data_rows: 'all' or a list of integers - :param which_parts: which of the kernel functions to plot (additively) - :type which_parts: 'all', or list of bools - :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v. - :type fixed_inputs: a list of tuples - :param resolution: the number of intervals to sample the GP on. 
Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D - :type resolution: int - :param levels: number of levels to plot in a contour plot. - :type levels: int - :param samples: the number of a posteriori samples to plot - :type samples: int - :param fignum: figure to plot on. - :type fignum: figure number - :param ax: axes to plot on. - :type ax: axes handle - :type output: integer (first output is 0) - :param linecol: color of line to plot. - :type linecol: - :param fillcol: color of fill - :param levels: for 2D plotting, the number of contour levels to use is ax is None, create a new figure + This is a convenience function: arguments are passed to GPy.plotting.matplot_dep.models_plots.plot_fit """ - #deal with optional arguments - if which_data_rows == 'all': - which_data_rows = slice(None) - if which_data_ycols == 'all': - which_data_ycols = np.arange(self.output_dim) - if len(which_data_ycols)==0: - raise ValueError('No data selected for plotting') - if ax is None: - fig = pb.figure(num=fignum) - ax = fig.add_subplot(111) - - #work out what the inputs are for plotting (1D or 2D) - fixed_dims = np.array([i for i,v in fixed_inputs]) - free_dims = np.setdiff1d(np.arange(self.input_dim),fixed_dims) - - #one dimensional plotting - if len(free_dims) == 1: - - #define the frame on which to plot - resolution = resolution or 200 - Xnew, xmin, xmax = x_frame1D(self.X[:,free_dims], plot_limits=plot_limits) - Xgrid = np.empty((Xnew.shape[0],self.input_dim)) - Xgrid[:,free_dims] = Xnew - for i,v in fixed_inputs: - Xgrid[:,i] = v - - #make a prediction on the frame and plot it - if plot_raw: - m, v = self._raw_predict(Xgrid, which_parts=which_parts) - lower = m - 2*np.sqrt(v) - upper = m + 2*np.sqrt(v) - Y = self.Y - else: - m, v, lower, upper = self.predict(Xgrid, which_parts=which_parts) - Y = self.Y - for d in which_data_ycols: - gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], axes=ax, edgecol=linecol, fillcol=fillcol) - ax.plot(self.X[which_data_rows,free_dims], 
Y[which_data_rows, d], 'kx', mew=1.5) - - #optionally plot some samples - if samples: #NOTE not tested with fixed_inputs - Ysim = self.posterior_samples(Xgrid, samples, which_parts=which_parts) - for yi in Ysim.T: - ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25) - #ax.plot(Xnew, yi[:,None], marker='x', linestyle='--',color=Tango.colorsHex['darkBlue']) #TODO apply this line for discrete outputs. - - #set the limits of the plot to some sensible values - ymin, ymax = min(np.append(Y[which_data_rows, which_data_ycols].flatten(), lower)), max(np.append(Y[which_data_rows, which_data_ycols].flatten(), upper)) - ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin) - ax.set_xlim(xmin, xmax) - ax.set_ylim(ymin, ymax) - - #2D plotting - elif len(free_dims) == 2: - - #define the frame for plotting on - resolution = resolution or 50 - Xnew, _, _, xmin, xmax = x_frame2D(self.X[:,free_dims], plot_limits, resolution) - Xgrid = np.empty((Xnew.shape[0],self.input_dim)) - Xgrid[:,free_dims] = Xnew - for i,v in fixed_inputs: - Xgrid[:,i] = v - x, y = np.linspace(xmin[0], xmax[0], resolution), np.linspace(xmin[1], xmax[1], resolution) - - #predict on the frame and plot - if plot_raw: - m, _ = self._raw_predict(Xgrid, which_parts=which_parts) - Y = self.likelihood.Y - else: - m, _, _, _ = self.predict(Xgrid, which_parts=which_parts,sampling=False) - Y = self.likelihood.data - for d in which_data_ycols: - m_d = m[:,d].reshape(resolution, resolution).T - ax.contour(x, y, m_d, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet) - ax.scatter(self.X[which_data_rows, free_dims[0]], self.X[which_data_rows, free_dims[1]], 40, Y[which_data_rows, d], cmap=pb.cm.jet, vmin=m.min(), vmax=m.max(), linewidth=0.) 
- - #set the limits of the plot to some sensible values - ax.set_xlim(xmin[0], xmax[0]) - ax.set_ylim(xmin[1], xmax[1]) - - if samples: - warnings.warn("Samples are rather difficult to plot for 2D inputs...") - - else: - raise NotImplementedError, "Cannot define a frame with more than two input dimensions" - - + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." + from ..plotting.matplot_dep import models_plots + models_plots.plot_fit(self,*args) def _getstate(self): """ @@ -333,5 +217,3 @@ class GP(Model): self.num_data = state.pop() self.X = state.pop() Model._setstate(self, state) - - diff --git a/GPy/core/mapping.py b/GPy/core/mapping.py index 513407eb..ef0af16c 100644 --- a/GPy/core/mapping.py +++ b/GPy/core/mapping.py @@ -1,10 +1,9 @@ # Copyright (c) 2013, GPy authors (see AUTHORS.txt). # Licensed under the BSD 3-clause license (see LICENSE.txt) -from ..util.plot import Tango, x_frame1D, x_frame2D +import sys from parameterization import Parameterized import numpy as np -import pylab as pb class Mapping(Parameterized): """ @@ -47,11 +46,8 @@ class Mapping(Parameterized): raise NotImplementedError - def plot(self, plot_limits=None, which_data='all', which_parts='all', resolution=None, levels=20, samples=0, fignum=None, ax=None, fixed_inputs=[], linecol=Tango.colorsHex['darkBlue']): + def plot(self, *args): """ - - Plot the mapping. - Plots the mapping associated with the model. - In one dimension, the function is plotted. - In two dimsensions, a contour-plot shows the function @@ -60,68 +56,15 @@ class Mapping(Parameterized): Can plot only part of the data and part of the posterior functions using which_data and which_functions - :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. 
Defaluts to data limits - :type plot_limits: np.array - :param which_data: which if the training data to plot (default all) - :type which_data: 'all' or a slice object to slice self.X, self.Y - :param which_parts: which of the kernel functions to plot (additively) - :type which_parts: 'all', or list of bools - :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D - :type resolution: int - :param levels: number of levels to plot in a contour plot. - :type levels: int - :param samples: the number of a posteriori samples to plot - :type samples: int - :param fignum: figure to plot on. - :type fignum: figure number - :param ax: axes to plot on. - :type ax: axes handle - :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v. - :type fixed_inputs: a list of tuples - :param linecol: color of line to plot. - :type linecol: - :param levels: for 2D plotting, the number of contour levels to use is ax is None, create a new figure - + This is a convenience function: arguments are passed to GPy.plotting.matplot_dep.models_plots.plot_mapping """ - # TODO include samples - if which_data == 'all': - which_data = slice(None) - - if ax is None: - fig = pb.figure(num=fignum) - ax = fig.add_subplot(111) - - plotdims = self.input_dim - len(fixed_inputs) - - if plotdims == 1: - - Xu = self.X * self._Xscale + self._Xoffset # NOTE self.X are the normalized values now - - fixed_dims = np.array([i for i,v in fixed_inputs]) - freedim = np.setdiff1d(np.arange(self.input_dim),fixed_dims) - - Xnew, xmin, xmax = x_frame1D(Xu[:,freedim], plot_limits=plot_limits) - Xgrid = np.empty((Xnew.shape[0],self.input_dim)) - Xgrid[:,freedim] = Xnew - for i,v in fixed_inputs: - Xgrid[:,i] = v - - f = self.predict(Xgrid, which_parts=which_parts) - for d in range(y.shape[1]): - ax.plot(Xnew, f[:, d], edgecol=linecol) - - elif self.X.shape[1] == 2: - resolution = resolution or 50 - Xnew, _, _, 
xmin, xmax = x_frame2D(self.X, plot_limits, resolution) - x, y = np.linspace(xmin[0], xmax[0], resolution), np.linspace(xmin[1], xmax[1], resolution) - f = self.predict(Xnew, which_parts=which_parts) - m = m.reshape(resolution, resolution).T - ax.contour(x, y, f, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet) # @UndefinedVariable - ax.set_xlim(xmin[0], xmax[0]) - ax.set_ylim(xmin[1], xmax[1]) + if "matplotlib" in sys.modules: + from ..plotting.matplot_dep import models_plots + models_plots.plot_mapping(self,*args) else: - raise NotImplementedError, "Cannot define a frame with more than two input dimensions" + raise NameError, "matplotlib package has not been imported." + from model import Model @@ -135,14 +78,14 @@ class Mapping_check_model(Model): X = np.random.randn(num_samples, mapping.input_dim) if dL_df==None: dL_df = np.ones((num_samples, mapping.output_dim)) - + self.mapping=mapping self.X = X self.dL_df = dL_df self.num_params = self.mapping.num_params Model.__init__(self) - + def _get_params(self): return self.mapping._get_params() @@ -157,7 +100,7 @@ class Mapping_check_model(Model): def _log_likelihood_gradients(self): raise NotImplementedError, "This needs to be implemented to use the Mapping_check_model class." - + class Mapping_check_df_dtheta(Mapping_check_model): """This class allows gradient checks for the gradient of a mapping with respect to parameters. 
""" def __init__(self, mapping=None, dL_df=None, X=None): @@ -175,13 +118,13 @@ class Mapping_check_df_dX(Mapping_check_model): if dL_df==None: dL_df = np.ones((self.X.shape[0],self.mapping.output_dim)) self.num_params = self.X.shape[0]*self.mapping.input_dim - + def _log_likelihood_gradients(self): return self.mapping.df_dX(self.dL_df, self.X).flatten() def _get_param_names(self): return ['X_' +str(i) + ','+str(j) for j in range(self.X.shape[1]) for i in range(self.X.shape[0])] - + def _get_params(self): return self.X.flatten() diff --git a/GPy/core/parameterization/priors.py b/GPy/core/parameterization/priors.py index 9614ca53..f1208f18 100644 --- a/GPy/core/parameterization/priors.py +++ b/GPy/core/parameterization/priors.py @@ -3,7 +3,6 @@ import numpy as np -import pylab as pb from scipy.special import gammaln, digamma from ...util.linalg import pdinv from domains import _REAL, _POSITIVE @@ -12,16 +11,14 @@ import weakref class Prior: domain = None - + def pdf(self, x): return np.exp(self.lnpdf(x)) def plot(self): - rvs = self.rvs(1000) - pb.hist(rvs, 100, normed=True) - xmin, xmax = pb.xlim() - xx = np.linspace(xmin, xmax, 1000) - pb.plot(xx, self.pdf(xx), 'r', linewidth=2) + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." + from ..plotting.matplot_dep import priors_plots + priors_plots.univariate_plot(self) class Gaussian(Prior): @@ -153,16 +150,9 @@ class MultivariateGaussian: return np.random.multivariate_normal(self.mu, self.var, n) def plot(self): - if self.input_dim == 2: - rvs = self.rvs(200) - pb.plot(rvs[:, 0], rvs[:, 1], 'kx', mew=1.5) - xmin, xmax = pb.xlim() - ymin, ymax = pb.ylim() - xx, yy = np.mgrid[xmin:xmax:100j, ymin:ymax:100j] - xflat = np.vstack((xx.flatten(), yy.flatten())).T - zz = self.pdf(xflat).reshape(100, 100) - pb.contour(xx, yy, zz, linewidths=2) - + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." 
+ from ..plotting.matplot_dep import priors_plots + priors_plots.multivariate_plot(self) def gamma_from_EV(E, V): warnings.warn("use Gamma.from_EV to create Gamma Prior", FutureWarning) diff --git a/GPy/core/parameterization/variational.py b/GPy/core/parameterization/variational.py index 25718fbf..e9868b82 100644 --- a/GPy/core/parameterization/variational.py +++ b/GPy/core/parameterization/variational.py @@ -11,7 +11,7 @@ from ...util.misc import param_to_array class Normal(Parameterized): ''' Normal distribution for variational approximations. - + holds the means and variances for a factorizing multivariate normal distribution ''' def __init__(self, means, variances, name='latent space'): @@ -20,47 +20,12 @@ class Normal(Parameterized): self.variances = Param('variance', variances) self.add_parameters(self.means, self.variances) - def plot(self, fignum=None, ax=None, colors=None): + def plot(self, *args): """ Plot latent space X in 1D: - - if fig is given, create input_dim subplots in fig and plot in these - - if ax is given plot input_dim 1D latent space plots of X into each `axis` - - if neither fig nor ax is given create a figure with fignum and plot in there - - colors: - colors of different latent space dimensions input_dim - + See GPy.plotting.matplot_dep.variational_plots """ - import pylab - if ax is None: - fig = pylab.figure(num=fignum, figsize=(8, min(12, (2 * self.means.shape[1])))) - if colors is None: - colors = pylab.gca()._get_lines.color_cycle - pylab.clf() - else: - colors = iter(colors) - plots = [] - means, variances = param_to_array(self.means, self.variances) - x = np.arange(means.shape[0]) - for i in range(means.shape[1]): - if ax is None: - a = fig.add_subplot(means.shape[1], 1, i + 1) - elif isinstance(ax, (tuple, list)): - a = ax[i] - else: - raise ValueError("Need one ax per latent dimension input_dim") - a.plot(means, c='k', alpha=.3) - plots.extend(a.plot(x, means.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i))) - 
a.fill_between(x, - means.T[i] - 2 * np.sqrt(variances.T[i]), - means.T[i] + 2 * np.sqrt(variances.T[i]), - facecolor=plots[-1].get_color(), - alpha=.3) - a.legend(borderaxespad=0.) - a.set_xlim(x.min(), x.max()) - if i < means.shape[1] - 1: - a.set_xticklabels('') - pylab.draw() - fig.tight_layout(h_pad=.01) # , rect=(0, 0, 1, .95)) - return fig + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." + from ..plotting.matplot_dep import variational_plots + return variational_plots.plot(self,*args) diff --git a/GPy/core/sparse_gp.py b/GPy/core/sparse_gp.py index d91c2ca1..17ecb073 100644 --- a/GPy/core/sparse_gp.py +++ b/GPy/core/sparse_gp.py @@ -2,7 +2,7 @@ # Licensed under the BSD 3-clause license (see LICENSE.txt) import numpy as np -import pylab as pb +from ..util.linalg import mdot, tdot, symmetrify, backsub_both_sides, chol_inv, dtrtrs, dpotrs, dpotri from gp import GP from parameterization.param import Param from ..inference.latent_function_inference import varDTC @@ -73,83 +73,6 @@ class SparseGP(GP): #TODO!!! - def plot_f(self, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, full_cov=False, fignum=None, ax=None): - """ - Plot the belief in the latent function, the "GP's view of the world" - - In one dimension, the function is plotted with a shaded region identifying two standard deviations. - - In two dimsensions, a contour-plot shows the mean predicted function - - Not implemented in higher dimensions - - :param samples: the number of a posteriori samples to plot - :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. 
Defaluts to data limits - :param which_data: which if the training data to plot (default all) - :type which_data: 'all' or a slice object to slice self.X, self.Y - :param which_parts: which of the kernel functions to plot (additively) - :type which_parts: 'all', or list of bools - :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D - :type resolution: int - :param full_cov: - :type full_cov: bool - :param fignum: figure to plot on. - :type fignum: figure number - :param ax: axes to plot on. - :type ax: axes handle - - :param output: which output to plot (for multiple output models only) - :type output: integer (first output is 0) - """ - if ax is None: - fig = pb.figure(num=fignum) - ax = fig.add_subplot(111) - if fignum is None and ax is None: - fignum = fig.num - if which_data is 'all': - which_data = slice(None) - - GP.plot_f(self, samples=samples, plot_limits=plot_limits, which_data='all', which_parts='all', resolution=resolution, full_cov=full_cov, fignum=fignum, ax=ax) - - if self.X.shape[1] == 1: - if self.has_uncertain_inputs: - ax.errorbar(self.X[which_data, 0], self.likelihood.data[which_data, 0], - xerr=2 * np.sqrt(self.X_variance[which_data, 0]), - ecolor='k', fmt=None, elinewidth=.5, alpha=.5) - Zu = self.Z * self._Xscale + self._Xoffset - ax.plot(Zu, np.zeros_like(Zu) + ax.get_ylim()[0], 'r|', mew=1.5, markersize=12) - - elif self.X.shape[1] == 2: - Zu = self.Z * self._Xscale + self._Xoffset - ax.plot(Zu[:, 0], Zu[:, 1], 'wo') - - else: - raise NotImplementedError, "Cannot define a frame with more than two input dimensions" - - def plot(self, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, levels=20, fignum=None, ax=None): - if ax is None: - fig = pb.figure(num=fignum) - ax = fig.add_subplot(111) - if fignum is None and ax is None: - fignum = fig.num - if which_data is 'all': - which_data = slice(None) - - GP.plot(self, samples=samples, 
plot_limits=plot_limits, which_data='all', which_parts='all', resolution=resolution, levels=20, fignum=fignum, ax=ax) - - if self.X.shape[1] == 1: - if self.has_uncertain_inputs: - ax.errorbar(self.X[which_data, 0], self.likelihood.data[which_data, 0], - xerr=2 * np.sqrt(self.X_variance[which_data, 0]), - ecolor='k', fmt=None, elinewidth=.5, alpha=.5) - Zu = self.Z * self._Xscale + self._Xoffset - ax.plot(Zu, np.zeros_like(Zu) + ax.get_ylim()[0], 'r|', mew=1.5, markersize=12) - - elif self.X.shape[1] == 2: - Zu = self.Z * self._Xscale + self._Xoffset - ax.plot(Zu[:, 0], Zu[:, 1], 'wo') - - - else: - raise NotImplementedError, "Cannot define a frame with more than two input dimensions" - def _getstate(self): """ Get the current state of the class, @@ -166,4 +89,3 @@ class SparseGP(GP): self.num_inducing = state.pop() self.Z = state.pop() GP._setstate(self, state) - diff --git a/GPy/core/svigp.py b/GPy/core/svigp.py index ea0de9e3..9e4f3b12 100644 --- a/GPy/core/svigp.py +++ b/GPy/core/svigp.py @@ -2,7 +2,6 @@ # Licensed under the BSD 3-clause license (see LICENSE.txt) import numpy as np -import pylab as pb from ..util.linalg import pdinv, mdot, tdot, dpotrs, dtrtrs, jitchol, backsub_both_sides from gp import GP import time @@ -480,38 +479,19 @@ class SVIGP(GP): return self.q_u_canonical_flat - def plot(self, ax=None, fignum=None, Z_height=None, **kwargs): + def plot(self, *args, **kwargs): + """ + See GPy.plotting.matplot_dep.svig_plots.plot + """ + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." 
+ from ..plotting.matplot_dep import svig_plots + svig_plots.plot(self,*args,**kwargs) - if ax is None: - fig = pb.figure(num=fignum) - ax = fig.add_subplot(111) - - #horrible hack here: - data = self.likelihood.data.copy() - self.likelihood.data = self.Y - GP.plot(self, ax=ax, **kwargs) - self.likelihood.data = data - - Zu = self.Z * self._Xscale + self._Xoffset - if self.input_dim==1: - ax.plot(self.X_batch, self.likelihood.data, 'gx',mew=2) - if Z_height is None: - Z_height = ax.get_ylim()[0] - ax.plot(Zu, np.zeros_like(Zu) + Z_height, 'r|', mew=1.5, markersize=12) - - if self.input_dim==2: - ax.scatter(self.X[:,0], self.X[:,1], 20., self.Y[:,0], linewidth=0, cmap=pb.cm.jet) # @UndefinedVariable - ax.plot(Zu[:,0], Zu[:,1], 'w^') def plot_traces(self): - pb.figure() - t = np.array(self._param_trace) - pb.subplot(2,1,1) - for l,ti in zip(self._get_param_names(),t.T): - if not l[:3]=='iip': - pb.plot(ti,label=l) - pb.legend(loc=0) - - pb.subplot(2,1,2) - pb.plot(np.asarray(self._ll_trace),label='stochastic likelihood') - pb.legend(loc=0) + """ + See GPy.plotting.matplot_dep.svig_plots.plot_traces + """ + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." + from ..plotting.matplot_dep import svig_plots + svig_plots.plot_traces(self) diff --git a/GPy/inference/optimization/optimization.py b/GPy/inference/optimization/optimization.py index e65b862e..d9be46ce 100644 --- a/GPy/inference/optimization/optimization.py +++ b/GPy/inference/optimization/optimization.py @@ -1,7 +1,6 @@ # Copyright (c) 2012, GPy authors (see AUTHORS.txt). # Licensed under the BSD 3-clause license (see LICENSE.txt) -import pylab as pb import datetime as dt from scipy import optimize from warnings import warn @@ -57,13 +56,13 @@ class Optimizer(): raise NotImplementedError, "this needs to be implemented to use the optimizer class" def plot(self): - if self.trace == None: - print "No trace present so I can't plot it. 
Please check that the optimizer actually supplies a trace." - else: - pb.figure() - pb.plot(self.trace) - pb.xlabel('Iteration') - pb.ylabel('f(x)') + """ + See GPy.plotting.matplot_dep.inference_plots + """ + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." + from ..plotting.matplot_dep import inference_plots + inference_plots.plot_optimizer(self) + def __str__(self): diagnostics = "Optimizer: \t\t\t\t %s\n" % self.opt_name diff --git a/GPy/inference/optimization/samplers.py b/GPy/inference/optimization/samplers.py index c2b47bce..fdb3df76 100644 --- a/GPy/inference/optimization/samplers.py +++ b/GPy/inference/optimization/samplers.py @@ -4,7 +4,6 @@ import numpy as np from scipy import linalg, optimize -import pylab as pb import Tango import sys import re @@ -80,6 +79,3 @@ class Metropolis_Hastings: fs.append(function(*args)) self.model._set_params(param)# reset model to starting state return fs - - - diff --git a/GPy/inference/optimization/sgd.py b/GPy/inference/optimization/sgd.py index 5cd144e8..fd089bf5 100644 --- a/GPy/inference/optimization/sgd.py +++ b/GPy/inference/optimization/sgd.py @@ -3,7 +3,6 @@ import scipy as sp import scipy.sparse from optimization import Optimizer from scipy import linalg, optimize -import pylab as plt import copy, sys, pickle class opt_SGD(Optimizer): @@ -68,16 +67,12 @@ class opt_SGD(Optimizer): return status def plot_traces(self): - plt.figure() - plt.subplot(211) - plt.title('Parameters') - for k in self.param_traces.keys(): - plt.plot(self.param_traces[k], label=k) - plt.legend(loc=0) - plt.subplot(212) - plt.title('Objective function') - plt.plot(self.fopt_trace) - + """ + See GPy.plotting.matplot_dep.inference_plots + """ + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." 
+ from ..plotting.matplot_dep import inference_plots + inference_plots.plot_sgd_traces(self) def non_null_samples(self, data): return (np.isnan(data).sum(axis=1) == 0) @@ -289,7 +284,6 @@ class opt_SGD(Optimizer): b = len(features)/self.batch_size features = [features[i::b] for i in range(b)] NLL = [] - import pylab as plt for count, j in enumerate(features): self.Model.input_dim = len(j) self.Model.likelihood.input_dim = len(j) @@ -322,9 +316,6 @@ class opt_SGD(Optimizer): self.adapt_learning_rate(it+count, D) NLL.append(f) self.fopt_trace.append(NLL[-1]) - # fig = plt.figure('traces') - # plt.clf() - # plt.plot(self.param_traces['noise']) # for k in self.param_traces.keys(): # self.param_traces[k].append(self.Model.get(k)[0]) diff --git a/GPy/kern/kern.py b/GPy/kern/kern.py index 196df206..de87ff14 100644 --- a/GPy/kern/kern.py +++ b/GPy/kern/kern.py @@ -3,9 +3,7 @@ import sys import numpy as np -import pylab as pb import itertools -from matplotlib.transforms import offset_copy from parts.prod import Prod as prod from parts.linear import Linear from parts.kernpart import Kernpart @@ -71,77 +69,14 @@ class kern(Parameterized): Parameterized._setstate(self, state) - def plot_ARD(self, fignum=None, ax=None, title='', legend=False): + def plot_ARD(self, *args): """If an ARD kernel is present, plot a bar representation using matplotlib - :param fignum: figure number of the plot - :param ax: matplotlib axis to plot on - :param title: - title of the plot, - pass '' to not print a title - pass None for a generic title + See GPy.plotting.matplot_dep.plot_ARD """ - if ax is None: - fig = pb.figure(fignum) - ax = fig.add_subplot(111) - else: - fig = ax.figure - from GPy.util import Tango - from matplotlib.textpath import TextPath - Tango.reset() - xticklabels = [] - bars = [] - x0 = 0 - for p in self._parameters_: - c = Tango.nextMedium() - if hasattr(p, 'ARD') and p.ARD: - if title is None: - ax.set_title('ARD parameters, %s kernel' % p.name) - else: - ax.set_title(title) 
- if isinstance(p, Linear): - ard_params = p.variances - else: - ard_params = 1. / p.lengthscale - - x = np.arange(x0, x0 + len(ard_params)) - bars.append(ax.bar(x, ard_params, align='center', color=c, edgecolor='k', linewidth=1.2, label=p.name.replace("_"," "))) - xticklabels.extend([r"$\mathrm{{{name}}}\ {x}$".format(name=p.name, x=i) for i in np.arange(len(ard_params))]) - x0 += len(ard_params) - x = np.arange(x0) - transOffset = offset_copy(ax.transData, fig=fig, - x=0., y= -2., units='points') - transOffsetUp = offset_copy(ax.transData, fig=fig, - x=0., y=1., units='points') - for bar in bars: - for patch, num in zip(bar.patches, np.arange(len(bar.patches))): - height = patch.get_height() - xi = patch.get_x() + patch.get_width() / 2. - va = 'top' - c = 'w' - t = TextPath((0, 0), "${xi}$".format(xi=xi), rotation=0, usetex=True, ha='center') - transform = transOffset - if patch.get_extents().height <= t.get_extents().height + 3: - va = 'bottom' - c = 'k' - transform = transOffsetUp - ax.text(xi, height, "${xi}$".format(xi=int(num)), color=c, rotation=0, ha='center', va=va, transform=transform) - # for xi, t in zip(x, xticklabels): - # ax.text(xi, maxi / 2, t, rotation=90, ha='center', va='center') - # ax.set_xticklabels(xticklabels, rotation=17) - ax.set_xticks([]) - ax.set_xlim(-.5, x0 - .5) - if legend: - if title is '': - mode = 'expand' - if len(bars) > 1: - mode = 'expand' - ax.legend(bbox_to_anchor=(0., 1.02, 1., 1.02), loc=3, - ncol=len(bars), mode=mode, borderaxespad=0.) - fig.tight_layout(rect=(0, 0, 1, .9)) - else: - ax.legend() - return ax + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." 
+ from ..plotting.matplot_dep import kernel_plots + return kernel_plots.plot_ARD(self,*args) # def _transform_gradients(self, g): # """ @@ -530,61 +465,13 @@ class kern(Parameterized): return target_mu, target_S - def plot(self, x=None, plot_limits=None, which_parts='all', resolution=None, *args, **kwargs): - if which_parts == 'all': - which_parts = [True] * self.size - if self.input_dim == 1: - if x is None: - x = np.zeros((1, 1)) - else: - x = np.asarray(x) - assert x.size == 1, "The size of the fixed variable x is not 1" - x = x.reshape((1, 1)) - - if plot_limits == None: - xmin, xmax = (x - 5).flatten(), (x + 5).flatten() - elif len(plot_limits) == 2: - xmin, xmax = plot_limits - else: - raise ValueError, "Bad limits for plotting" - - Xnew = np.linspace(xmin, xmax, resolution or 201)[:, None] - Kx = self.K(Xnew, x, which_parts) - pb.plot(Xnew, Kx, *args, **kwargs) - pb.xlim(xmin, xmax) - pb.xlabel("x") - pb.ylabel("k(x,%0.1f)" % x) - - elif self.input_dim == 2: - if x is None: - x = np.zeros((1, 2)) - else: - x = np.asarray(x) - assert x.size == 2, "The size of the fixed variable x is not 2" - x = x.reshape((1, 2)) - - if plot_limits == None: - xmin, xmax = (x - 5).flatten(), (x + 5).flatten() - elif len(plot_limits) == 2: - xmin, xmax = plot_limits - else: - raise ValueError, "Bad limits for plotting" - - resolution = resolution or 51 - xx, yy = np.mgrid[xmin[0]:xmax[0]:1j * resolution, xmin[1]:xmax[1]:1j * resolution] - xg = np.linspace(xmin[0], xmax[0], resolution) - yg = np.linspace(xmin[1], xmax[1], resolution) - Xnew = np.vstack((xx.flatten(), yy.flatten())).T - Kx = self.K(Xnew, x, which_parts) - Kx = Kx.reshape(resolution, resolution).T - pb.contour(xg, yg, Kx, vmin=Kx.min(), vmax=Kx.max(), cmap=pb.cm.jet, *args, **kwargs) # @UndefinedVariable - pb.xlim(xmin[0], xmax[0]) - pb.ylim(xmin[1], xmax[1]) - pb.xlabel("x1") - pb.ylabel("x2") - pb.title("k(x1,x2 ; %0.1f,%0.1f)" % (x[0, 0], x[0, 1])) - else: - raise NotImplementedError, "Cannot plot a kernel with 
more than two input dimensions" + def plot(self, *args, **kwargs): + """ + See GPy.plotting.matplot_dep.plot + """ + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." + from ..plotting.matplot_dep import kernel_plots + kernel_plots.plot(self,*args) from GPy.core.model import Model diff --git a/GPy/likelihoods/likelihood.py b/GPy/likelihoods/likelihood.py index 8ae3174e..b0ecfc37 100644 --- a/GPy/likelihoods/likelihood.py +++ b/GPy/likelihoods/likelihood.py @@ -4,8 +4,6 @@ import numpy as np from scipy import stats,special import scipy as sp -import pylab as pb -from ..util.plot import gpplot from ..util.univariate_Gaussian import std_norm_pdf,std_norm_cdf import link_functions from ..util.misc import chain_1, chain_2, chain_3 diff --git a/GPy/likelihoods/link_functions.py b/GPy/likelihoods/link_functions.py index 9c046223..2a1bf147 100644 --- a/GPy/likelihoods/link_functions.py +++ b/GPy/likelihoods/link_functions.py @@ -4,7 +4,6 @@ import numpy as np from scipy import stats import scipy as sp -import pylab as pb from GPy.util.univariate_Gaussian import std_norm_pdf,std_norm_cdf,inv_std_norm_cdf class GPTransformation(object): diff --git a/GPy/models/bayesian_gplvm.py b/GPy/models/bayesian_gplvm.py index b806ea31..7a22b5ea 100644 --- a/GPy/models/bayesian_gplvm.py +++ b/GPy/models/bayesian_gplvm.py @@ -3,14 +3,12 @@ import numpy as np import itertools -from matplotlib import pyplot from gplvm import GPLVM from .. import kern from ..core import SparseGP from ..likelihoods import Gaussian from ..inference.optimization import SCG -from ..util import plot_latent, linalg -from ..util.plot_latent import most_significant_input_dimensions +from ..util import linalg from ..core.parameterization.variational import Normal class BayesianGPLVM(SparseGP, GPLVM): @@ -75,11 +73,11 @@ class BayesianGPLVM(SparseGP, GPLVM): # """ # Horizontally stacks the parameters in order to present them to the optimizer. 
# The resulting 1-input_dim array has this structure: -# +# # =============================================================== # | mu | S | Z | theta | beta | # =============================================================== -# +# # """ # x = np.hstack((self.X.flatten(), self.X_variance.flatten(), SparseGP._get_params(self))) # return x @@ -131,7 +129,13 @@ class BayesianGPLVM(SparseGP, GPLVM): # return np.hstack((self.dbound_dmuS.flatten(), self.dbound_dZtheta)) def plot_latent(self, plot_inducing=True, *args, **kwargs): - return plot_latent.plot_latent(self, plot_inducing=plot_inducing, *args, **kwargs) + """ + See GPy.plotting.matplot_dep.dim_reduction_plots.plot_latent + """ + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." + from ..plotting.matplot_dep import dim_reduction_plots + + return dim_reduction_plots.plot_latent(self, plot_inducing=plot_inducing, *args, **kwargs) def do_test_latents(self, Y): """ @@ -190,65 +194,14 @@ class BayesianGPLVM(SparseGP, GPLVM): dK_dX[:, i] = self.kern.dK_dX(ones, Xnew, self.Z[i:i + 1, :]).sum(-1) return np.dot(dK_dX, self.Cpsi1Vf) - def plot_steepest_gradient_map(self, fignum=None, ax=None, which_indices=None, labels=None, data_labels=None, data_marker='o', data_s=40, resolution=20, aspect='auto', updates=False, ** kwargs): - input_1, input_2 = significant_dims = most_significant_input_dimensions(self, which_indices) + def plot_steepest_gradient_map(self, *args, ** kwargs): + """ + See GPy.plotting.matplot_dep.dim_reduction_plots.plot_steepest_gradient_map + """ + assert "matplotlib" in sys.modules, "matplotlib package has not been imported." 
+ from ..plotting.matplot_dep import dim_reduction_plots - X = np.zeros((resolution ** 2, self.input_dim)) - indices = np.r_[:X.shape[0]] - if labels is None: - labels = range(self.output_dim) - - def plot_function(x): - X[:, significant_dims] = x - dmu_dX = self.dmu_dXnew(X) - argmax = np.argmax(dmu_dX, 1) - return dmu_dX[indices, argmax], np.array(labels)[argmax] - - if ax is None: - fig = pyplot.figure(num=fignum) - ax = fig.add_subplot(111) - - if data_labels is None: - data_labels = np.ones(self.num_data) - ulabels = [] - for lab in data_labels: - if not lab in ulabels: - ulabels.append(lab) - marker = itertools.cycle(list(data_marker)) - from GPy.util import Tango - for i, ul in enumerate(ulabels): - if type(ul) is np.string_: - this_label = ul - elif type(ul) is np.int64: - this_label = 'class %i' % ul - else: - this_label = 'class %i' % i - m = marker.next() - index = np.nonzero(data_labels == ul)[0] - x = self.X[index, input_1] - y = self.X[index, input_2] - ax.scatter(x, y, marker=m, s=data_s, color=Tango.nextMedium(), label=this_label) - - ax.set_xlabel('latent dimension %i' % input_1) - ax.set_ylabel('latent dimension %i' % input_2) - - from matplotlib.cm import get_cmap - from GPy.util.latent_space_visualizations.controllers.imshow_controller import ImAnnotateController - controller = ImAnnotateController(ax, - plot_function, - tuple(self.X.min(0)[:, significant_dims]) + tuple(self.X.max(0)[:, significant_dims]), - resolution=resolution, - aspect=aspect, - cmap=get_cmap('jet'), - **kwargs) - ax.legend() - ax.figure.tight_layout() - if updates: - pyplot.show() - clear = raw_input('Enter to continue') - if clear.lower() in 'yes' or clear == '': - controller.deactivate() - return controller.view + return dim_reduction_plots.plot_steepest_gradient_map(model,*args,**kwargs) def latent_cost_and_grad(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2): """ @@ -304,5 +257,3 @@ def latent_grad(mu_S, kern, Z, dL_dpsi0, dL_dpsi1, dL_dpsi2): dlnS = S * (S0 + S1 + S2 - 
0.5) + .5 return -np.hstack((dmu.flatten(), dlnS.flatten())) - - diff --git a/GPy/util/Tango.py b/GPy/plotting/matplot_dep/Tango.py similarity index 100% rename from GPy/util/Tango.py rename to GPy/plotting/matplot_dep/Tango.py diff --git a/GPy/plotting/matplot_dep/__init__.py b/GPy/plotting/matplot_dep/__init__.py new file mode 100644 index 00000000..b2a29c2d --- /dev/null +++ b/GPy/plotting/matplot_dep/__init__.py @@ -0,0 +1,16 @@ +# Copyright (c) 2014, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +import base_plots +import models_plots +import priors_plots +import variational_plots +import kernel_plots +import svig_plots +import dim_reduction_plots +import mapping_plots +import Tango +import visualize +import latent_space_visualizations +import netpbmfile +import inference_plots diff --git a/GPy/util/plot.py b/GPy/plotting/matplot_dep/base_plots.py similarity index 97% rename from GPy/util/plot.py rename to GPy/plotting/matplot_dep/base_plots.py index f44864f3..d5d4d6ee 100644 --- a/GPy/util/plot.py +++ b/GPy/plotting/matplot_dep/base_plots.py @@ -71,8 +71,8 @@ def align_subplots(N,M,xlim=None, ylim=None): removeUpperTicks() def align_subplot_array(axes,xlim=None, ylim=None): - """make all of the axes in the array hae the same limits, turn off unnecessary ticks - + """ + Make all of the axes in the array hae the same limits, turn off unnecessary ticks use pb.subplots() to get an array of axes """ #find sensible xlim,ylim diff --git a/GPy/util/plot_latent.py b/GPy/plotting/matplot_dep/dim_reduction_plots.py similarity index 71% rename from GPy/util/plot_latent.py rename to GPy/plotting/matplot_dep/dim_reduction_plots.py index fbf62c41..74292c05 100644 --- a/GPy/util/plot_latent.py +++ b/GPy/plotting/matplot_dep/dim_reduction_plots.py @@ -1,11 +1,16 @@ import pylab as pb import numpy as np -from .. 
import util -from GPy.util.latent_space_visualizations.controllers.imshow_controller import ImshowController -from misc import param_to_array +from ... import util +from latent_space_visualizations.controllers.imshow_controller import ImshowController,ImAnnotateController +from GPy.util.misc import param_to_array import itertools +import Tango +from matplotlib.cm import get_cmap def most_significant_input_dimensions(model, which_indices): + """ + Determine which dimensions should be plotted + """ if which_indices is None: if model.input_dim == 1: input_1 = 0 @@ -39,7 +44,7 @@ def plot_latent(model, labels=None, which_indices=None, input_1, input_2 = most_significant_input_dimensions(model, which_indices) X = param_to_array(model.X) - + # first, plot the output variance as a function of the latent space Xtest, xx, yy, xmin, xmax = util.plot.x_frame2D(X[:, [input_1, input_2]], resolution=resolution) Xtest_full = np.zeros((Xtest.shape[0], model.X.shape[1])) @@ -49,6 +54,7 @@ def plot_latent(model, labels=None, which_indices=None, mu, var, low, up = model.predict(Xtest_full) var = var[:, :1] return np.log(var) + view = ImshowController(ax, plot_function, tuple(X[:, [input_1, input_2]].min(0)) + tuple(X[:, [input_1, input_2]].max(0)), resolution, aspect=aspect, interpolation='bilinear', @@ -124,10 +130,12 @@ def plot_magnification(model, labels=None, which_indices=None, # first, plot the output variance as a function of the latent space Xtest, xx, yy, xmin, xmax = util.plot.x_frame2D(model.X[:, [input_1, input_2]], resolution=resolution) Xtest_full = np.zeros((Xtest.shape[0], model.X.shape[1])) + def plot_function(x): Xtest_full[:, [input_1, input_2]] = x mf=model.magnification(Xtest_full) return mf + view = ImshowController(ax, plot_function, tuple(model.X.min(0)[:, [input_1, input_2]]) + tuple(model.X.max(0)[:, [input_1, input_2]]), resolution, aspect=aspect, interpolation='bilinear', @@ -179,3 +187,62 @@ def plot_magnification(model, labels=None, which_indices=None, 
pb.title('Magnification Factor') return ax + + +def plot_steepest_gradient_map(model, fignum=None, ax=None, which_indices=None, labels=None, data_labels=None, data_marker='o', data_s=40, resolution=20, aspect='auto', updates=False, ** kwargs): + + input_1, input_2 = significant_dims = most_significant_input_dimensions(model, which_indices) + + X = np.zeros((resolution ** 2, model.input_dim)) + indices = np.r_[:X.shape[0]] + if labels is None: + labels = range(model.output_dim) + + def plot_function(x): + X[:, significant_dims] = x + dmu_dX = model.dmu_dXnew(X) + argmax = np.argmax(dmu_dX, 1) + return dmu_dX[indices, argmax], np.array(labels)[argmax] + + if ax is None: + fig = pyplot.figure(num=fignum) + ax = fig.add_subplot(111) + + if data_labels is None: + data_labels = np.ones(model.num_data) + ulabels = [] + for lab in data_labels: + if not lab in ulabels: + ulabels.append(lab) + marker = itertools.cycle(list(data_marker)) + for i, ul in enumerate(ulabels): + if type(ul) is np.string_: + this_label = ul + elif type(ul) is np.int64: + this_label = 'class %i' % ul + else: + this_label = 'class %i' % i + m = marker.next() + index = np.nonzero(data_labels == ul)[0] + x = model.X[index, input_1] + y = model.X[index, input_2] + ax.scatter(x, y, marker=m, s=data_s, color=Tango.nextMedium(), label=this_label) + + ax.set_xlabel('latent dimension %i' % input_1) + ax.set_ylabel('latent dimension %i' % input_2) + + controller = ImAnnotateController(ax, + plot_function, + tuple(model.X.min(0)[:, significant_dims]) + tuple(model.X.max(0)[:, significant_dims]), + resolution=resolution, + aspect=aspect, + cmap=get_cmap('jet'), + **kwargs) + ax.legend() + ax.figure.tight_layout() + if updates: + pyplot.show() + clear = raw_input('Enter to continue') + if clear.lower() in 'yes' or clear == '': + controller.deactivate() + return controller.view diff --git a/GPy/plotting/matplot_dep/inference_plots.py b/GPy/plotting/matplot_dep/inference_plots.py new file mode 100644 index 
00000000..f9bb464a --- /dev/null +++ b/GPy/plotting/matplot_dep/inference_plots.py @@ -0,0 +1,28 @@ +# Copyright (c) 2012, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +import pylab as pb +#import numpy as np +#import Tango +#from base_plots import gpplot, x_frame1D, x_frame2D + + +def plot_optimizer(optimizer): + if optimizer.trace == None: + print "No trace present so I can't plot it. Please check that the optimizer actually supplies a trace." + else: + pb.figure() + pb.plot(optimizer.trace) + pb.xlabel('Iteration') + pb.ylabel('f(x)') + +def plot_sgd_traces(optimizer): + pb.figure() + pb.subplot(211) + pb.title('Parameters') + for k in optimizer.param_traces.keys(): + pb.plot(optimizer.param_traces[k], label=k) + pb.legend(loc=0) + pb.subplot(212) + pb.title('Objective function') + pb.plot(optimizer.fopt_trace) diff --git a/GPy/plotting/matplot_dep/kernel_plots.py b/GPy/plotting/matplot_dep/kernel_plots.py new file mode 100644 index 00000000..66644483 --- /dev/null +++ b/GPy/plotting/matplot_dep/kernel_plots.py @@ -0,0 +1,137 @@ +# Copyright (c) 2012, GPy authors (see AUTHORS.txt). 
+# Licensed under the BSD 3-clause license (see LICENSE.txt) + +import sys +import numpy as np +import pylab as pb +import Tango +from matplotlib.textpath import TextPath +from matplotlib.transforms import offset_copy + + +def plot_ARD(kernel, fignum=None, ax=None, title='', legend=False): + """If an ARD kernel is present, plot a bar representation using matplotlib + + :param fignum: figure number of the plot + :param ax: matplotlib axis to plot on + :param title: + title of the plot, + pass '' to not print a title + pass None for a generic title + """ + if ax is None: + fig = pb.figure(fignum) + ax = fig.add_subplot(111) + else: + fig = ax.figure + Tango.reset() + xticklabels = [] + bars = [] + x0 = 0 + for p in kernel._parameters_: + c = Tango.nextMedium() + if hasattr(p, 'ARD') and p.ARD: + if title is None: + ax.set_title('ARD parameters, %s kernel' % p.name) + else: + ax.set_title(title) + if isinstance(p, Linear): + ard_params = p.variances + else: + ard_params = 1. / p.lengthscale + + x = np.arange(x0, x0 + len(ard_params)) + bars.append(ax.bar(x, ard_params, align='center', color=c, edgecolor='k', linewidth=1.2, label=p.name.replace("_"," "))) + xticklabels.extend([r"$\mathrm{{{name}}}\ {x}$".format(name=p.name, x=i) for i in np.arange(len(ard_params))]) + x0 += len(ard_params) + x = np.arange(x0) + transOffset = offset_copy(ax.transData, fig=fig, + x=0., y= -2., units='points') + transOffsetUp = offset_copy(ax.transData, fig=fig, + x=0., y=1., units='points') + for bar in bars: + for patch, num in zip(bar.patches, np.arange(len(bar.patches))): + height = patch.get_height() + xi = patch.get_x() + patch.get_width() / 2. 
+ va = 'top' + c = 'w' + t = TextPath((0, 0), "${xi}$".format(xi=xi), rotation=0, usetex=True, ha='center') + transform = transOffset + if patch.get_extents().height <= t.get_extents().height + 3: + va = 'bottom' + c = 'k' + transform = transOffsetUp + ax.text(xi, height, "${xi}$".format(xi=int(num)), color=c, rotation=0, ha='center', va=va, transform=transform) + # for xi, t in zip(x, xticklabels): + # ax.text(xi, maxi / 2, t, rotation=90, ha='center', va='center') + # ax.set_xticklabels(xticklabels, rotation=17) + ax.set_xticks([]) + ax.set_xlim(-.5, x0 - .5) + if legend: + if title is '': + mode = 'expand' + if len(bars) > 1: + mode = 'expand' + ax.legend(bbox_to_anchor=(0., 1.02, 1., 1.02), loc=3, + ncol=len(bars), mode=mode, borderaxespad=0.) + fig.tight_layout(rect=(0, 0, 1, .9)) + else: + ax.legend() + return ax + + +def plot(kernel, x=None, plot_limits=None, which_parts='all', resolution=None, *args, **kwargs): + if which_parts == 'all': + which_parts = [True] * kernel.size + if kernel.input_dim == 1: + if x is None: + x = np.zeros((1, 1)) + else: + x = np.asarray(x) + assert x.size == 1, "The size of the fixed variable x is not 1" + x = x.reshape((1, 1)) + + if plot_limits == None: + xmin, xmax = (x - 5).flatten(), (x + 5).flatten() + elif len(plot_limits) == 2: + xmin, xmax = plot_limits + else: + raise ValueError, "Bad limits for plotting" + + Xnew = np.linspace(xmin, xmax, resolution or 201)[:, None] + Kx = kernel.K(Xnew, x, which_parts) + pb.plot(Xnew, Kx, *args, **kwargs) + pb.xlim(xmin, xmax) + pb.xlabel("x") + pb.ylabel("k(x,%0.1f)" % x) + + elif kernel.input_dim == 2: + if x is None: + x = np.zeros((1, 2)) + else: + x = np.asarray(x) + assert x.size == 2, "The size of the fixed variable x is not 2" + x = x.reshape((1, 2)) + + if plot_limits == None: + xmin, xmax = (x - 5).flatten(), (x + 5).flatten() + elif len(plot_limits) == 2: + xmin, xmax = plot_limits + else: + raise ValueError, "Bad limits for plotting" + + resolution = resolution or 51 + xx, 
yy = np.mgrid[xmin[0]:xmax[0]:1j * resolution, xmin[1]:xmax[1]:1j * resolution] + xg = np.linspace(xmin[0], xmax[0], resolution) + yg = np.linspace(xmin[1], xmax[1], resolution) + Xnew = np.vstack((xx.flatten(), yy.flatten())).T + Kx = kernel.K(Xnew, x, which_parts) + Kx = Kx.reshape(resolution, resolution).T + pb.contour(xg, yg, Kx, vmin=Kx.min(), vmax=Kx.max(), cmap=pb.cm.jet, *args, **kwargs) # @UndefinedVariable + pb.xlim(xmin[0], xmax[0]) + pb.ylim(xmin[1], xmax[1]) + pb.xlabel("x1") + pb.ylabel("x2") + pb.title("k(x1,x2 ; %0.1f,%0.1f)" % (x[0, 0], x[0, 1])) + else: + raise NotImplementedError, "Cannot plot a kernel with more than two input dimensions" diff --git a/GPy/util/latent_space_visualizations/__init__.py b/GPy/plotting/matplot_dep/latent_space_visualizations/__init__.py similarity index 100% rename from GPy/util/latent_space_visualizations/__init__.py rename to GPy/plotting/matplot_dep/latent_space_visualizations/__init__.py diff --git a/GPy/util/latent_space_visualizations/controllers/__init__.py b/GPy/plotting/matplot_dep/latent_space_visualizations/controllers/__init__.py similarity index 100% rename from GPy/util/latent_space_visualizations/controllers/__init__.py rename to GPy/plotting/matplot_dep/latent_space_visualizations/controllers/__init__.py diff --git a/GPy/util/latent_space_visualizations/controllers/axis_event_controller.py b/GPy/plotting/matplot_dep/latent_space_visualizations/controllers/axis_event_controller.py similarity index 99% rename from GPy/util/latent_space_visualizations/controllers/axis_event_controller.py rename to GPy/plotting/matplot_dep/latent_space_visualizations/controllers/axis_event_controller.py index acb1ac8d..d5aaefd2 100644 --- a/GPy/util/latent_space_visualizations/controllers/axis_event_controller.py +++ b/GPy/plotting/matplot_dep/latent_space_visualizations/controllers/axis_event_controller.py @@ -80,13 +80,13 @@ class AxisChangedController(AxisEventController): class 
BufferedAxisChangedController(AxisChangedController): def __init__(self, ax, plot_function, plot_limits, resolution=50, update_lim=None, **kwargs): """ - :param plot_function: + :param plot_function: function to use for creating image for plotting (return ndarray-like) plot_function gets called with (2D!) Xtest grid if replotting required :type plot_function: function :param plot_limits: beginning plot limits [xmin, ymin, xmax, ymax] - + :param kwargs: additional kwargs are for pyplot.imshow(**kwargs) """ super(BufferedAxisChangedController, self).__init__(ax, update_lim=update_lim) @@ -137,6 +137,3 @@ class BufferedAxisChangedController(AxisChangedController): except: buffersize = .4 return buffersize - - - diff --git a/GPy/util/latent_space_visualizations/controllers/imshow_controller.py b/GPy/plotting/matplot_dep/latent_space_visualizations/controllers/imshow_controller.py similarity index 94% rename from GPy/util/latent_space_visualizations/controllers/imshow_controller.py rename to GPy/plotting/matplot_dep/latent_space_visualizations/controllers/imshow_controller.py index fa6682e9..b473dd96 100644 --- a/GPy/util/latent_space_visualizations/controllers/imshow_controller.py +++ b/GPy/plotting/matplot_dep/latent_space_visualizations/controllers/imshow_controller.py @@ -3,7 +3,7 @@ Created on 24 Jul 2013 @author: maxz ''' -from GPy.util.latent_space_visualizations.controllers.axis_event_controller import BufferedAxisChangedController +from axis_event_controller import BufferedAxisChangedController import itertools import numpy @@ -11,13 +11,13 @@ import numpy class ImshowController(BufferedAxisChangedController): def __init__(self, ax, plot_function, plot_limits, resolution=50, update_lim=.5, **kwargs): """ - :param plot_function: + :param plot_function: function to use for creating image for plotting (return ndarray-like) plot_function gets called with (2D!) 
Xtest grid if replotting required :type plot_function: function :param plot_limits: beginning plot limits [xmin, ymin, xmax, ymax] - + :param kwargs: additional kwargs are for pyplot.imshow(**kwargs) """ super(ImshowController, self).__init__(ax, plot_function, plot_limits, resolution, update_lim, **kwargs) @@ -36,7 +36,7 @@ class ImshowController(BufferedAxisChangedController): class ImAnnotateController(ImshowController): def __init__(self, ax, plot_function, plot_limits, resolution=20, update_lim=.99, **kwargs): """ - :param plot_function: + :param plot_function: function to use for creating image for plotting (return ndarray-like) plot_function gets called with (2D!) Xtest grid if replotting required :type plot_function: function diff --git a/GPy/plotting/matplot_dep/mapping_plots.py b/GPy/plotting/matplot_dep/mapping_plots.py new file mode 100644 index 00000000..3e3ea793 --- /dev/null +++ b/GPy/plotting/matplot_dep/mapping_plots.py @@ -0,0 +1,81 @@ +# Copyright (c) 2012, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +import pylab as pb +import numpy as np +import Tango +from base_plots import x_frame1D, x_frame2D + + +def plot_mapping(self, plot_limits=None, which_data='all', which_parts='all', resolution=None, levels=20, samples=0, fignum=None, ax=None, fixed_inputs=[], linecol=Tango.colorsHex['darkBlue']): + """ + Plots the mapping associated with the model. + - In one dimension, the function is plotted. + - In two dimsensions, a contour-plot shows the function + - In higher dimensions, we've not implemented this yet !TODO! + + Can plot only part of the data and part of the posterior functions + using which_data and which_functions + + :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. 
Defaluts to data limits + :type plot_limits: np.array + :param which_data: which if the training data to plot (default all) + :type which_data: 'all' or a slice object to slice self.X, self.Y + :param which_parts: which of the kernel functions to plot (additively) + :type which_parts: 'all', or list of bools + :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D + :type resolution: int + :param levels: number of levels to plot in a contour plot. + :type levels: int + :param samples: the number of a posteriori samples to plot + :type samples: int + :param fignum: figure to plot on. + :type fignum: figure number + :param ax: axes to plot on. + :type ax: axes handle + :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v. + :type fixed_inputs: a list of tuples + :param linecol: color of line to plot. + :type linecol: + :param levels: for 2D plotting, the number of contour levels to use is ax is None, create a new figure + + """ + # TODO include samples + if which_data == 'all': + which_data = slice(None) + + if ax is None: + fig = pb.figure(num=fignum) + ax = fig.add_subplot(111) + + plotdims = self.input_dim - len(fixed_inputs) + + if plotdims == 1: + + Xu = self.X * self._Xscale + self._Xoffset # NOTE self.X are the normalized values now + + fixed_dims = np.array([i for i,v in fixed_inputs]) + freedim = np.setdiff1d(np.arange(self.input_dim),fixed_dims) + + Xnew, xmin, xmax = x_frame1D(Xu[:,freedim], plot_limits=plot_limits) + Xgrid = np.empty((Xnew.shape[0],self.input_dim)) + Xgrid[:,freedim] = Xnew + for i,v in fixed_inputs: + Xgrid[:,i] = v + + f = self.predict(Xgrid, which_parts=which_parts) + for d in range(y.shape[1]): + ax.plot(Xnew, f[:, d], edgecol=linecol) + + elif self.X.shape[1] == 2: + resolution = resolution or 50 + Xnew, _, _, xmin, xmax = x_frame2D(self.X, plot_limits, resolution) + x, y = np.linspace(xmin[0], xmax[0], resolution), 
np.linspace(xmin[1], xmax[1], resolution) + f = self.predict(Xnew, which_parts=which_parts) + m = m.reshape(resolution, resolution).T + ax.contour(x, y, f, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet) # @UndefinedVariable + ax.set_xlim(xmin[0], xmax[0]) + ax.set_ylim(xmin[1], xmax[1]) + + else: + raise NotImplementedError, "Cannot define a frame with more than two input dimensions" diff --git a/GPy/util/maps.py b/GPy/plotting/matplot_dep/maps.py similarity index 100% rename from GPy/util/maps.py rename to GPy/plotting/matplot_dep/maps.py diff --git a/GPy/plotting/matplot_dep/models_plots.py b/GPy/plotting/matplot_dep/models_plots.py new file mode 100644 index 00000000..a4e06441 --- /dev/null +++ b/GPy/plotting/matplot_dep/models_plots.py @@ -0,0 +1,161 @@ +# Copyright (c) 2012, GPy authors (see AUTHORS.txt). +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +import pylab as pb +import numpy as np +import Tango +from base_plots import gpplot, x_frame1D, x_frame2D + + +def plot_fit(model, plot_limits=None, which_data_rows='all', + which_data_ycols='all', which_parts='all', fixed_inputs=[], + levels=20, samples=0, fignum=None, ax=None, resolution=None, + plot_raw=False, + linecol=Tango.colorsHex['darkBlue'],fillcol=Tango.colorsHex['lightBlue']): + """ + Plot the posterior of the GP. + - In one dimension, the function is plotted with a shaded region identifying two standard deviations. + - In two dimsensions, a contour-plot shows the mean predicted function + - In higher dimensions, use fixed_inputs to plot the GP with some of the inputs fixed. + + Can plot only part of the data and part of the posterior functions + using which_data_rowsm which_data_ycols and which_parts + + :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. 
Defaluts to data limits + :type plot_limits: np.array + :param which_data_rows: which of the training data to plot (default all) + :type which_data_rows: 'all' or a slice object to slice model.X, model.Y + :param which_data_ycols: when the data has several columns (independant outputs), only plot these + :type which_data_rows: 'all' or a list of integers + :param which_parts: which of the kernel functions to plot (additively) + :type which_parts: 'all', or list of bools + :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v. + :type fixed_inputs: a list of tuples + :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D + :type resolution: int + :param levels: number of levels to plot in a contour plot. + :type levels: int + :param samples: the number of a posteriori samples to plot + :type samples: int + :param fignum: figure to plot on. + :type fignum: figure number + :param ax: axes to plot on. + :type ax: axes handle + :type output: integer (first output is 0) + :param linecol: color of line to plot. 
+ :type linecol: + :param fillcol: color of fill + :param levels: for 2D plotting, the number of contour levels to use is ax is None, create a new figure + """ + #deal with optional arguments + if which_data_rows == 'all': + which_data_rows = slice(None) + if which_data_ycols == 'all': + which_data_ycols = np.arange(model.output_dim) + if len(which_data_ycols)==0: + raise ValueError('No data selected for plotting') + if ax is None: + fig = pb.figure(num=fignum) + ax = fig.add_subplot(111) + + #work out what the inputs are for plotting (1D or 2D) + fixed_dims = np.array([i for i,v in fixed_inputs]) + free_dims = np.setdiff1d(np.arange(model.input_dim),fixed_dims) + + #one dimensional plotting + if len(free_dims) == 1: + + #define the frame on which to plot + resolution = resolution or 200 + Xnew, xmin, xmax = x_frame1D(model.X[:,free_dims], plot_limits=plot_limits) + Xgrid = np.empty((Xnew.shape[0],model.input_dim)) + Xgrid[:,free_dims] = Xnew + for i,v in fixed_inputs: + Xgrid[:,i] = v + + #make a prediction on the frame and plot it + if plot_raw: + m, v = model._raw_predict(Xgrid, which_parts=which_parts) + lower = m - 2*np.sqrt(v) + upper = m + 2*np.sqrt(v) + Y = model.Y + else: + m, v, lower, upper = model.predict(Xgrid, which_parts=which_parts) + Y = model.Y + for d in which_data_ycols: + gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], axes=ax, edgecol=linecol, fillcol=fillcol) + ax.plot(model.X[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', mew=1.5) + + #optionally plot some samples + if samples: #NOTE not tested with fixed_inputs + Ysim = model.posterior_samples(Xgrid, samples, which_parts=which_parts) + for yi in Ysim.T: + ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25) + #ax.plot(Xnew, yi[:,None], marker='x', linestyle='--',color=Tango.colorsHex['darkBlue']) #TODO apply this line for discrete outputs. 
+ + #add inducing inputs (if a sparse model is used) + if hasattr(model,"Z"): + Zu = model.Z[:,free_dims] * model._Xscale[:,free_dims] + model._Xoffset[:,free_dims] + ax.plot(Zu, np.zeros_like(Zu) + ax.get_ylim()[0], 'r|', mew=1.5, markersize=12) + + #add error bars for uncertain (if input uncertainty is being modelled) + if hasattr(model,"has_uncertain_inputs"): + ax.errorbar(model.X[which_data, free_dims], model.likelihood.data[which_data, 0], + xerr=2 * np.sqrt(model.X_variance[which_data, free_dims]), + ecolor='k', fmt=None, elinewidth=.5, alpha=.5) + + + #set the limits of the plot to some sensible values + ymin, ymax = min(np.append(Y[which_data_rows, which_data_ycols].flatten(), lower)), max(np.append(Y[which_data_rows, which_data_ycols].flatten(), upper)) + ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin) + ax.set_xlim(xmin, xmax) + ax.set_ylim(ymin, ymax) + + #2D plotting + elif len(free_dims) == 2: + + #define the frame for plotting on + resolution = resolution or 50 + Xnew, _, _, xmin, xmax = x_frame2D(model.X[:,free_dims], plot_limits, resolution) + Xgrid = np.empty((Xnew.shape[0],model.input_dim)) + Xgrid[:,free_dims] = Xnew + for i,v in fixed_inputs: + Xgrid[:,i] = v + x, y = np.linspace(xmin[0], xmax[0], resolution), np.linspace(xmin[1], xmax[1], resolution) + + #predict on the frame and plot + if plot_raw: + m, _ = model._raw_predict(Xgrid, which_parts=which_parts) + Y = model.likelihood.Y + else: + m, _, _, _ = model.predict(Xgrid, which_parts=which_parts,sampling=False) + Y = model.likelihood.data + for d in which_data_ycols: + m_d = m[:,d].reshape(resolution, resolution).T + ax.contour(x, y, m_d, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet) + ax.scatter(model.X[which_data_rows, free_dims[0]], model.X[which_data_rows, free_dims[1]], 40, Y[which_data_rows, d], cmap=pb.cm.jet, vmin=m.min(), vmax=m.max(), linewidth=0.) 
+ + #set the limits of the plot to some sensible values + ax.set_xlim(xmin[0], xmax[0]) + ax.set_ylim(xmin[1], xmax[1]) + + if samples: + warnings.warn("Samples are rather difficult to plot for 2D inputs...") + + #add inducing inputs (if a sparse model is used) + if hasattr(model,"Z"): + Zu = model.Z[:,free_dims] * model._Xscale[:,free_dims] + model._Xoffset[:,free_dims] + ax.plot(Zu[:,free_dims[0]], Zu[:,free_dims[1]], 'wo') + + else: + raise NotImplementedError, "Cannot define a frame with more than two input dimensions" + + +def plot_f_fit(model, *args, **kwargs): + """ + Plot the GP's view of the world, where the data is normalized and before applying a likelihood. + + All args and kwargs are passed on to models_plots.plot. + """ + kwargs['plot_raw'] = True + plot(model,*args, **kwargs) diff --git a/GPy/util/netpbmfile.py b/GPy/plotting/matplot_dep/netpbmfile.py similarity index 100% rename from GPy/util/netpbmfile.py rename to GPy/plotting/matplot_dep/netpbmfile.py diff --git a/GPy/plotting/matplot_dep/priors_plots.py b/GPy/plotting/matplot_dep/priors_plots.py new file mode 100644 index 00000000..af999740 --- /dev/null +++ b/GPy/plotting/matplot_dep/priors_plots.py @@ -0,0 +1,29 @@ +# Copyright (c) 2012, GPy authors (see AUTHORS.txt). 
+# Licensed under the BSD 3-clause license (see LICENSE.txt) + + +import numpy as np +import pylab as pb + + +def univariate_plot(prior): + rvs = prior.rvs(1000) + pb.hist(rvs, 100, normed=True) + xmin, xmax = pb.xlim() + xx = np.linspace(xmin, xmax, 1000) + pb.plot(xx, prior.pdf(xx), 'r', linewidth=2) + +def plot(prior): + + if prior.input_dim == 2: + rvs = prior.rvs(200) + pb.plot(rvs[:, 0], rvs[:, 1], 'kx', mew=1.5) + xmin, xmax = pb.xlim() + ymin, ymax = pb.ylim() + xx, yy = np.mgrid[xmin:xmax:100j, ymin:ymax:100j] + xflat = np.vstack((xx.flatten(), yy.flatten())).T + zz = prior.pdf(xflat).reshape(100, 100) + pb.contour(xx, yy, zz, linewidths=2) + + else: + raise NotImplementedError, "Cannot define a frame with more than two input dimensions" diff --git a/GPy/plotting/matplot_dep/svig_plots.py b/GPy/plotting/matplot_dep/svig_plots.py new file mode 100644 index 00000000..95344643 --- /dev/null +++ b/GPy/plotting/matplot_dep/svig_plots.py @@ -0,0 +1,43 @@ +# Copyright (c) 2012, James Hensman and Nicolo' Fusi +# Licensed under the BSD 3-clause license (see LICENSE.txt) + +import numpy as np +import pylab as pb + + +def plot(model, ax=None, fignum=None, Z_height=None, **kwargs): + + if ax is None: + fig = pb.figure(num=fignum) + ax = fig.add_subplot(111) + + #horrible hack here: + data = model.likelihood.data.copy() + model.likelihood.data = model.Y + GP.plot(model, ax=ax, **kwargs) + model.likelihood.data = data + + Zu = model.Z * model._Xscale + model._Xoffset + if model.input_dim==1: + ax.plot(model.X_batch, model.likelihood.data, 'gx',mew=2) + if Z_height is None: + Z_height = ax.get_ylim()[0] + ax.plot(Zu, np.zeros_like(Zu) + Z_height, 'r|', mew=1.5, markersize=12) + + if model.input_dim==2: + ax.scatter(model.X[:,0], model.X[:,1], 20., model.Y[:,0], linewidth=0, cmap=pb.cm.jet) # @UndefinedVariable + ax.plot(Zu[:,0], Zu[:,1], 'w^') + +def plot_traces(model): + + pb.figure() + t = np.array(model._param_trace) + pb.subplot(2,1,1) + for l,ti in 
zip(model._get_param_names(),t.T): + if not l[:3]=='iip': + pb.plot(ti,label=l) + pb.legend(loc=0) + + pb.subplot(2,1,2) + pb.plot(np.asarray(model._ll_trace),label='stochastic likelihood') + pb.legend(loc=0) diff --git a/GPy/plotting/matplot_dep/variational_plots.py b/GPy/plotting/matplot_dep/variational_plots.py new file mode 100644 index 00000000..9f791dd1 --- /dev/null +++ b/GPy/plotting/matplot_dep/variational_plots.py @@ -0,0 +1,45 @@ +import pylab as pb + +def plot(parameterized, fignum=None, ax=None, colors=None): + """ + Plot latent space X in 1D: + + - if fig is given, create input_dim subplots in fig and plot in these + - if ax is given plot input_dim 1D latent space plots of X into each `axis` + - if neither fig nor ax is given create a figure with fignum and plot in there + + colors: + colors of different latent space dimensions input_dim + + """ + if ax is None: + fig = pb.figure(num=fignum, figsize=(8, min(12, (2 * parameterized.means.shape[1])))) + if colors is None: + colors = pb.gca()._get_lines.color_cycle + pb.clf() + else: + colors = iter(colors) + plots = [] + means, variances = param_to_array(parameterized.means, parameterized.variances) + x = np.arange(means.shape[0]) + for i in range(means.shape[1]): + if ax is None: + a = fig.add_subplot(means.shape[1], 1, i + 1) + elif isinstance(ax, (tuple, list)): + a = ax[i] + else: + raise ValueError("Need one ax per latent dimension input_dim") + a.plot(means, c='k', alpha=.3) + plots.extend(a.plot(x, means.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i))) + a.fill_between(x, + means.T[i] - 2 * np.sqrt(variances.T[i]), + means.T[i] + 2 * np.sqrt(variances.T[i]), + facecolor=plots[-1].get_color(), + alpha=.3) + a.legend(borderaxespad=0.) 
+ a.set_xlim(x.min(), x.max()) + if i < means.shape[1] - 1: + a.set_xticklabels('') + pb.draw() + fig.tight_layout(h_pad=.01) # , rect=(0, 0, 1, .95)) + return fig diff --git a/GPy/util/visualize.py b/GPy/plotting/matplot_dep/visualize.py similarity index 98% rename from GPy/util/visualize.py rename to GPy/plotting/matplot_dep/visualize.py index 691326ac..99e8a0da 100644 --- a/GPy/util/visualize.py +++ b/GPy/plotting/matplot_dep/visualize.py @@ -177,7 +177,7 @@ class lvm_subplots(lvm): assert len(latent_axes)==self.nplots if vals==None: vals = Model.X[0, :] - self.latent_values = vals + self.latent_values = vals for i, axis in enumerate(latent_axes): if i == self.nplots-1: @@ -195,7 +195,7 @@ class lvm_dimselect(lvm): A visualizer for latent variable models which allows selection of the latent dimensions to use by clicking on a bar chart of their length scales. For an example of the visualizer's use try: - + GPy.examples.dimensionality_reduction.BGPVLM_oil() """ @@ -219,11 +219,11 @@ class lvm_dimselect(lvm): new_index = max(0,min(int(np.round(event.xdata-0.5)),self.model.input_dim-1)) if event.button == 1: # Make it red if and y-axis (red=port=left) if it is a left button click - self.latent_index[1] = new_index + self.latent_index[1] = new_index else: # Make it green and x-axis (green=starboard=right) if it is a right button click self.latent_index[0] = new_index - + self.show_sensitivities() self.latent_axes.cla() @@ -288,7 +288,7 @@ class image_show(matplotlib_show): def modify(self, vals): self.set_image(vals.copy()) self.handle.set_array(self.vals) - self.axes.figure.canvas.draw() + self.axes.figure.canvas.draw() def set_image(self, vals): dim = self.dimensions[0] * self.dimensions[1] @@ -306,7 +306,7 @@ class image_show(matplotlib_show): last_col = (iC+1)*self.dimensions[1] self.vals[first_row:last_row, first_col:last_col] = cur_img - else: + else: self.vals = np.reshape(vals[0,dim*self.select_image+np.array(range(dim))], self.dimensions, order=self.order) 
if self.transpose: self.vals = self.vals.T @@ -359,7 +359,7 @@ class mocap_data_show_vpython(vpython_show): def modify_edges(self): self.line_handle = [] - if not self.connect==None: + if not self.connect==None: self.I, self.J = np.nonzero(self.connect) for rod, i, j in zip(self.rods, self.I, self.J): rod.pos, rod.axis = self.pos_axis(i, j) @@ -404,7 +404,7 @@ class mocap_data_show(matplotlib_show): def draw_vertices(self): self.points_handle = self.axes.scatter(self.vals[:, 0], self.vals[:, 1], self.vals[:, 2]) - + def draw_edges(self): self.line_handle = [] if not self.connect==None: @@ -423,7 +423,7 @@ class mocap_data_show(matplotlib_show): z.append(self.vals[j, 2]) z.append(np.NaN) self.line_handle = self.axes.plot(np.array(x), np.array(y), np.array(z), 'b-') - + def modify(self, vals): self.vals = vals.copy() self.process_values() diff --git a/GPy/util/__init__.py b/GPy/util/__init__.py index 398dd252..c25b1349 100644 --- a/GPy/util/__init__.py +++ b/GPy/util/__init__.py @@ -4,21 +4,12 @@ import linalg import misc -import plot import squashers -import Tango import warping_functions import datasets import mocap -import visualize import decorators import classification -import latent_space_visualizations -try: - import maps -except: - pass - maps = "warning: the maps module requires pyshp (shapefile). Install it to remove this message" try: import sympy @@ -29,5 +20,3 @@ except ImportError as e: if _sympy_available: import symbolic - -import netpbmfile diff --git a/GPy/util/linalg.py b/GPy/util/linalg.py index 4cc2d7e3..b8c6a1df 100644 --- a/GPy/util/linalg.py +++ b/GPy/util/linalg.py @@ -279,14 +279,14 @@ def ppca(Y, Q, iterations=100): def ppca_missing_data_at_random(Y, Q, iters=100): """ EM implementation of Probabilistic pca for when there is missing data. - + Taken from .. 
math: \\mathbf{Y} = \mathbf{XW} + \\epsilon \\text{, where} \\epsilon = \\mathcal{N}(0, \\sigma^2 \mathbf{I}) - - :returns: X, W, sigma^2 + + :returns: X, W, sigma^2 """ from numpy.ma import dot as madot import diag @@ -300,19 +300,21 @@ def ppca_missing_data_at_random(Y, Q, iters=100): nu = 1. #num_obs_i = 1./Y.count() Ycentered = Y - Y.mean(0) - + X = np.zeros((N,Q)) cs = common_subarrays(Y.mask) cr = common_subarrays(Y.mask, 1) Sigma = np.zeros((N, Q, Q)) Sigma2 = np.zeros((N, Q, Q)) mu = np.zeros(D) + """ if debug: import matplotlib.pyplot as pylab - fig = pylab.figure("FIT MISSING DATA"); + fig = pylab.figure("FIT MISSING DATA"); ax = fig.gca() ax.cla() lines = pylab.plot(np.zeros((N,Q)).dot(W)) + """ W2 = np.zeros((Q,D)) for i in range(iters): @@ -358,6 +360,7 @@ def ppca_missing_data_at_random(Y, Q, iters=100): nu2 /= N nu4 = (((Ycentered - X.dot(W))**2).sum(0) + W.T.dot(Sigma.sum(0).dot(W)).sum(0)).sum()/N import ipdb;ipdb.set_trace() + """ if debug: #print Sigma[0] print "nu:", nu, "sum(X):", X.sum() @@ -368,6 +371,7 @@ def ppca_missing_data_at_random(Y, Q, iters=100): ax.set_ylim(pred_y.min(), pred_y.max()) fig.canvas.draw() time.sleep(.3) + """ return np.asarray_chkfinite(X), np.asarray_chkfinite(W), nu