diff --git a/GPy/examples/dimensionality_reduction.py b/GPy/examples/dimensionality_reduction.py index 024b12ee..ce1c89e8 100644 --- a/GPy/examples/dimensionality_reduction.py +++ b/GPy/examples/dimensionality_reduction.py @@ -459,7 +459,7 @@ def mrd_simulation(optimize=True, verbose=True, plot=True, plot_sim=True, **kw): D1, D2, D3, N, num_inducing, Q = 60, 20, 36, 60, 6, 5 _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, plot_sim) - k = kern.Linear(Q) + kern.White(Q, variance=1e-4) + k = kern.Linear(Q, ARD=True) + kern.White(Q, variance=1e-4) m = MRD(Ylist, input_dim=Q, num_inducing=num_inducing, kernel=k, initx="PCA_concat", initz='permute', **kw) m['.*noise'] = [Y.var() / 40. for Y in Ylist] @@ -479,7 +479,7 @@ def mrd_simulation_missing_data(optimize=True, verbose=True, plot=True, plot_sim D1, D2, D3, N, num_inducing, Q = 60, 20, 36, 60, 6, 5 _, _, Ylist = _simulate_matern(D1, D2, D3, N, num_inducing, plot_sim) - k = kern.Linear(Q) + kern.White(Q, variance=1e-4) + k = kern.Linear(Q, ARD=True) + kern.White(Q, variance=1e-4) inanlist = [] for Y in Ylist: diff --git a/GPy/installation.cfg b/GPy/installation.cfg index 8458a86b..841bf608 100644 --- a/GPy/installation.cfg +++ b/GPy/installation.cfg @@ -15,4 +15,4 @@ # [plotting] -# library = matplotlib # plotly +# library = matplotlib # plotly, none diff --git a/GPy/models/mrd.py b/GPy/models/mrd.py index be28d1a5..4e7f2f3b 100644 --- a/GPy/models/mrd.py +++ b/GPy/models/mrd.py @@ -5,14 +5,14 @@ import numpy as np import itertools, logging from ..kern import Kern -from GPy.core.parameterization.variational import NormalPrior +from ..core.parameterization.variational import NormalPrior from ..core.parameterization import Param from paramz import ObsAr from ..inference.latent_function_inference.var_dtc import VarDTC from ..inference.latent_function_inference import InferenceMethodList from ..likelihoods import Gaussian from ..util.initialization import initialize_latent -from 
GPy.models.bayesian_gplvm_minibatch import BayesianGPLVMMiniBatch +from ..models.bayesian_gplvm_minibatch import BayesianGPLVMMiniBatch class MRD(BayesianGPLVMMiniBatch): """ @@ -215,40 +215,6 @@ class MRD(BayesianGPLVMMiniBatch): Z = np.random.randn(self.num_inducing, self.input_dim) * X.var() return Z - def _handle_plotting(self, fignum, axes, plotf, sharex=False, sharey=False): - import matplotlib.pyplot as plt - if axes is None: - fig = plt.figure(num=fignum) - sharex_ax = None - sharey_ax = None - plots = [] - for i, g in enumerate(self.bgplvms): - try: - if sharex: - sharex_ax = ax # @UndefinedVariable - sharex = False # dont set twice - if sharey: - sharey_ax = ax # @UndefinedVariable - sharey = False # dont set twice - except: - pass - if axes is None: - ax = fig.add_subplot(1, len(self.bgplvms), i + 1, sharex=sharex_ax, sharey=sharey_ax) - elif isinstance(axes, (tuple, list, np.ndarray)): - ax = axes[i] - else: - raise ValueError("Need one axes per latent dimension input_dim") - plots.append(plotf(i, g, ax)) - if sharey_ax is not None: - plt.setp(ax.get_yticklabels(), visible=False) - plt.draw() - if axes is None: - try: - fig.tight_layout() - except: - pass - return plots - def predict(self, Xnew, full_cov=False, Y_metadata=None, kern=None, Yindex=0): """ Prediction for data set Yindex[default=0]. @@ -270,59 +236,53 @@ class MRD(BayesianGPLVMMiniBatch): # sharex=sharex, sharey=sharey) # return fig - def plot_scales(self, fignum=None, ax=None, titles=None, sharex=False, sharey=True, *args, **kwargs): + def plot_scales(self, titles=None, fig_kwargs=dict(figsize=None, tight_layout=True), **kwargs): """ - - TODO: Explain other parameters + Plot input sensitivity for all datasets, to see which input dimensions are + significant for which dataset. :param titles: titles for axes of datasets + kwargs go into plot_ARD for each kernel. 
""" + from ..plotting import plotting_library as pl + if titles is None: titles = [r'${}$'.format(name) for name in self.names] - ymax = reduce(max, [np.ceil(max(g.kern.input_sensitivity())) for g in self.bgplvms]) - def plotf(i, g, ax): - #ax.set_ylim([0,ymax]) - return g.kern.plot_ARD(ax=ax, title=titles[i], *args, **kwargs) - fig = self._handle_plotting(fignum, ax, plotf, sharex=sharex, sharey=sharey) - return fig + + M = len(self.bgplvms) + fig = pl().figure(rows=1, cols=M, **fig_kwargs) + plots = {} + for c in range(M): + canvas = self.bgplvms[c].kern.plot_ARD(title=titles[c], figure=fig, col=c+1, **kwargs) + plots[titles[c]] = canvas + pl().show_canvas(canvas) + return plots def plot_latent(self, labels=None, which_indices=None, - resolution=50, ax=None, marker='o', s=40, - fignum=None, plot_inducing=True, legend=True, + resolution=60, legend=True, plot_limits=None, - aspect='auto', updates=False, predict_kwargs={}, imshow_kwargs={}): + updates=False, + kern=None, marker='<>^vsd', + num_samples=1000, projection='2d', + predict_kwargs={}, + scatter_kwargs=None, **imshow_kwargs): """ see plotting.matplot_dep.dim_reduction_plots.plot_latent if predict_kwargs is None, will plot latent spaces for 0th dataset (and kernel), otherwise give predict_kwargs=dict(Yindex='index') for plotting only the latent space of dataset with 'index'. """ - import sys - assert "matplotlib" in sys.modules, "matplotlib package has not been imported." 
- from matplotlib import pyplot as plt - from ..plotting.matplot_dep import dim_reduction_plots + from ..plotting.gpy_plot.latent_plots import plot_latent + if "Yindex" not in predict_kwargs: predict_kwargs['Yindex'] = 0 Yindex = predict_kwargs['Yindex'] - if ax is None: - fig = plt.figure(num=fignum) - ax = fig.add_subplot(111) - else: - fig = ax.figure + self.kern = self.bgplvms[Yindex].kern self.likelihood = self.bgplvms[Yindex].likelihood - plot = dim_reduction_plots.plot_latent(self, labels, which_indices, - resolution, ax, marker, s, - fignum, plot_inducing, legend, - plot_limits, aspect, updates, predict_kwargs, imshow_kwargs) - ax.set_title(self.bgplvms[Yindex].name) - try: - fig.tight_layout() - except: - pass - return plot + return plot_latent(self, labels, which_indices, resolution, legend, plot_limits, updates, kern, marker, num_samples, projection, scatter_kwargs) def __getstate__(self): state = super(MRD, self).__getstate__() diff --git a/GPy/plotting/__init__.py b/GPy/plotting/__init__.py index c46d5281..4b833fe3 100644 --- a/GPy/plotting/__init__.py +++ b/GPy/plotting/__init__.py @@ -25,18 +25,66 @@ def change_plotting_library(lib): current_lib[0] = PlotlyPlots() if lib == 'none': current_lib[0] = None + inject_plotting() #=========================================================================== except (ImportError, NameError): config.set('plotting', 'library', 'none') + raise import warnings warnings.warn(ImportWarning("You spevified {} in your configuration, but is not available. Install newest version of {} for plotting".format(lib, lib))) -from ..util.config import config, NoOptionError -try: - lib = config.get('plotting', 'library') - change_plotting_library(lib) -except NoOptionError: - print("No plotting library was specified in config file. \n{}".format(error_suggestion)) +def inject_plotting(): + if current_lib[0] is not None: + # Inject the plots into classes here: + + # Already converted to new style: + from . 
import gpy_plot + + from ..core import GP + GP.plot_data = gpy_plot.data_plots.plot_data + GP.plot_data_error = gpy_plot.data_plots.plot_data_error + GP.plot_errorbars_trainset = gpy_plot.data_plots.plot_errorbars_trainset + GP.plot_mean = gpy_plot.gp_plots.plot_mean + GP.plot_confidence = gpy_plot.gp_plots.plot_confidence + GP.plot_density = gpy_plot.gp_plots.plot_density + GP.plot_samples = gpy_plot.gp_plots.plot_samples + GP.plot = gpy_plot.gp_plots.plot + GP.plot_f = gpy_plot.gp_plots.plot_f + GP.plot_magnification = gpy_plot.latent_plots.plot_magnification + + from ..core import SparseGP + SparseGP.plot_inducing = gpy_plot.data_plots.plot_inducing + + from ..models import GPLVM, BayesianGPLVM, bayesian_gplvm_minibatch, SSGPLVM, SSMRD + GPLVM.plot_latent = gpy_plot.latent_plots.plot_latent + GPLVM.plot_scatter = gpy_plot.latent_plots.plot_latent_scatter + GPLVM.plot_inducing = gpy_plot.latent_plots.plot_latent_inducing + GPLVM.plot_steepest_gradient_map = gpy_plot.latent_plots.plot_steepest_gradient_map + BayesianGPLVM.plot_latent = gpy_plot.latent_plots.plot_latent + BayesianGPLVM.plot_scatter = gpy_plot.latent_plots.plot_latent_scatter + BayesianGPLVM.plot_inducing = gpy_plot.latent_plots.plot_latent_inducing + BayesianGPLVM.plot_steepest_gradient_map = gpy_plot.latent_plots.plot_steepest_gradient_map + bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch.plot_latent = gpy_plot.latent_plots.plot_latent + bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch.plot_scatter = gpy_plot.latent_plots.plot_latent_scatter + bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch.plot_inducing = gpy_plot.latent_plots.plot_latent_inducing + bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch.plot_steepest_gradient_map = gpy_plot.latent_plots.plot_steepest_gradient_map + SSGPLVM.plot_latent = gpy_plot.latent_plots.plot_latent + SSGPLVM.plot_scatter = gpy_plot.latent_plots.plot_latent_scatter + SSGPLVM.plot_inducing = gpy_plot.latent_plots.plot_latent_inducing + 
SSGPLVM.plot_steepest_gradient_map = gpy_plot.latent_plots.plot_steepest_gradient_map + + from ..kern import Kern + Kern.plot_covariance = gpy_plot.kernel_plots.plot_covariance + def deprecate_plot(self, *args, **kwargs): + import warnings + warnings.warn(DeprecationWarning('Kern.plot is being deprecated and will not be available in the 1.0 release. Use Kern.plot_covariance instead')) + return self.plot_covariance(*args, **kwargs) + Kern.plot = deprecate_plot + Kern.plot_ARD = gpy_plot.kernel_plots.plot_ARD + + from ..inference.optimization import Optimizer + Optimizer.plot = gpy_plot.inference_plots.plot_optimizer + # Variational plot! def plotting_library(): if current_lib[0] is None: @@ -53,54 +101,10 @@ def show(figure, **kwargs): """ return plotting_library().show_canvas(figure, **kwargs) -if config.get('plotting', 'library') is not 'none': - # Inject the plots into classes here: - # Already converted to new style: - from . import gpy_plot - - from ..core import GP - GP.plot_data = gpy_plot.data_plots.plot_data - GP.plot_data_error = gpy_plot.data_plots.plot_data_error - GP.plot_errorbars_trainset = gpy_plot.data_plots.plot_errorbars_trainset - GP.plot_mean = gpy_plot.gp_plots.plot_mean - GP.plot_confidence = gpy_plot.gp_plots.plot_confidence - GP.plot_density = gpy_plot.gp_plots.plot_density - GP.plot_samples = gpy_plot.gp_plots.plot_samples - GP.plot = gpy_plot.gp_plots.plot - GP.plot_f = gpy_plot.gp_plots.plot_f - GP.plot_magnification = gpy_plot.latent_plots.plot_magnification - - from ..core import SparseGP - SparseGP.plot_inducing = gpy_plot.data_plots.plot_inducing - - from ..models import GPLVM, BayesianGPLVM, bayesian_gplvm_minibatch, SSGPLVM, SSMRD - GPLVM.plot_latent = gpy_plot.latent_plots.plot_latent - GPLVM.plot_scatter = gpy_plot.latent_plots.plot_latent_scatter - GPLVM.plot_inducing = gpy_plot.latent_plots.plot_latent_inducing - GPLVM.plot_steepest_gradient_map = gpy_plot.latent_plots.plot_steepest_gradient_map - BayesianGPLVM.plot_latent = 
gpy_plot.latent_plots.plot_latent - BayesianGPLVM.plot_scatter = gpy_plot.latent_plots.plot_latent_scatter - BayesianGPLVM.plot_inducing = gpy_plot.latent_plots.plot_latent_inducing - BayesianGPLVM.plot_steepest_gradient_map = gpy_plot.latent_plots.plot_steepest_gradient_map - bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch.plot_latent = gpy_plot.latent_plots.plot_latent - bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch.plot_scatter = gpy_plot.latent_plots.plot_latent_scatter - bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch.plot_inducing = gpy_plot.latent_plots.plot_latent_inducing - bayesian_gplvm_minibatch.BayesianGPLVMMiniBatch.plot_steepest_gradient_map = gpy_plot.latent_plots.plot_steepest_gradient_map - SSGPLVM.plot_latent = gpy_plot.latent_plots.plot_latent - SSGPLVM.plot_scatter = gpy_plot.latent_plots.plot_latent_scatter - SSGPLVM.plot_inducing = gpy_plot.latent_plots.plot_latent_inducing - SSGPLVM.plot_steepest_gradient_map = gpy_plot.latent_plots.plot_steepest_gradient_map - - from ..kern import Kern - Kern.plot_covariance = gpy_plot.kernel_plots.plot_covariance - def deprecate_plot(self, *args, **kwargs): - import warnings - warnings.warn(DeprecationWarning('Kern.plot is being deprecated and will not be available in the 1.0 release. Use Kern.plot_covariance instead')) - return self.plot_covariance(*args, **kwargs) - Kern.plot = deprecate_plot - Kern.plot_ARD = gpy_plot.kernel_plots.plot_ARD - - from ..inference.optimization import Optimizer - Optimizer.plot = gpy_plot.inference_plots.plot_optimizer - # Variational plot! +from ..util.config import config, NoOptionError +try: + lib = config.get('plotting', 'library') + change_plotting_library(lib) +except NoOptionError: + print("No plotting library was specified in config file. 
\n{}".format(error_suggestion)) \ No newline at end of file diff --git a/GPy/plotting/gpy_plot/kernel_plots.py b/GPy/plotting/gpy_plot/kernel_plots.py index 492754b2..2255a665 100644 --- a/GPy/plotting/gpy_plot/kernel_plots.py +++ b/GPy/plotting/gpy_plot/kernel_plots.py @@ -33,7 +33,7 @@ from .. import Tango from .plot_util import update_not_existing_kwargs, helper_for_plot_data from ...kern.src.kern import Kern, CombinationKernel -def plot_ARD(kernel, filtering=None, legend=False, **kwargs): +def plot_ARD(kernel, filtering=None, legend=False, canvas=None, **kwargs): """ If an ARD kernel is present, plot a bar representation using matplotlib @@ -62,7 +62,11 @@ def plot_ARD(kernel, filtering=None, legend=False, **kwargs): bars = [] kwargs = update_not_existing_kwargs(kwargs, pl().defaults.ard) - canvas, kwargs = pl().new_canvas(xlim=(-.5, kernel._effective_input_dim-.5), xlabel='input dimension', ylabel='sensitivity', **kwargs) + + + if canvas is None: + canvas, kwargs = pl().new_canvas(xlim=(-.5, kernel._effective_input_dim-.5), xlabel='input dimension', ylabel='sensitivity', **kwargs) + for i in range(ard_params.shape[0]): if parts[i].name in filtering: c = Tango.nextMedium() @@ -96,7 +100,7 @@ def plot_covariance(kernel, x=None, label=None, """ X = np.ones((2, kernel._effective_input_dim)) * [[-3], [3]] _, free_dims, Xgrid, xx, yy, _, _, resolution = helper_for_plot_data(kernel, X, plot_limits, visible_dims, None, resolution) - + from numbers import Number if x is None: from ...kern.src.stationary import Stationary @@ -104,7 +108,7 @@ def plot_covariance(kernel, x=None, label=None, elif isinstance(x, Number): x = np.ones((1, kernel._effective_input_dim))*x K = kernel.K(Xgrid, x) - + if projection == '3d': xlabel = 'X[:,0]' ylabel = 'X[:,1]' diff --git a/GPy/plotting/matplot_dep/plot_definitions.py b/GPy/plotting/matplot_dep/plot_definitions.py index 9eb9efb0..52100ea3 100644 --- a/GPy/plotting/matplot_dep/plot_definitions.py +++ 
b/GPy/plotting/matplot_dep/plot_definitions.py
@@ -42,10 +42,11 @@ class MatplotlibPlots(AbstractPlottingLibrary):
         super(MatplotlibPlots, self).__init__()
         self._defaults = defaults.__dict__
 
-    def figure(self, rows=1, cols=1, **kwargs):
-        fig = plt.figure(**kwargs)
+    def figure(self, rows=1, cols=1, gridspec_kwargs={}, tight_layout=True, **kwargs):
+        fig = plt.figure(tight_layout=tight_layout, **kwargs)
         fig.rows = rows
         fig.cols = cols
+        fig.gridspec = plt.GridSpec(rows, cols, **gridspec_kwargs)
         return fig
 
     def new_canvas(self, figure=None, row=1, col=1, projection='2d', xlabel=None, ylabel=None, zlabel=None, title=None, xlim=None, ylim=None, zlim=None, **kwargs):
@@ -56,7 +57,9 @@ class MatplotlibPlots(AbstractPlottingLibrary):
         if 'ax' in kwargs:
             ax = kwargs.pop('ax')
         else:
-            if 'num' in kwargs and 'figsize' in kwargs:
+            if figure is not None:
+                fig = figure
+            elif 'num' in kwargs and 'figsize' in kwargs:
                 fig = self.figure(num=kwargs.pop('num'), figsize=kwargs.pop('figsize'))
             elif 'num' in kwargs:
                 fig = self.figure(num=kwargs.pop('num'))
@@ -66,7 +69,7 @@ class MatplotlibPlots(AbstractPlottingLibrary):
                 fig = self.figure()
 
             #if hasattr(fig, 'rows') and hasattr(fig, 'cols'):
-            ax = fig.add_subplot(fig.rows, fig.cols, (col,row), projection=projection)
+            ax = fig.add_subplot(fig.gridspec[row-1, col-1], projection=projection)
 
         if xlim is not None: ax.set_xlim(xlim)
         if ylim is not None: ax.set_ylim(ylim)
@@ -79,7 +82,7 @@ class MatplotlibPlots(AbstractPlottingLibrary):
         return ax, kwargs
 
     def add_to_canvas(self, ax, plots, legend=False, title=None, **kwargs):
-        ax.autoscale_view()
+        #ax.autoscale_view()
         fontdict=dict(family='sans-serif', weight='light', size=9)
         if legend is True:
             ax.legend(*ax.get_legend_handles_labels())
@@ -89,9 +92,7 @@ class MatplotlibPlots(AbstractPlottingLibrary):
         if title is not None:
             ax.figure.suptitle(title)
         return ax
 
-    def show_canvas(self, ax, tight_layout=False, **kwargs):
-        if tight_layout:
-            ax.figure.tight_layout()
+    def show_canvas(self, ax, **kwargs):
ax.figure.canvas.draw() return ax.figure diff --git a/GPy/plotting/matplot_dep/ssgplvm.py b/GPy/plotting/matplot_dep/ssgplvm.py index b741bc5d..0ed8a043 100644 --- a/GPy/plotting/matplot_dep/ssgplvm.py +++ b/GPy/plotting/matplot_dep/ssgplvm.py @@ -13,16 +13,16 @@ class SSGPLVM_plot(object): self.model = model self.imgsize= imgsize assert model.Y.shape[1] == imgsize[0]*imgsize[1] - + def plot_inducing(self): fig1 = pylab.figure() mean = self.model.posterior.mean arr = mean.reshape(*(mean.shape[0],self.imgsize[1],self.imgsize[0])) plot_2D_images(fig1, arr) fig1.gca().set_title('The mean of inducing points') - + fig2 = pylab.figure() covar = self.model.posterior.covariance plot_2D_images(fig2, covar) fig2.gca().set_title('The variance of inducing points') - + diff --git a/GPy/testing/__init__.py b/GPy/testing/__init__.py index 2e64d90e..abad1fa3 100644 --- a/GPy/testing/__init__.py +++ b/GPy/testing/__init__.py @@ -1,14 +1,9 @@ -# Copyright (c) 2014, Max Zwiessele +# Copyright (c) 2014, Max Zwiessele, GPy Authors # Licensed under the BSD 3-clause license (see LICENSE.txt) -""" - -MaxZ - -""" import unittest import sys def deepTest(reason): if reason: return lambda x:x - return unittest.skip("Not deep scanning, enable deepscan by adding 'deep' argument") + return unittest.skip("Not deep scanning, enable deepscan by adding 'deep' argument to unittest call")