diff --git a/GPy/kern/src/add.py b/GPy/kern/src/add.py index 41b4febb..9f80ac9d 100644 --- a/GPy/kern/src/add.py +++ b/GPy/kern/src/add.py @@ -254,7 +254,4 @@ class Add(CombinationKernel): i_s[k._all_dims_active] += k.input_sensitivity(summarize) return i_s else: - i_s = np.zeros((len(self.parts), self.input_dim)) - from operator import setitem - [setitem(i_s, (i, k._all_dims_active), k.input_sensitivity(summarize)) for i, k in enumerate(self.parts)] - return i_s + return super(Add, self).input_sensitivity(summarize) diff --git a/GPy/kern/src/kern.py b/GPy/kern/src/kern.py index 19e98b89..c3d98a9b 100644 --- a/GPy/kern/src/kern.py +++ b/GPy/kern/src/kern.py @@ -341,7 +341,20 @@ class CombinationKernel(Kern): otherwise put everything into an array with shape (#kernels, input_dim) in the order of appearance of the kernels in the parameterized object. """ - raise NotImplementedError("Choose the kernel you want to get the sensitivity for. You need to override the default behaviour for getting the input sensitivity to be able to get the input sensitivity. For sum kernel it is the sum of all sensitivities, TODO: product kernel? Other kernels?, also TODO: shall we return all the sensitivities here in the combination kernel? So we can combine them however we want? This could lead to just plot all the sensitivities here...") + if not summarize: + num_params = [0] + parts = [] + def sum_params(x): + if (not isinstance(x, CombinationKernel)) and isinstance(x, Kern): + num_params[0] += 1 + parts.append(x) + self.traverse(sum_params) + i_s = np.zeros((num_params[0], self.input_dim)) + from operator import setitem + [setitem(i_s, (i, k._all_dims_active), k.input_sensitivity(summarize)) for i, k in enumerate(parts)] + return i_s + else: + raise NotImplementedError("Choose the kernel you want to get the sensitivity for. You need to override the default behaviour for getting the input sensitivity to be able to get the input sensitivity. 
For sum kernel it is the sum of all sensitivities, TODO: product kernel? Other kernels?, also TODO: shall we return all the sensitivities here in the combination kernel? So we can combine them however we want? This could lead to just plot all the sensitivities here...") def _check_active_dims(self, X): return diff --git a/GPy/kern/src/prod.py b/GPy/kern/src/prod.py index b47e663d..68883af4 100644 --- a/GPy/kern/src/prod.py +++ b/GPy/kern/src/prod.py @@ -97,4 +97,11 @@ class Prod(CombinationKernel): target += p.gradients_X_diag(k/p.Kdiag(X),X) return target - + def input_sensitivity(self, summarize=True): + if summarize: + i_s = np.ones((self.input_dim)) + for k in self.parts: + i_s[k._all_dims_active] *= k.input_sensitivity(summarize) + return i_s + else: + return super(Prod, self).input_sensitivity(summarize) diff --git a/GPy/plotting/gpy_plot/gp_plots.py b/GPy/plotting/gpy_plot/gp_plots.py index 2b00dfa8..da92748d 100644 --- a/GPy/plotting/gpy_plot/gp_plots.py +++ b/GPy/plotting/gpy_plot/gp_plots.py @@ -32,7 +32,7 @@ import numpy as np from . import plotting_library as pl from .plot_util import helper_for_plot_data, update_not_existing_kwargs, \ - helper_predict_with_model, get_which_data_ycols + helper_predict_with_model, get_which_data_ycols, get_x_y_var from .data_plots import _plot_data, _plot_inducing, _plot_data_error def plot_mean(self, plot_limits=None, fixed_inputs=None, @@ -66,8 +66,9 @@ def plot_mean(self, plot_limits=None, fixed_inputs=None, :param dict predict_kw: the keyword arguments for the prediction. 
If you want to plot a specific kernel give dict(kern=) in here """ canvas, kwargs = pl().new_canvas(projection=projection, **kwargs) - helper_data = helper_for_plot_data(self, plot_limits, visible_dims, fixed_inputs, resolution) - helper_prediction = helper_predict_with_model(self, helper_data[5], plot_raw, + X = get_x_y_var(self)[0] + helper_data = helper_for_plot_data(self, X, plot_limits, visible_dims, fixed_inputs, resolution) + helper_prediction = helper_predict_with_model(self, helper_data[2], plot_raw, apply_link, None, get_which_data_ycols(self, which_data_ycols), predict_kw) @@ -79,7 +80,7 @@ def _plot_mean(self, canvas, helper_data, helper_prediction, levels=20, projection='2d', label=None, **kwargs): - _, _, _, _, free_dims, Xgrid, x, y, _, _, resolution = helper_data + _, free_dims, Xgrid, x, y, _, _, resolution = helper_data if len(free_dims)<=2: mu, _, _ = helper_prediction if len(free_dims)==1: @@ -135,15 +136,16 @@ def plot_confidence(self, lower=2.5, upper=97.5, plot_limits=None, fixed_inputs= """ canvas, kwargs = pl().new_canvas(**kwargs) ycols = get_which_data_ycols(self, which_data_ycols) - helper_data = helper_for_plot_data(self, plot_limits, visible_dims, fixed_inputs, resolution) - helper_prediction = helper_predict_with_model(self, helper_data[5], plot_raw, apply_link, + X = get_x_y_var(self)[0] + helper_data = helper_for_plot_data(self, X, plot_limits, visible_dims, fixed_inputs, resolution) + helper_prediction = helper_predict_with_model(self, helper_data[2], plot_raw, apply_link, (lower, upper), ycols, predict_kw) plots = _plot_confidence(self, canvas, helper_data, helper_prediction, label, **kwargs) return pl().add_to_canvas(canvas, plots, legend=label is not None) def _plot_confidence(self, canvas, helper_data, helper_prediction, label, **kwargs): - _, _, _, _, free_dims, Xgrid, _, _, _, _, _ = helper_data + _, free_dims, Xgrid, _, _, _, _, _ = helper_data update_not_existing_kwargs(kwargs, pl().defaults.confidence_interval) # 
@UndefinedVariable if len(free_dims)<=1: if len(free_dims)==1: @@ -188,8 +190,9 @@ def plot_samples(self, plot_limits=None, fixed_inputs=None, """ canvas, kwargs = pl().new_canvas(projection=projection, **kwargs) ycols = get_which_data_ycols(self, which_data_ycols) - helper_data = helper_for_plot_data(self, plot_limits, visible_dims, fixed_inputs, resolution) - helper_prediction = helper_predict_with_model(self, helper_data[5], plot_raw, apply_link, + X = get_x_y_var(self)[0] + helper_data = helper_for_plot_data(self, X, plot_limits, visible_dims, fixed_inputs, resolution) + helper_prediction = helper_predict_with_model(self, helper_data[2], plot_raw, apply_link, None, ycols, predict_kw, samples) plots = _plot_samples(self, canvas, helper_data, helper_prediction, @@ -198,7 +201,7 @@ def plot_samples(self, plot_limits=None, fixed_inputs=None, def _plot_samples(self, canvas, helper_data, helper_prediction, projection, label, **kwargs): - _, _, _, _, free_dims, Xgrid, x, y, _, _, resolution = helper_data + _, free_dims, Xgrid, x, y, _, _, resolution = helper_data samples = helper_prediction[2] if len(free_dims)<=2: @@ -247,8 +250,9 @@ def plot_density(self, plot_limits=None, fixed_inputs=None, :param dict predict_kw: the keyword arguments for the prediction. 
If you want to plot a specific kernel give dict(kern=) in here """ canvas, kwargs = pl().new_canvas(**kwargs) - helper_data = helper_for_plot_data(self, plot_limits, visible_dims, fixed_inputs, resolution) - helper_prediction = helper_predict_with_model(self, helper_data[5], plot_raw, + X = get_x_y_var(self)[0] + helper_data = helper_for_plot_data(self, X, plot_limits, visible_dims, fixed_inputs, resolution) + helper_prediction = helper_predict_with_model(self, helper_data[2], plot_raw, apply_link, np.linspace(2.5, 97.5, levels*2), get_which_data_ycols(self, which_data_ycols), predict_kw) @@ -256,7 +260,7 @@ def plot_density(self, plot_limits=None, fixed_inputs=None, return pl().add_to_canvas(canvas, plots) def _plot_density(self, canvas, helper_data, helper_prediction, label, **kwargs): - _, _, _, _, free_dims, Xgrid, _, _, _, _, _ = helper_data + _, free_dims, Xgrid, _, _, _, _, _ = helper_data mu, percs, _ = helper_prediction update_not_existing_kwargs(kwargs, pl().defaults.density) # @UndefinedVariable @@ -316,8 +320,9 @@ def plot(self, plot_limits=None, fixed_inputs=None, :param bool legend: convenience, whether to put a legend on the plot or not. 
""" canvas, _ = pl().new_canvas(projection=projection, **kwargs) - helper_data = helper_for_plot_data(self, plot_limits, visible_dims, fixed_inputs, resolution) - helper_prediction = helper_predict_with_model(self, helper_data[5], plot_raw, + X = get_x_y_var(self)[0] + helper_data = helper_for_plot_data(self, X, plot_limits, visible_dims, fixed_inputs, resolution) + helper_prediction = helper_predict_with_model(self, helper_data[2], plot_raw, apply_link, np.linspace(2.5, 97.5, levels*2) if plot_density else (lower,upper), get_which_data_ycols(self, which_data_ycols), predict_kw, samples) @@ -330,7 +335,7 @@ def plot(self, plot_limits=None, fixed_inputs=None, plots.update(_plot_data_error(self, canvas, which_data_rows, which_data_ycols, visible_dims, projection, "Data Error")) plots.update(_plot(self, canvas, plots, helper_data, helper_prediction, levels, plot_inducing, plot_density, projection)) if plot_raw and (samples_likelihood > 0): - helper_prediction = helper_predict_with_model(self, helper_data[5], False, + helper_prediction = helper_predict_with_model(self, helper_data[2], False, apply_link, None, get_which_data_ycols(self, which_data_ycols), predict_kw, samples_likelihood) diff --git a/GPy/plotting/gpy_plot/kernel_plots.py b/GPy/plotting/gpy_plot/kernel_plots.py index 2194fdfc..daae15b8 100644 --- a/GPy/plotting/gpy_plot/kernel_plots.py +++ b/GPy/plotting/gpy_plot/kernel_plots.py @@ -30,10 +30,8 @@ import numpy as np from . import plotting_library as pl from .. 
import Tango -from .plot_util import get_x_y_var,\ - update_not_existing_kwargs, \ - helper_for_plot_data, scatter_label_generator, subsample_X,\ - find_best_layout_for_subplots +from .plot_util import update_not_existing_kwargs, helper_for_plot_data +from ...kern.src.kern import Kern, CombinationKernel def plot_ARD(kernel, filtering=None, legend=False, **kwargs): """ @@ -53,110 +51,90 @@ def plot_ARD(kernel, filtering=None, legend=False, **kwargs): x = np.arange(kernel.input_dim) + parts = [] + def visit(x): + if (not isinstance(x, CombinationKernel)) and isinstance(x, Kern): + parts.append(x) + kernel.traverse(visit) + if filtering is None: - filtering = kernel.parameter_names(recursive=False) + filtering = [k.name for k in parts] bars = [] kwargs = update_not_existing_kwargs(kwargs, pl().defaults.ard) - canvas, kwargs = pl().new_canvas(xlim=(-.5, kernel.input_dim-.5), **kwargs) + canvas, kwargs = pl().new_canvas(xlim=(-.5, kernel.input_dim-.5), xlabel='input dimension', ylabel='sensitivity', **kwargs) for i in range(ard_params.shape[0]): - if kernel.parameters[i].name in filtering: + if parts[i].name in filtering: c = Tango.nextMedium() bars.append(pl().barplot(canvas, x, ard_params[i,:], color=c, - label=kernel.parameters[i].name, + label=parts[i].name, bottom=bottom, **kwargs)) last_bottom = ard_params[i,:] bottom += last_bottom else: - print("filtering out {}".format(kernel.parameters[i].name)) + print("filtering out {}".format(parts[i].name)) #add_bar_labels(fig, ax, [bars[-1]], bottom=bottom-last_bottom) return pl().add_to_canvas(canvas, bars, legend=legend) -def plot_covariance(kernel, x=None, label=None, plot_limits=None, visible_dims=None, resolution=None, projection=None, levels=20, **mpl_kwargs): +def plot_covariance(kernel, x=None, label=None, + plot_limits=None, visible_dims=None, resolution=None, + projection='2d', levels=20, **kwargs): """ - plot a kernel. 
- :param x: the value to use for the other kernel argument (kernels are a function of two variables!) - :param fignum: figure number of the plot - :param ax: matplotlib axis to plot on - :param title: the matplotlib title + Plot a kernel covariance w.r.t. another x. + + :param array-like x: the value to use for the other kernel argument (kernels are a function of two variables!) :param plot_limits: the range over which to plot the kernel - :resolution: the resolution of the lines used in plotting - :mpl_kwargs avalid keyword arguments to pass through to matplotlib (e.g. lw=7) + :type plot_limits: Either (xmin, xmax) for 1D or (xmin, xmax, ymin, ymax) / ((xmin, xmax), (ymin, ymax)) for 2D + :param array-like visible_dims: input dimensions (!) to use for x. Make sure to select 2 or less dimensions to plot. + :resolution: the resolution of the lines used in plotting. for 2D this defines the grid for kernel evaluation. + :param {2d|3d} projection: What projection shall we use to plot the kernel? + :param int levels: for 2D projection, how many levels for the contour plot to use? 
+ :param kwargs: valid kwargs for your specific plotting library """ - canvas, error_kwargs = pl().new_canvas(projection=projection, **error_kwargs) - _, _, _, _, free_dims, Xgrid, x, y, _, _, resolution = helper_for_plot_data(kernel, plot_limits, visible_dims, None, resolution) + X = np.ones((2, kernel.input_dim)) * [[-3], [3]] + _, free_dims, Xgrid, xx, yy, _, _, resolution = helper_for_plot_data(kernel, X, plot_limits, visible_dims, None, resolution) + + from numbers import Number + if x is None: + from ...kern.src.stationary import Stationary + x = np.ones((1, kernel.input_dim)) * (not isinstance(kernel, Stationary)) + elif isinstance(x, Number): + x = np.ones((1, kernel.input_dim))*x + K = kernel.K(Xgrid, x) + + if projection == '3d': + xlabel = 'X[:,0]' + ylabel = 'X[:,1]' + zlabel = "k(X, {!s})".format(np.asanyarray(x).tolist()) + else: + xlabel = 'X' + ylabel = "k(X, {!s})".format(np.asanyarray(x).tolist()) + zlabel = None + + canvas, kwargs = pl().new_canvas(projection=projection, xlabel=xlabel, ylabel=ylabel, zlabel=zlabel, **kwargs) if len(free_dims)<=2: if len(free_dims)==1: - if x is None: x = np.zeros((1, 1)) - else: - x = np.asarray(x) - assert x.size == 1, "The size of the fixed variable x is not 1" - x = x.reshape((1, 1)) # 1D plotting: update_not_existing_kwargs(kwargs, pl().defaults.meanplot_1d) # @UndefinedVariable - plots = dict(covariance=[pl().plot(canvas, Xgrid[:, free_dims], mu, label=label, **kwargs)]) + plots = dict(covariance=[pl().plot(canvas, Xgrid[:, free_dims], K, label=label, **kwargs)]) else: if projection == '2d': update_not_existing_kwargs(kwargs, pl().defaults.meanplot_2d) # @UndefinedVariable - plots = dict(covariance=[pl().contour(canvas, x, y, - mu.reshape(resolution, resolution).T, + plots = dict(covariance=[pl().contour(canvas, xx[:, 0], yy[0, :], + K.reshape(resolution, resolution), levels=levels, label=label, **kwargs)]) elif projection == '3d': update_not_existing_kwargs(kwargs, pl().defaults.meanplot_3d) # 
@UndefinedVariable - plots = dict(covariance=[pl().surface(canvas, x, y, - mu.reshape(resolution, resolution).T, + plots = dict(covariance=[pl().surface(canvas, xx, yy, + K.reshape(resolution, resolution), label=label, **kwargs)]) - return pl().add_to_canvas(canvas, plots) - if kernel.input_dim == 1: - - if plot_limits == None: - xmin, xmax = (x - 5).flatten(), (x + 5).flatten() - elif len(plot_limits) == 2: - xmin, xmax = plot_limits - else: - raise ValueError("Bad limits for plotting") - - Xnew = np.linspace(xmin, xmax, resolution or 201)[:, None] - Kx = kernel.K(Xnew, x) - ax.plot(Xnew, Kx, **mpl_kwargs) - ax.set_xlim(xmin, xmax) - ax.set_xlabel("x") - ax.set_ylabel("k(x,%0.1f)" % x) - - elif kernel.input_dim == 2: - if x is None: - x = np.zeros((1, 2)) - else: - x = np.asarray(x) - assert x.size == 2, "The size of the fixed variable x is not 2" - x = x.reshape((1, 2)) - - if plot_limits is None: - xmin, xmax = (x - 5).flatten(), (x + 5).flatten() - elif len(plot_limits) == 2: - xmin, xmax = plot_limits - else: - raise ValueError("Bad limits for plotting") - - - resolution = resolution or 51 - xx, yy = np.mgrid[xmin[0]:xmax[0]:1j * resolution, xmin[1]:xmax[1]:1j * resolution] - Xnew = np.vstack((xx.flatten(), yy.flatten())).T - Kx = kernel.K(Xnew, x) - Kx = Kx.reshape(resolution, resolution).T - ax.contour(xx, yy, Kx, vmin=Kx.min(), vmax=Kx.max(), cmap=pb.cm.jet, **mpl_kwargs) # @UndefinedVariable - ax.set_xlim(xmin[0], xmax[0]) - ax.set_ylim(xmin[1], xmax[1]) - ax.set_xlabel("x1") - ax.set_ylabel("x2") - ax.set_title("k(x1,x2 ; %0.1f,%0.1f)" % (x[0, 0], x[0, 1])) else: raise NotImplementedError("Cannot plot a kernel with more than two input dimensions") diff --git a/GPy/plotting/gpy_plot/latent_plots.py b/GPy/plotting/gpy_plot/latent_plots.py index 52c5fd17..2e5c7148 100644 --- a/GPy/plotting/gpy_plot/latent_plots.py +++ b/GPy/plotting/gpy_plot/latent_plots.py @@ -185,7 +185,8 @@ def plot_magnification(self, labels=None, which_indices=None, :param kwargs: the 
kwargs for the scatter plots """ input_1, input_2 = which_indices = self.get_most_significant_input_dimensions(which_indices)[:2] - X, _, _, _, _, Xgrid, _, _, xmin, xmax, resolution = helper_for_plot_data(self, plot_limits, which_indices, None, resolution) + X = get_x_y_var(self)[0] + _, _, Xgrid, _, _, xmin, xmax, resolution = helper_for_plot_data(self, X, plot_limits, which_indices, None, resolution) canvas, imshow_kwargs = pl().new_canvas(xlim=(xmin[0], xmax[0]), ylim=(xmin[1], xmax[1]), xlabel='latent dimension %i' % input_1, ylabel='latent dimension %i' % input_2, **imshow_kwargs) if (labels is not None): @@ -248,7 +249,8 @@ def plot_latent(self, labels=None, which_indices=None, :param scatter_kwargs: the kwargs for the scatter plots """ input_1, input_2 = which_indices = self.get_most_significant_input_dimensions(which_indices)[:2] - X, _, _, _, _, Xgrid, _, _, xmin, xmax, resolution = helper_for_plot_data(self, plot_limits, which_indices, None, resolution) + X = get_x_y_var(self)[0] + _, _, Xgrid, _, _, xmin, xmax, resolution = helper_for_plot_data(self, X, plot_limits, which_indices, None, resolution) canvas, imshow_kwargs = pl().new_canvas(xlim=(xmin[0], xmax[0]), ylim=(xmin[1], xmax[1]), xlabel='latent dimension %i' % input_1, ylabel='latent dimension %i' % input_2, **imshow_kwargs) if (labels is not None): @@ -313,7 +315,8 @@ def plot_steepest_gradient_map(self, output_labels=None, data_labels=None, which :param scatter_kwargs: the kwargs for the scatter plots """ input_1, input_2 = which_indices = self.get_most_significant_input_dimensions(which_indices)[:2] - X, _, _, _, _, Xgrid, _, _, xmin, xmax, resolution = helper_for_plot_data(self, plot_limits, which_indices, None, resolution) + X = get_x_y_var(self)[0] + _, _, Xgrid, _, _, xmin, xmax, resolution = helper_for_plot_data(self, X, plot_limits, which_indices, None, resolution) canvas, imshow_kwargs = pl().new_canvas(xlim=(xmin[0], xmax[0]), ylim=(xmin[1], xmax[1]), xlabel='latent dimension %i' % 
input_1, ylabel='latent dimension %i' % input_2, **imshow_kwargs) if (data_labels is not None): diff --git a/GPy/plotting/gpy_plot/plot_util.py b/GPy/plotting/gpy_plot/plot_util.py index 6aa1b8da..a60de819 100644 --- a/GPy/plotting/gpy_plot/plot_util.py +++ b/GPy/plotting/gpy_plot/plot_util.py @@ -102,19 +102,17 @@ def helper_predict_with_model(self, Xgrid, plot_raw, apply_link, percentiles, wh fsamples[:, s] = self.likelihood.gp_link.transf(fsamples[:, s]) return retmu, percs, fsamples -def helper_for_plot_data(self, plot_limits, visible_dims, fixed_inputs, resolution): +def helper_for_plot_data(self, X, plot_limits, visible_dims, fixed_inputs, resolution): """ Figure out the data, free_dims and create an Xgrid for the prediction. This is only implemented for two dimensions for now! """ - X, Xvar, Y = get_x_y_var(self) - #work out what the inputs are for plotting (1D or 2D) if fixed_inputs is None: fixed_inputs = [] - fixed_dims = get_fixed_dims(self, fixed_inputs) + fixed_dims = get_fixed_dims(fixed_inputs) free_dims = get_free_dims(self, visible_dims, fixed_dims) if len(free_dims) == 1: @@ -129,7 +127,7 @@ def helper_for_plot_data(self, plot_limits, visible_dims, fixed_inputs, resoluti y = None elif len(free_dims) == 2: #define the frame for plotting on - resolution = resolution or 50 + resolution = resolution or 35 Xnew, x, y, xmin, xmax = x_frame2D(X[:,free_dims], plot_limits, resolution) Xgrid = np.zeros((Xnew.shape[0], self.input_dim)) Xgrid[:,free_dims] = Xnew @@ -137,7 +135,7 @@ def helper_for_plot_data(self, plot_limits, visible_dims, fixed_inputs, resoluti Xgrid[:,i] = v else: raise TypeError("calculated free_dims {} from visible_dims {} and fixed_dims {} is neither 1D nor 2D".format(free_dims, visible_dims, fixed_dims)) - return X, Xvar, Y, fixed_dims, free_dims, Xgrid, x, y, xmin, xmax, resolution + return fixed_dims, free_dims, Xgrid, x, y, xmin, xmax, resolution def scatter_label_generator(labels, X, visible_dims, marker=None): ulabels = [] @@ 
-271,13 +269,16 @@ def update_not_existing_kwargs(to_update, update_from): def get_x_y_var(model): """ - The the data from a model as + Either the data from a model as X the inputs, X_variance the variance of the inputs ([default: None]) and Y the outputs + If (X, X_variance, Y) is given, this just returns. + :returns: (X, X_variance, Y) """ + # model given if hasattr(model, 'has_uncertain_inputs') and model.has_uncertain_inputs(): X = model.X.mean.values X_variance = model.X.variance.values @@ -305,7 +306,7 @@ def get_free_dims(model, visible_dims, fixed_dims): return np.asanyarray([dim for dim in dims if dim is not None]) -def get_fixed_dims(model, fixed_inputs): +def get_fixed_dims(fixed_inputs): """ Work out the fixed dimensions from the fixed_inputs list of tuples. """ @@ -339,7 +340,7 @@ def x_frame1D(X,plot_limits=None,resolution=None): else: xmin,xmax = X.min(0),X.max(0) xmin, xmax = xmin-0.25*(xmax-xmin), xmax+0.25*(xmax-xmin) - elif len(plot_limits)==2: + elif len(plot_limits) == 2: xmin, xmax = plot_limits else: raise ValueError("Bad limits for plotting") @@ -355,9 +356,15 @@ def x_frame2D(X,plot_limits=None,resolution=None): if plot_limits is None: xmin, xmax = X.min(0),X.max(0) xmin, xmax = xmin-0.075*(xmax-xmin), xmax+0.075*(xmax-xmin) - elif len(plot_limits)==2: + elif len(plot_limits) == 2: xmin, xmax = plot_limits + try: + xmin = xmin[0], xmin[1] + except (TypeError, IndexError): + # only one limit given, copy over to other lim + xmin = [plot_limits[0], plot_limits[0]] + xmax = [plot_limits[1], plot_limits[1]] + elif len(plot_limits) == 4: xmin, xmax = (plot_limits[0], plot_limits[2]), (plot_limits[1], plot_limits[3]) else: raise ValueError("Bad limits for plotting") diff --git a/GPy/testing/plotting_tests.py b/GPy/testing/plotting_tests.py index 0c1e21f9..19064094 100644 --- a/GPy/testing/plotting_tests.py +++ b/GPy/testing/plotting_tests.py @@ -143,13 +143,16 @@ def test_kernel(): matplotlib.rcParams.update(matplotlib.rcParamsDefault) 
matplotlib.rcParams[u'figure.figsize'] = (4,3) matplotlib.rcParams[u'text.usetex'] = False - k = GPy.kern.RBF(5, ARD=True) + GPy.kern.Linear(3, active_dims=[0,2,4], ARD=True) + GPy.kern.Bias(2) + k = GPy.kern.RBF(5, ARD=True) * GPy.kern.Linear(3, active_dims=[0,2,4], ARD=True) + GPy.kern.Bias(2) k.randomize() - k2 = GPy.kern.RBF(5, ARD=True) + GPy.kern.Linear(3, active_dims=[0,2,4], ARD=True) + GPy.kern.Bias(2) + GPy.kern.White(4) + k2 = GPy.kern.RBF(5, ARD=True) * GPy.kern.Linear(3, active_dims=[0,2,4], ARD=True) + GPy.kern.Bias(2) + GPy.kern.White(4) k2[:-1] = k[:] - k2.plot_ARD([_.name for _ in k.parts], legend=True) + k2.plot_ARD(['rbf', 'linear', 'bias'], legend=True) + k2.plot_covariance(visible_dims=[0, 3], plot_limits=(-1,3)) + k2.plot_covariance(visible_dims=[2], plot_limits=(-1, 3)) + k2.plot_covariance(visible_dims=[2, 4], plot_limits=((-1, 0), (5, 3)), projection='3d') for do_test in _image_comparison( - baseline_images=['kern_{}'.format(sub) for sub in ["ARD",]], + baseline_images=['kern_{}'.format(sub) for sub in ["ARD", 'cov_2d', 'cov_1d', 'cov_3d']], extensions=extensions): yield (do_test, ) diff --git a/GPy/testing/plotting_tests/baseline/kern_cov_1d.png b/GPy/testing/plotting_tests/baseline/kern_cov_1d.png new file mode 100644 index 00000000..449a686d Binary files /dev/null and b/GPy/testing/plotting_tests/baseline/kern_cov_1d.png differ diff --git a/GPy/testing/plotting_tests/baseline/kern_cov_2d.png b/GPy/testing/plotting_tests/baseline/kern_cov_2d.png new file mode 100644 index 00000000..db76f5b6 Binary files /dev/null and b/GPy/testing/plotting_tests/baseline/kern_cov_2d.png differ diff --git a/GPy/testing/plotting_tests/baseline/kern_cov_3d.png b/GPy/testing/plotting_tests/baseline/kern_cov_3d.png new file mode 100644 index 00000000..31b32b5e Binary files /dev/null and b/GPy/testing/plotting_tests/baseline/kern_cov_3d.png differ diff --git a/doc/source/conf.py b/doc/source/conf.py index 91187571..68d0c2a8 100644 --- a/doc/source/conf.py +++ 
b/doc/source/conf.py @@ -85,8 +85,9 @@ master_doc = 'index' # General information about the project. project = u'GPy' -copyright = u'2015, GPy Authors' -author = u'GPy Authors' +#author = u'`Humans `_' +author = 'GPy Authors, see https://github.com/SheffieldML/GPy/graphs/contributors' +copyright = u'2015, '+author # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/setup.py b/setup.py index 014c7f36..fdccea7d 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,7 @@ def read_to_rst(fname): try: import pypandoc rstname = "{}.{}".format(os.path.splitext(fname)[0], 'rst') - pypandoc.convert(read(fname), 'rst', rstname) + pypandoc.convert(read(fname), 'rst', outputfile=rstname) return read(rstname) except ImportError: return read(fname)