diff --git a/GPy/core/gp.py b/GPy/core/gp.py index acba1f3f..4f4de92e 100644 --- a/GPy/core/gp.py +++ b/GPy/core/gp.py @@ -1,10 +1,8 @@ # Copyright (c) 2012, GPy authors (see AUTHORS.txt). # Licensed under the BSD 3-clause license (see LICENSE.txt) - import numpy as np -from ..util.linalg import pdinv, tdot, dpotrs, dtrtrs +from ..util.linalg import tdot, dtrtrs -from ..likelihoods import EP from gp_base import GPBase class GP(GPBase): @@ -23,104 +20,36 @@ class GP(GPBase): """ def __init__(self, X, likelihood, kernel, normalize_X=False): super(GP, self).__init__(X, likelihood, kernel, normalize_X=normalize_X) - #self._set_params(self._get_params()) - - def getstate(self): - return GPBase.getstate(self) - - def setstate(self, state): - GPBase.setstate(self, state) - #self._set_params(self._get_params()) + self.posterior = self.inference_method.inference(self.kern.K(self.X), likelihood, self.Y) def parameters_changed(self): super(GP, self).parameters_changed() self.K = self.kern.K(self.X) - self.K += self.likelihood.covariance_matrix - self.Ki, self.L, self.Li, self.K_logdet = pdinv(self.K) - - # the gradient of the likelihood wrt the covariance matrix - if self.likelihood.YYT is None: - # alpha = np.dot(self.Ki, self.likelihood.Y) - alpha, _ = dpotrs(self.L, self.likelihood.Y, lower=1) - - self.dL_dK = 0.5 * (tdot(alpha) - self.output_dim * self.Ki) - else: - # tmp = mdot(self.Ki, self.likelihood.YYT, self.Ki) - tmp, _ = dpotrs(self.L, np.asfortranarray(self.likelihood.YYT), lower=1) - tmp, _ = dpotrs(self.L, np.asfortranarray(tmp.T), lower=1) - self.dL_dK = 0.5 * (tmp - self.output_dim * self.Ki) - -# def _get_params(self): -# return np.hstack((self.kern._get_params_transformed(), self.likelihood._get_params())) -# -# def _get_param_names(self): -# return self.kern._get_param_names_transformed() + self.likelihood._get_param_names() - - def update_likelihood_approximation(self, **kwargs): - """ - Approximates a non-gaussian likelihood using Expectation Propagation - - For a Gaussian likelihood, no iteration is required: - 
this function does nothing - """ - self.likelihood.restart() - self.likelihood.fit_full(self.kern.K(self.X), **kwargs) -# self._set_params(self._get_params()) # update the GP - - def _model_fit_term(self): - """ - Computes the model fit using YYT if it's available - """ - if self.likelihood.YYT is None: - tmp, _ = dtrtrs(self.L, np.asfortranarray(self.likelihood.Y), lower=1) - return -0.5 * np.sum(np.square(tmp)) - # return -0.5 * np.sum(np.square(np.dot(self.Li, self.likelihood.Y))) - else: - return -0.5 * np.sum(np.multiply(self.Ki, self.likelihood.YYT)) + self.posterior = self.inference_method.inference(self.K, self.likelihood, self.Y) def log_likelihood(self): - """ - The log marginal likelihood of the GP. + return self.posterior.log_marginal - For an EP model, can be written as the log likelihood of a regression - model for a new variable Y* = v_tilde/tau_tilde, with a covariance - matrix K* = K + diag(1./tau_tilde) plus a normalization term. - """ - return (-0.5 * self.num_data * self.output_dim * np.log(2.*np.pi) - - 0.5 * self.output_dim * self.K_logdet + self._model_fit_term() + self.likelihood.Z) - -# def _log_likelihood_gradients(self): -# """ -# The gradient of all parameters. 
-# -# Note, we use the chain rule: dL_dtheta = dL_dK * d_K_dtheta -# -# #return np.hstack((self.kern.dK_dtheta(dL_dK=self.dL_dK, X=self.X), self.likelihood._gradients(partial=np.diag(self.dL_dK)))) -# -# if not isinstance(self.likelihood,EP): -# tmp = np.hstack((self.kern.dK_dtheta(dL_dK=self.dL_dK, X=self.X), self.likelihood._gradients(partial=np.diag(self.dL_dK)))) -# else: -# tmp = np.hstack((self.kern.dK_dtheta(dL_dK=self.dL_dK, X=self.X), self.likelihood._gradients(partial=np.diag(self.dL_dK)))) -# return tmp - def _raw_predict(self, _Xnew, which_parts='all', full_cov=False, stop=False): """ Internal helper function for making predictions, does not account for normalization or likelihood + + full_cov is a boolean which defines whether the full covariance matrix + of the prediction is computed. If full_cov is False (default), only the + diagonal of the covariance is returned. + """ Kx = self.kern.K(_Xnew, self.X, which_parts=which_parts).T - # KiKx = np.dot(self.Ki, Kx) - KiKx, _ = dpotrs(self.L, np.asfortranarray(Kx), lower=1) - mu = np.dot(KiKx.T, self.likelihood.Y) + LiKx, _ = dtrtrs(self.posterior._woodbury_chol, np.asfortranarray(Kx), lower=1) + mu = np.dot(Kx.T, self.posterior._woodbury_vector) if full_cov: Kxx = self.kern.K(_Xnew, which_parts=which_parts) - var = Kxx - np.dot(KiKx.T, Kx) + var = Kxx - tdot(LiKx.T) else: Kxx = self.kern.Kdiag(_Xnew, which_parts=which_parts) - var = Kxx - np.sum(np.multiply(KiKx, Kx), 0) - var = var[:, None] - if stop: - debug_this # @UndefinedVariable + var = Kxx - np.sum(LiKx*LiKx, 0) + var = var.reshape(-1, 1) return mu, var def predict(self, Xnew, which_parts='all', full_cov=False, **likelihood_args): @@ -150,41 +79,4 @@ class GP(GPBase): mean, var, _025pm, _975pm = self.likelihood.predictive_values(mu, var, full_cov, **likelihood_args) return mean, var, _025pm, _975pm - def _raw_predict_single_output(self, _Xnew, output, which_parts='all', full_cov=False,stop=False): - """ - For a specific output, 
calls _raw_predict() at the new point(s) _Xnew. - This functions calls _add_output_index(), so _Xnew should not have an index column specifying the output. - --------- - :param Xnew: The points at which to make a prediction - :type Xnew: np.ndarray, Nnew x self.input_dim - :param output: output to predict - :type output: integer in {0,..., output_dim-1} - :param which_parts: specifies which outputs kernel(s) to use in prediction - :type which_parts: ('all', list of bools) - :param full_cov: whether to return the full covariance matrix, or just the diagonal - - .. Note:: For multiple non-independent outputs models only. - """ - _Xnew = self._add_output_index(_Xnew, output) - return self._raw_predict(_Xnew, which_parts=which_parts,full_cov=full_cov, stop=stop) - - def predict_single_output(self, Xnew,output=0, which_parts='all', full_cov=False, likelihood_args=dict()): - """ - For a specific output, calls predict() at the new point(s) Xnew. - This functions calls _add_output_index(), so Xnew should not have an index column specifying the output. - - :param Xnew: The points at which to make a prediction - :type Xnew: np.ndarray, Nnew x self.input_dim - :param which_parts: specifies which outputs kernel(s) to use in prediction - :type which_parts: ('all', list of bools) - :param full_cov: whether to return the full covariance matrix, or just the diagonal - :type full_cov: bool - :returns: mean: posterior mean, a Numpy array, Nnew x self.input_dim - :returns: var: posterior variance, a Numpy array, Nnew x 1 if full_cov=False, Nnew x Nnew otherwise - :returns: lower and upper boundaries of the 95% confidence intervals, Numpy arrays, Nnew x self.input_dim - - .. Note:: For multiple non-independent outputs models only. 
- """ - Xnew = self._add_output_index(Xnew, output) - return self.predict(Xnew, which_parts=which_parts, full_cov=full_cov, likelihood_args=likelihood_args) diff --git a/GPy/core/gp_base.py b/GPy/core/gp_base.py index 6e04e1fd..dd56e635 100644 --- a/GPy/core/gp_base.py +++ b/GPy/core/gp_base.py @@ -1,10 +1,9 @@ import numpy as np +import pylab as pb +import warnings from .. import kern from ..util.plot import gpplot, Tango, x_frame1D, x_frame2D -import pylab as pb from model import Model -import warnings -from ..likelihoods import Gaussian, Gaussian_Mixed_Noise from ..core.parameter import ObservableArray class GPBase(Model): @@ -12,17 +11,27 @@ class GPBase(Model): Gaussian process base model for holding shared behaviour between sparse_GP and GP models. """ - def __init__(self, X, likelihood, kernel, normalize_X=False, name=''): + def __init__(self, X, Y, kernel, normalize_X=False, inference_method=None, name=''): super(GPBase, self).__init__(name) + assert X.ndim == 2 self.X = ObservableArray(X) - assert len(self.X.shape) == 2 self.num_data, self.input_dim = self.X.shape + assert isinstance(kernel, kern.kern) self.kern = kernel + + assert isinstance(likelihood, likelihoods.Likelihood) self.likelihood = likelihood - assert self.X.shape[0] == self.likelihood.data.shape[0] - self.num_data, self.output_dim = self.likelihood.data.shape + + if inference_method is None: + self.inference_method = self.likelihood.preferred_inference_method + print "defaulting to ", inference_method, "for latent function inference" + else: + self.inference_method = inference_method + + assert self.X.shape[0] == Y.shape[0] + self.num_data, self.output_dim = self.Y.shape if normalize_X: self._Xoffset = X.mean(0)[None, :] @@ -34,40 +43,6 @@ class GPBase(Model): self.add_parameter(self.kern, gradient=lambda:self.kern.dK_dtheta(self.dL_dK, self.X)) self.add_parameter(self.likelihood, gradient=lambda:self.likelihood._gradients(partial=np.diag(self.dL_dK))) - #self.kern.connect_input(self.X) - - 
# Model.__init__(self) - # All leaf nodes should call self._set_params(self._get_params()) at - # the end -# -# def parameters_changed(self): -# self.kern.parameters_changed() -# self.likelihood.parameters_changed() - - def getstate(self): - """ - Get the current state of the class, here we return everything that is needed to recompute the model. - """ - return Model.getstate(self) + [self.X, - self.num_data, - self.input_dim, - self.kern, - self.likelihood, - self.output_dim, - self._Xoffset, - self._Xscale, - ] - - def setstate(self, state): - self._Xscale = state.pop() - self._Xoffset = state.pop() - self.output_dim = state.pop() - self.likelihood = state.pop() - self.kern = state.pop() - self.input_dim = state.pop() - self.num_data = state.pop() - self.X = state.pop() - Model.setstate(self, state) def posterior_samples_f(self,X,size=10,which_parts='all',full_cov=True): """ @@ -121,90 +96,43 @@ class GPBase(Model): return Ysim - def plot_f(self, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, full_cov=False, fignum=None, ax=None): + def plot_f(self, *args, **kwargs): """ - Plot the GP's view of the world, where the data is normalized and the - - In one dimension, the function is plotted with a shaded region identifying two standard deviations. - - In two dimsensions, a contour-plot shows the mean predicted function - - Not implemented in higher dimensions + Plot the GP's view of the world, where the data is normalized and before applying a likelihood. - :param samples: the number of a posteriori samples to plot - :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. 
Defaluts to data limits - :param which_data: which if the training data to plot (default all) - :type which_data: 'all' or a slice object to slice self.X, self.Y - :param which_parts: which of the kernel functions to plot (additively) - :type which_parts: 'all', or list of bools - :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D - :type resolution: int - :param full_cov: - :type full_cov: bool - :param fignum: figure to plot on. - :type fignum: figure number - :param ax: axes to plot on. - :type ax: axes handle + This is a convenience function: we simply call self.plot with the + argument use_raw_predict set True. All args and kwargs are passed on to + plot. - :param output: which output to plot (for multiple output models only) - :type output: integer (first output is 0) + see also: gp_base.plot """ - if which_data == 'all': - which_data = slice(None) + kwargs['plot_raw'] = True + self.plot(*args, **kwargs) - if ax is None: - fig = pb.figure(num=fignum) - ax = fig.add_subplot(111) - - if self.X.shape[1] == 1: - resolution = resolution or 200 - Xnew, xmin, xmax = x_frame1D(self.X, plot_limits=plot_limits) - - m, v = self._raw_predict(Xnew, which_parts=which_parts) - if samples: - Ysim = self.posterior_samples_f(Xnew, samples, which_parts=which_parts, full_cov=True) - for yi in Ysim.T: - ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25) - gpplot(Xnew, m, m - 2 * np.sqrt(v), m + 2 * np.sqrt(v), axes=ax) - - ax.plot(self.X[which_data], self.likelihood.Y[which_data], 'kx', mew=1.5) - ax.set_xlim(xmin, xmax) - ymin, ymax = min(np.append(self.likelihood.Y, m - 2 * np.sqrt(np.diag(v)[:, None]))), max(np.append(self.likelihood.Y, m + 2 * np.sqrt(np.diag(v)[:, None]))) - ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin) - ax.set_ylim(ymin, ymax) - - elif self.X.shape[1] == 2: - - resolution = resolution or 50 - Xnew, xmin, xmax, xx, yy = x_frame2D(self.X, plot_limits, 
resolution) - m, v = self._raw_predict(Xnew, which_parts=which_parts) - m = m.reshape(resolution, resolution).T - ax.contour(xx, yy, m, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet) # @UndefinedVariable - ax.scatter(self.X[:, 0], self.X[:, 1], 40, self.likelihood.Y, linewidth=0, cmap=pb.cm.jet, vmin=m.min(), vmax=m.max()) # @UndefinedVariable - ax.set_xlim(xmin[0], xmax[0]) - ax.set_ylim(xmin[1], xmax[1]) - - if samples: - warnings.warn("Samples only implemented for 1 dimensional inputs.") - - else: - raise NotImplementedError, "Cannot define a frame with more than two input dimensions" - - def plot(self, plot_limits=None, which_data='all', which_parts='all', resolution=None, levels=20, samples=0, fignum=None, ax=None, fixed_inputs=[], linecol=Tango.colorsHex['darkBlue'],fillcol=Tango.colorsHex['lightBlue']): + def plot(self, plot_limits=None, which_data_rows='all', + which_data_ycols='all', which_parts='all', fixed_inputs=[], + levels=20, samples=0, fignum=None, ax=None, resolution=None, + plot_raw=False, + linecol=Tango.colorsHex['darkBlue'],fillcol=Tango.colorsHex['lightBlue']): """ - Plot the GP with noise where the likelihood is Gaussian. - Plot the posterior of the GP. - In one dimension, the function is plotted with a shaded region identifying two standard deviations. - In two dimsensions, a contour-plot shows the mean predicted function - - Not implemented in higher dimensions + - In higher dimensions, use fixed_inputs to plot the GP with some of the inputs fixed. Can plot only part of the data and part of the posterior functions - using which_data and which_functions + using which_data_rowsm which_data_ycols and which_parts :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. 
Defaluts to data limits :type plot_limits: np.array - :param which_data: which if the training data to plot (default all) - :type which_data: 'all' or a slice object to slice self.X, self.Y + :param which_data_rows: which of the training data to plot (default all) + :type which_data_rows: 'all' or a slice object to slice self.X, self.Y + :param which_data_ycols: when the data has several columns (independant outputs), only plot these + :type which_data_rows: 'all' or a list of integers :param which_parts: which of the kernel functions to plot (additively) :type which_parts: 'all', or list of bools + :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v. + :type fixed_inputs: a list of tuples :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D :type resolution: int :param levels: number of levels to plot in a contour plot. @@ -216,216 +144,125 @@ class GPBase(Model): :param ax: axes to plot on. :type ax: axes handle :type output: integer (first output is 0) - :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v. - :type fixed_inputs: a list of tuples :param linecol: color of line to plot. 
:type linecol: :param fillcol: color of fill :param levels: for 2D plotting, the number of contour levels to use is ax is None, create a new figure """ - if which_data == 'all': - which_data = slice(None) - + #deal with optional arguments + if which_data_rows == 'all': + which_data_rows = slice(None) + if which_data_ycols == 'all': + which_data_ycols = np.arange(self.output_dim) + if len(which_data_ycols)==0: + raise ValueError('No data selected for plotting') if ax is None: fig = pb.figure(num=fignum) ax = fig.add_subplot(111) - plotdims = self.input_dim - len(fixed_inputs) - if plotdims == 1: + #work out what the inputs are for plotting (1D or 2D) + fixed_dims = np.array([i for i,v in fixed_inputs]) + free_dims = np.setdiff1d(np.arange(self.input_dim),fixed_dims) + + #one dimensional plotting + if len(free_dims) == 1: + + #define the frame on which to plot resolution = resolution or 200 - Xu = self.X * self._Xscale + self._Xoffset #NOTE self.X are the normalized values now - - fixed_dims = np.array([i for i,v in fixed_inputs]) - freedim = np.setdiff1d(np.arange(self.input_dim),fixed_dims) - - Xnew, xmin, xmax = x_frame1D(Xu[:,freedim], plot_limits=plot_limits) + Xnew, xmin, xmax = x_frame1D(Xu[:,free_dims], plot_limits=plot_limits) Xgrid = np.empty((Xnew.shape[0],self.input_dim)) - Xgrid[:,freedim] = Xnew + Xgrid[:,free_dims] = Xnew for i,v in fixed_inputs: Xgrid[:,i] = v - m, v, lower, upper = self.predict(Xgrid, which_parts=which_parts) + #make a prediction on the frame and plot it + if plot_raw: + m, v = self._raw_predict(Xgrid, which_parts=which_parts) + lower = m - 2*np.sqrt(v) + upper = m + 2*np.sqrt(v) + Y = self.likelihood.Y + else: + m, v, lower, upper = self.predict(Xgrid, which_parts=which_parts, sampling=False) #Compute the exact mean + m_, v_, lower, upper = self.predict(Xgrid, which_parts=which_parts, sampling=True, num_samples=15000) #Apporximate the percentiles + Y = self.likelihood.data + for d in which_data_ycols: + gpplot(Xnew, m[:, d], 
lower[:, d], upper[:, d], axes=ax, edgecol=linecol, fillcol=fillcol) + ax.plot(Xu[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', mew=1.5) + #optionally plot some samples if samples: #NOTE not tested with fixed_inputs - Ysim = self.posterior_samples(Xgrid, samples, which_parts=which_parts, full_cov=True) + Ysim = self.posterior_samples(Xgrid, samples, which_parts=which_parts) for yi in Ysim.T: ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25) #ax.plot(Xnew, yi[:,None], marker='x', linestyle='--',color=Tango.colorsHex['darkBlue']) #TODO apply this line for discrete outputs. - for d in range(m.shape[1]): - gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], axes=ax, edgecol=linecol, fillcol=fillcol) - ax.plot(Xu[which_data,freedim], self.likelihood.data[which_data, d], 'kx', mew=1.5) - ymin, ymax = min(np.append(self.likelihood.data, lower)), max(np.append(self.likelihood.data, upper)) + #set the limits of the plot to some sensible values + ymin, ymax = min(np.append(Y[which_data_rows, which_data_ycols].flatten(), lower)), max(np.append(Y[which_data_rows, which_data_ycols].flatten(), upper)) ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin) ax.set_xlim(xmin, xmax) ax.set_ylim(ymin, ymax) - elif self.X.shape[1] == 2: + #2D plotting + elif len(free_dims) == 2: + #define the frame for plotting on resolution = resolution or 50 - Xnew, _, _, xmin, xmax = x_frame2D(self.X, plot_limits, resolution) + Xu = self.X * self._Xscale + self._Xoffset #NOTE self.X are the normalized values now + Xnew, _, _, xmin, xmax = x_frame2D(Xu[:,free_dims], plot_limits, resolution) + Xgrid = np.empty((Xnew.shape[0],self.input_dim)) + Xgrid[:,free_dims] = Xnew + for i,v in fixed_inputs: + Xgrid[:,i] = v x, y = np.linspace(xmin[0], xmax[0], resolution), np.linspace(xmin[1], xmax[1], resolution) - m, _, lower, upper = self.predict(Xnew, which_parts=which_parts) - m = m.reshape(resolution, resolution).T - ax.contour(x, y, m, levels, vmin=m.min(), 
vmax=m.max(), cmap=pb.cm.jet) # @UndefinedVariable - Yf = self.likelihood.Y.flatten() - ax.scatter(self.X[:, 0], self.X[:, 1], 40, Yf, cmap=pb.cm.jet, vmin=m.min(), vmax=m.max(), linewidth=0.) # @UndefinedVariable + + #predict on the frame and plot + if plot_raw: + m, _ = self._raw_predict(Xgrid, which_parts=which_parts) + Y = self.likelihood.Y + else: + m, _, _, _ = self.predict(Xgrid, which_parts=which_parts,sampling=False) + Y = self.likelihood.data + for d in which_data_ycols: + m_d = m[:,d].reshape(resolution, resolution).T + ax.contour(x, y, m_d, levels, vmin=m.min(), vmax=m.max(), cmap=pb.cm.jet) + ax.scatter(self.X[which_data_rows, free_dims[0]], self.X[which_data_rows, free_dims[1]], 40, Y[which_data_rows, d], cmap=pb.cm.jet, vmin=m.min(), vmax=m.max(), linewidth=0.) + + #set the limits of the plot to some sensible values ax.set_xlim(xmin[0], xmax[0]) ax.set_ylim(xmin[1], xmax[1]) if samples: - warnings.warn("Samples only implemented for 1 dimensional inputs.") - - else: - raise NotImplementedError, "Cannot define a frame with more than two input dimensions" - - def plot_single_output_f(self, output=None, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, full_cov=False, fignum=None, ax=None): - """ - For a specific output, in a multioutput model, this function works just as plot_f on single output models. - - :param output: which output to plot (for multiple output models only) - :type output: integer (first output is 0) - :param samples: the number of a posteriori samples to plot - :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaluts to data limits - :param which_data: which if the training data to plot (default all) - :type which_data: 'all' or a slice object to slice self.X, self.Y - :param which_parts: which of the kernel functions to plot (additively) - :type which_parts: 'all', or list of bools - :param resolution: the number of intervals to sample the GP on. 
Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D - :type resolution: int - :param full_cov: - :type full_cov: bool - :param fignum: figure to plot on. - :type fignum: figure number - :param ax: axes to plot on. - :type ax: axes handle - """ - assert output is not None, "An output must be specified." - assert len(self.likelihood.noise_model_list) > output, "The model has only %s outputs." %(self.output_dim + 1) - - if which_data == 'all': - which_data = slice(None) - - if ax is None: - fig = pb.figure(num=fignum) - ax = fig.add_subplot(111) - - if self.X.shape[1] == 2: - Xu = self.X[self.X[:,-1]==output ,0:1] - Xnew, xmin, xmax = x_frame1D(Xu, plot_limits=plot_limits) - Xnew_indexed = self._add_output_index(Xnew,output) - - m, v = self._raw_predict(Xnew_indexed, which_parts=which_parts) - - if samples: - Ysim = self.posterior_samples_f(Xnew_indexed, samples, which_parts=which_parts, full_cov=True) - for yi in Ysim.T: - ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25) - - gpplot(Xnew, m, m - 2 * np.sqrt(v), m + 2 * np.sqrt(v), axes=ax) - ax.plot(Xu[which_data], self.likelihood.Y[self.likelihood.index==output][:,None], 'kx', mew=1.5) - ax.set_xlim(xmin, xmax) - ymin, ymax = min(np.append(self.likelihood.Y, m - 2 * np.sqrt(np.diag(v)[:, None]))), max(np.append(self.likelihood.Y, m + 2 * np.sqrt(np.diag(v)[:, None]))) - ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin) - ax.set_ylim(ymin, ymax) - - elif self.X.shape[1] == 3: - raise NotImplementedError, "Plots not implemented for multioutput models with 2D inputs...yet" - #if samples: - # warnings.warn("Samples only implemented for 1 dimensional inputs.") + warnings.warn("Samples are rather difficult to plot for 2D inputs...") else: raise NotImplementedError, "Cannot define a frame with more than two input dimensions" - def plot_single_output(self, output=None, plot_limits=None, which_data='all', which_parts='all', resolution=None, levels=20, samples=0, fignum=None, ax=None, 
fixed_inputs=[], linecol=Tango.colorsHex['darkBlue'],fillcol=Tango.colorsHex['lightBlue']): + + def getstate(self): """ - For a specific output, in a multioutput model, this function works just as plot_f on single output models. - - :param output: which output to plot (for multiple output models only) - :type output: integer (first output is 0) - :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaluts to data limits - :type plot_limits: np.array - :param which_data: which if the training data to plot (default all) - :type which_data: 'all' or a slice object to slice self.X, self.Y - :param which_parts: which of the kernel functions to plot (additively) - :type which_parts: 'all', or list of bools - :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D - :type resolution: int - :param levels: number of levels to plot in a contour plot. - :type levels: int - :param samples: the number of a posteriori samples to plot - :type samples: int - :param fignum: figure to plot on. - :type fignum: figure number - :param ax: axes to plot on. - :type ax: axes handle - :type output: integer (first output is 0) - :param fixed_inputs: a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v. - :type fixed_inputs: a list of tuples - :param linecol: color of line to plot. - :type linecol: - :param fillcol: color of fill - :param levels: for 2D plotting, the number of contour levels to use is ax is None, create a new figure + Get the current state of the class, here we return everything that is needed to recompute the model. """ - assert output is not None, "An output must be specified." - assert len(self.likelihood.noise_model_list) > output, "The model has only %s outputs." 
%(self.output_dim + 1) - if which_data == 'all': - which_data = slice(None) + return Model.getstate(self) + [self.X, + self.num_data, + self.input_dim, + self.kern, + self.likelihood, + self.output_dim, + self._Xoffset, + self._Xscale, + ] - if ax is None: - fig = pb.figure(num=fignum) - ax = fig.add_subplot(111) - - if self.X.shape[1] == 2: - resolution = resolution or 200 - - Xu = self.X[self.X[:,-1]==output,:] #keep the output of interest - Xu = self.X * self._Xscale + self._Xoffset - Xu = self.X[self.X[:,-1]==output ,0:1] #get rid of the index column - - Xnew, xmin, xmax = x_frame1D(Xu, plot_limits=plot_limits) - Xnew_indexed = self._add_output_index(Xnew,output) + def setstate(self, state): + self._Xscale = state.pop() + self._Xoffset = state.pop() + self.output_dim = state.pop() + self.likelihood = state.pop() + self.kern = state.pop() + self.input_dim = state.pop() + self.num_data = state.pop() + self.X = state.pop() + Model.setstate(self, state) - m, v, lower, upper = self.predict(Xnew_indexed, which_parts=which_parts,noise_model=output) - - if samples: #NOTE not tested with fixed_inputs - Ysim = self.posterior_samples(Xnew_indexed, samples, which_parts=which_parts, full_cov=True,noise_model=output) - for yi in Ysim.T: - ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25) - - for d in range(m.shape[1]): - gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], axes=ax, edgecol=linecol, fillcol=fillcol) - ax.plot(Xu[which_data], self.likelihood.noise_model_list[output].data, 'kx', mew=1.5) - ymin, ymax = min(np.append(self.likelihood.data, lower)), max(np.append(self.likelihood.data, upper)) - ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin) - ax.set_xlim(xmin, xmax) - ax.set_ylim(ymin, ymax) - - elif self.X.shape[1] == 3: - raise NotImplementedError, "Plots not implemented for multioutput models with 2D inputs...yet" - #if samples: - # warnings.warn("Samples only implemented for 1 dimensional inputs.") - - else: - raise 
NotImplementedError, "Cannot define a frame with more than two input dimensions" - - - def _add_output_index(self,X,output): - """ - In a multioutput model, appends an index column to X to specify the output it is related to. - - :param X: Input data - :type X: np.ndarray, N x self.input_dim - :param output: output X is related to - :type output: integer in {0,..., output_dim-1} - - .. Note:: For multiple non-independent outputs models only. - """ - - assert hasattr(self,'multioutput'), 'This function is for multiple output models only.' - - index = np.ones((X.shape[0],1))*output - return np.hstack((X,index)) diff --git a/GPy/core/sparse_gp.py b/GPy/core/sparse_gp.py index 25e1bd3c..2ac47419 100644 --- a/GPy/core/sparse_gp.py +++ b/GPy/core/sparse_gp.py @@ -94,7 +94,6 @@ class SparseGP(GPBase): # factor Kmm self._Lm = jitchol(self.Kmm + self._const_jitter) - # TODO: no white kernel needed anymore, all noise in likelihood -------- # The rather complex computations of self._A if self.has_uncertain_inputs: @@ -204,27 +203,13 @@ class SparseGP(GPBase): D = 0.5 * self.data_fit return A + B + C + D + self.likelihood.Z - #def _set_params(self, p): def parameters_changed(self): - #self.Z = p[:self.num_inducing * self.input_dim].reshape(self.num_inducing, self.input_dim) - #self.kern._set_params(p[self.Z.size:self.Z.size + self.kern.num_params]) - #self.likelihood._set_params(p[self.Z.size + self.kern.num_params:]) self._compute_kernel_matrices() self._computations() self.Cpsi1V = None - # make sparse_gp compatible with gp_base gradients: self.dL_dK = self.dL_dKmm super(SparseGP, self).parameters_changed() -# def _get_params(self): -# return np.hstack([self.Z.flatten(), self.kern._get_params_transformed(), self.likelihood._get_params()]) -# -# def _get_param_names(self): -# return sum([['iip_%i_%i' % (i, j) for j in range(self.Z.shape[1])] for i in range(self.Z.shape[0])], [])\ -# + self.kern._get_param_names_transformed() + self.likelihood._get_param_names() - - #def 
_get_print_names(self): - # return self.kern._get_param_names_transformed() + self.likelihood._get_param_names() def update_likelihood_approximation(self, **kwargs): """ @@ -247,9 +232,6 @@ class SparseGP(GPBase): # self.likelihood.fit_FITC(self.Kmm,self.psi1,self.psi0) self._set_params(self._get_params()) # update the GP -# def _log_likelihood_gradients(self): -# return np.hstack((self.dL_dZ().flatten(), self.dL_dtheta(), self.likelihood._gradients(partial=self.partial_for_likelihood))) - def dL_dtheta(self): """ Compute and return the derivative of the log marginal likelihood wrt the parameters of the kernel diff --git a/GPy/models/gp_multioutput_regression.py b/GPy/models/gp_multioutput_regression.py index 4ce3dfbc..f6ceac9d 100644 --- a/GPy/models/gp_multioutput_regression.py +++ b/GPy/models/gp_multioutput_regression.py @@ -56,3 +56,117 @@ class GPMultioutputRegression(GP): self.multioutput = True GP.__init__(self, X, likelihood, mkernel, normalize_X=normalize_X) self.ensure_default_constraints() + + def _add_output_index(self,X,output): + """ + In a multioutput model, appends an index column to X to specify the output it is related to. + + :param X: Input data + :type X: np.ndarray, N x self.input_dim + :param output: output X is related to + :type output: integer in {0,..., output_dim-1} + + .. Note:: For multiple non-independent outputs models only. + """ + + assert hasattr(self,'multioutput'), 'This function is for multiple output models only.' + + index = np.ones((X.shape[0],1))*output + return np.hstack((X,index)) + + def plot_single_output(self, X, output): + """ + A simple wrapper around self.plot, with appropriate setting of the fixed_inputs argument + """ + raise NotImplementedError + + def _raw_predict_single_output(self, _Xnew, output, which_parts='all', full_cov=False,stop=False): + """ + For a specific output, calls _raw_predict() at the new point(s) _Xnew. 
+ This functions calls _add_output_index(), so _Xnew should not have an index column specifying the output. + --------- + + :param Xnew: The points at which to make a prediction + :type Xnew: np.ndarray, Nnew x self.input_dim + :param output: output to predict + :type output: integer in {0,..., output_dim-1} + :param which_parts: specifies which outputs kernel(s) to use in prediction + :type which_parts: ('all', list of bools) + :param full_cov: whether to return the full covariance matrix, or just the diagonal + + .. Note:: For multiple non-independent outputs models only. + """ + _Xnew = self._add_output_index(_Xnew, output) + return self._raw_predict(_Xnew, which_parts=which_parts,full_cov=full_cov, stop=stop) + + def predict_single_output(self, Xnew,output=0, which_parts='all', full_cov=False, likelihood_args=dict()): + """ + For a specific output, calls predict() at the new point(s) Xnew. + This functions calls _add_output_index(), so Xnew should not have an index column specifying the output. + + :param Xnew: The points at which to make a prediction + :type Xnew: np.ndarray, Nnew x self.input_dim + :param which_parts: specifies which outputs kernel(s) to use in prediction + :type which_parts: ('all', list of bools) + :param full_cov: whether to return the full covariance matrix, or just the diagonal + :type full_cov: bool + :returns: mean: posterior mean, a Numpy array, Nnew x self.input_dim + :returns: var: posterior variance, a Numpy array, Nnew x 1 if full_cov=False, Nnew x Nnew otherwise + :returns: lower and upper boundaries of the 95% confidence intervals, Numpy arrays, Nnew x self.input_dim + + .. Note:: For multiple non-independent outputs models only. 
+ """ + Xnew = self._add_output_index(Xnew, output) + return self.predict(Xnew, which_parts=which_parts, full_cov=full_cov, likelihood_args=likelihood_args) + + def plot_single_output_f(self, output=None, samples=0, plot_limits=None, which_data='all', which_parts='all', resolution=None, full_cov=False, fignum=None, ax=None): + """ + For a specific output, in a multioutput model, this function works just as plot_f on single output models. + + :param output: which output to plot (for multiple output models only) + :type output: integer (first output is 0) + :param samples: the number of a posteriori samples to plot + :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaluts to data limits + :param which_data: which if the training data to plot (default all) + :type which_data: 'all' or a slice object to slice self.X, self.Y + :param which_parts: which of the kernel functions to plot (additively) + :type which_parts: 'all', or list of bools + :param resolution: the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D + :type resolution: int + :param full_cov: + :type full_cov: bool + :param fignum: figure to plot on. + :type fignum: figure number + :param ax: axes to plot on. + :type ax: axes handle + """ + assert output is not None, "An output must be specified." + assert len(self.likelihood.noise_model_list) > output, "The model has only %s outputs." 
%(self.output_dim + 1) + + if which_data == 'all': + which_data = slice(None) + + if ax is None: + fig = pb.figure(num=fignum) + ax = fig.add_subplot(111) + + if self.X.shape[1] == 2: + Xu = self.X[self.X[:,-1]==output ,0:1] + Xnew, xmin, xmax = x_frame1D(Xu, plot_limits=plot_limits) + Xnew_indexed = self._add_output_index(Xnew,output) + + m, v = self._raw_predict(Xnew_indexed, which_parts=which_parts) + + if samples: + Ysim = self.posterior_samples_f(Xnew_indexed, samples, which_parts=which_parts, full_cov=True) + for yi in Ysim.T: + ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25) + + gpplot(Xnew, m, m - 2 * np.sqrt(v), m + 2 * np.sqrt(v), axes=ax) + ax.plot(Xu[which_data], self.likelihood.Y[self.likelihood.index==output][:,None], 'kx', mew=1.5) + ax.set_xlim(xmin, xmax) + ymin, ymax = min(np.append(self.likelihood.Y, m - 2 * np.sqrt(np.diag(v)[:, None]))), max(np.append(self.likelihood.Y, m + 2 * np.sqrt(np.diag(v)[:, None]))) + ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin) + ax.set_ylim(ymin, ymax) + +