From 7b5422b6946315a97f18850dd31cffeeff5c85c8 Mon Sep 17 00:00:00 2001 From: Max Zwiessele Date: Fri, 22 Jan 2016 11:26:29 +0000 Subject: [PATCH 1/6] [plotting&kern] bugfixes in plotting and kernel size --- GPy/core/gp.py | 9 +++++---- GPy/kern/src/kern.py | 4 ++-- GPy/kern/src/stationary.py | 4 ++-- GPy/plotting/gpy_plot/gp_plots.py | 16 ++++++++-------- GPy/plotting/gpy_plot/plot_util.py | 5 ++++- 5 files changed, 21 insertions(+), 17 deletions(-) diff --git a/GPy/core/gp.py b/GPy/core/gp.py index ae710355..ea2ed140 100644 --- a/GPy/core/gp.py +++ b/GPy/core/gp.py @@ -365,13 +365,14 @@ class GP(Model): mean_jac[:,:,i] = kern.gradients_X(self.posterior.woodbury_vector[:,i:i+1].T, Xnew, self._predictive_variable) dK_dXnew_full = np.empty((self._predictive_variable.shape[0], Xnew.shape[0], Xnew.shape[1])) + one = np.ones((1,1)) for i in range(self._predictive_variable.shape[0]): - dK_dXnew_full[i] = kern.gradients_X([[1.]], Xnew, self._predictive_variable[[i]]) + dK_dXnew_full[i] = kern.gradients_X(one, Xnew, self._predictive_variable[[i]]) if full_cov: - dK2_dXdX = kern.gradients_XX([[1.]], Xnew) + dK2_dXdX = kern.gradients_XX(one, Xnew) else: - dK2_dXdX = kern.gradients_XX_diag([[1.]], Xnew) + dK2_dXdX = kern.gradients_XX_diag(one, Xnew) def compute_cov_inner(wi): if full_cov: @@ -458,7 +459,7 @@ class GP(Model): m, v = self._raw_predict(X, full_cov=full_cov, **predict_kwargs) if self.normalizer is not None: m, v = self.normalizer.inverse_mean(m), self.normalizer.inverse_variance(v) - + def sim_one_dim(m, v): if not full_cov: return np.random.multivariate_normal(m.flatten(), np.diag(v.flatten()), size).T diff --git a/GPy/kern/src/kern.py b/GPy/kern/src/kern.py index ad41355f..6a746092 100644 --- a/GPy/kern/src/kern.py +++ b/GPy/kern/src/kern.py @@ -61,12 +61,12 @@ class Kern(Parameterized): self.psicomp = PSICOMP_GH() def __setstate__(self, state): - self._all_dims_active = range(0, max(state['active_dims'])+1) + self._all_dims_active = np.arange(0, max(state['active_dims'])+1) super(Kern, self).__setstate__(state) @property def _effective_input_dim(self): - return self._all_dims_active.size + return np.size(self._all_dims_active) @Cache_this(limit=20) def _slice_X(self, X): diff --git a/GPy/kern/src/stationary.py b/GPy/kern/src/stationary.py index 106e0098..7b4c3625 100644 --- a/GPy/kern/src/stationary.py +++ b/GPy/kern/src/stationary.py @@ -97,7 +97,7 @@ class Stationary(Kern): r = self._scaled_dist(X, X2) return self.K_of_r(r) - @Cache_this(limit=20, ignore_args=()) + @Cache_this(limit=3, ignore_args=()) def dK_dr_via_X(self, X, X2): #a convenience function, so we can cache dK_dr return self.dK_dr(self._scaled_dist(X, X2)) @@ -127,7 +127,7 @@ class Stationary(Kern): r2 = np.clip(r2, 0, np.inf) return np.sqrt(r2) - @Cache_this(limit=20, ignore_args=()) + @Cache_this(limit=3, ignore_args=()) def _scaled_dist(self, X, X2=None): """ Efficiently compute the scaled distance, r. diff --git a/GPy/plotting/gpy_plot/gp_plots.py b/GPy/plotting/gpy_plot/gp_plots.py index 4d467e62..353dc7fb 100644 --- a/GPy/plotting/gpy_plot/gp_plots.py +++ b/GPy/plotting/gpy_plot/gp_plots.py @@ -46,7 +46,7 @@ def plot_mean(self, plot_limits=None, fixed_inputs=None, """ Plot the mean of the GP. - You can deactivate the legend for this one plot by supplying None to label. + You can deactivate the legend for this one plot by supplying None to label. Give the Y_metadata in the predict_kw if you need it. @@ -116,7 +116,7 @@ def plot_confidence(self, lower=2.5, upper=97.5, plot_limits=None, fixed_inputs= E.g. 
the 95% confidence interval is $2.5, 97.5$. Note: Only implemented for one dimension! - You can deactivate the legend for this one plot by supplying None to label. + You can deactivate the legend for this one plot by supplying None to label. Give the Y_metadata in the predict_kw if you need it. @@ -170,7 +170,7 @@ def plot_samples(self, plot_limits=None, fixed_inputs=None, """ Plot the mean of the GP. - You can deactivate the legend for this one plot by supplying None to label. + You can deactivate the legend for this one plot by supplying None to label. Give the Y_metadata in the predict_kw if you need it. @@ -231,7 +231,7 @@ def plot_density(self, plot_limits=None, fixed_inputs=None, E.g. the 95% confidence interval is $2.5, 97.5$. Note: Only implemented for one dimension! - You can deactivate the legend for this one plot by supplying None to label. + You can deactivate the legend for this one plot by supplying None to label. Give the Y_metadata in the predict_kw if you need it. @@ -288,7 +288,7 @@ def plot(self, plot_limits=None, fixed_inputs=None, """ Convenience function for plotting the fit of a GP. - You can deactivate the legend for this one plot by supplying None to label. + You can deactivate the legend for this one plot by supplying None to label. Give the Y_metadata in the predict_kw if you need it. @@ -330,6 +330,8 @@ def plot(self, plot_limits=None, fixed_inputs=None, # It does not make sense to plot the data (which lives not in the latent function space) into latent function space. plot_data = False plots = {} + if hasattr(self, 'Z') and plot_inducing: + plots.update(_plot_inducing(self, canvas, visible_dims, projection, 'Inducing')) if plot_data: plots.update(_plot_data(self, canvas, which_data_rows, which_data_ycols, visible_dims, projection, "Data")) plots.update(_plot_data_error(self, canvas, which_data_rows, which_data_ycols, visible_dims, projection, "Data Error")) @@ -340,8 +342,6 @@ def plot(self, plot_limits=None, fixed_inputs=None, get_which_data_ycols(self, which_data_ycols), predict_kw, samples_likelihood) plots.update(_plot_samples(canvas, helper_data, helper_prediction, projection, "Lik Samples")) - if hasattr(self, 'Z') and plot_inducing: - plots.update(_plot_inducing(self, canvas, visible_dims, projection, 'Inducing')) return pl().add_to_canvas(canvas, plots, legend=legend) @@ -362,7 +362,7 @@ def plot_f(self, plot_limits=None, fixed_inputs=None, If you want fine graned control use the specific plotting functions supplied in the model. - You can deactivate the legend for this one plot by supplying None to label. + You can deactivate the legend for this one plot by supplying None to label. Give the Y_metadata in the predict_kw if you need it. 
diff --git a/GPy/plotting/gpy_plot/plot_util.py b/GPy/plotting/gpy_plot/plot_util.py
index 254886a2..d760d1b7 100644
--- a/GPy/plotting/gpy_plot/plot_util.py
+++ b/GPy/plotting/gpy_plot/plot_util.py
@@ -285,7 +285,10 @@ def get_x_y_var(model):
         X = model.X.mean.values
         X_variance = model.X.variance.values
     else:
-        X = model.X.values
+        try:
+            X = model.X.values
+        except AttributeError:
+            X = model.X
         X_variance = None
     try:
         Y = model.Y.values

From 095a8607b29c392d1e91f1395e286155e8427fa1 Mon Sep 17 00:00:00 2001
From: Max Zwiessele
Date: Fri, 22 Jan 2016 11:26:53 +0000
Subject: [PATCH 2/6] =?UTF-8?q?Bump=20version:=200.9.5=20=E2=86=92=200.9.6?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 GPy/__version__.py | 2 +-
 setup.cfg          | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/GPy/__version__.py b/GPy/__version__.py
index f8c6ac7f..50533e30 100644
--- a/GPy/__version__.py
+++ b/GPy/__version__.py
@@ -1 +1 @@
-__version__ = "0.9.5"
+__version__ = "0.9.6"

diff --git a/setup.cfg b/setup.cfg
index c8c61b7b..3a2581bd 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.9.5
+current_version = 0.9.6
 tag = True
 commit = True

From fd979b843f3a3e758605e8e9b9d06d570abea9ab Mon Sep 17 00:00:00 2001
From: Max Zwiessele
Date: Thu, 28 Jan 2016 12:12:57 +0000
Subject: [PATCH 3/6] [plotting] subsampling print warning corrected

---
 GPy/plotting/gpy_plot/plot_util.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/GPy/plotting/gpy_plot/plot_util.py b/GPy/plotting/gpy_plot/plot_util.py
index d760d1b7..a910cd6f 100644
--- a/GPy/plotting/gpy_plot/plot_util.py
+++ b/GPy/plotting/gpy_plot/plot_util.py
@@ -199,7 +199,7 @@ def subsample_X(X, labels, num_samples=1000):
     num_samples and the returned subsampled X.
     """
     if X.shape[0] > num_samples:
-        print("Warning: subsampling X, as it has more samples then 1000. X.shape={!s}".format(X.shape))
+        print("Warning: subsampling X, as it has more samples than {}. X.shape={!s}".format(int(num_samples), X.shape))
     if labels is not None:
         subsample = []
         for _, _, _, _, index, _ in scatter_label_generator(labels, X, (0, None, None)):

From 98daaba57e1db392f5fb5af59c74d9832c36309b Mon Sep 17 00:00:00 2001
From: Max Zwiessele
Date: Fri, 19 Feb 2016 14:55:36 +0000
Subject: [PATCH 4/6] [plotting] xlim setting

---
 GPy/core/gp.py                             |   2 +-
 GPy/inference/optimization/optimization.py | 286 ---------------------
 GPy/plotting/gpy_plot/gp_plots.py          |  12 +-
 3 files changed, 11 insertions(+), 289 deletions(-)
 delete mode 100644 GPy/inference/optimization/optimization.py

diff --git a/GPy/core/gp.py b/GPy/core/gp.py
index ea2ed140..7a29664f 100644
--- a/GPy/core/gp.py
+++ b/GPy/core/gp.py
@@ -181,7 +181,7 @@ class GP(Model):

     def parameters_changed(self):
         """
         Method that is called upon any changes to :class:`~GPy.core.parameterization.param.Param` variables within the model.
-        In particular in the GP class this method reperforms inference, recalculating the posterior and log marginal likelihood and gradients of the model
+        In particular in the GP class this method re-performs inference, recalculating the posterior and log marginal likelihood and gradients of the model.

        ..
warning:: This method is not designed to be called manually, the framework is set up to automatically call this method upon changes to parameters, if you call diff --git a/GPy/inference/optimization/optimization.py b/GPy/inference/optimization/optimization.py deleted file mode 100644 index ceceb222..00000000 --- a/GPy/inference/optimization/optimization.py +++ /dev/null @@ -1,286 +0,0 @@ -# Copyright (c) 2012-2014, GPy authors (see AUTHORS.txt). -# Licensed under the BSD 3-clause license (see LICENSE.txt) - -import datetime as dt -from scipy import optimize -from warnings import warn - -try: - import rasmussens_minimize as rasm - rasm_available = True -except ImportError: - rasm_available = False -from .scg import SCG - -class Optimizer(object): - """ - Superclass for all the optimizers. - - :param x_init: initial set of parameters - :param f_fp: function that returns the function AND the gradients at the same time - :param f: function to optimize - :param fp: gradients - :param messages: print messages from the optimizer? - :type messages: (True | False) - :param max_f_eval: maximum number of function evaluations - - :rtype: optimizer object. - - """ - def __init__(self, x_init, messages=False, model=None, max_f_eval=1e4, max_iters=1e3, - ftol=None, gtol=None, xtol=None, bfgs_factor=None): - self.opt_name = None - self.x_init = x_init - # Turning messages off and using internal structure for print outs: - self.messages = False #messages - self.f_opt = None - self.x_opt = None - self.funct_eval = None - self.status = None - self.max_f_eval = int(max_iters) - self.max_iters = int(max_iters) - self.bfgs_factor = bfgs_factor - self.trace = None - self.time = "Not available" - self.xtol = xtol - self.gtol = gtol - self.ftol = ftol - self.model = model - - def run(self, **kwargs): - start = dt.datetime.now() - self.opt(**kwargs) - end = dt.datetime.now() - self.time = str(end - start) - - def opt(self, f_fp=None, f=None, fp=None): - raise NotImplementedError("this needs to be implemented to use the optimizer class") - - def __str__(self): - diagnostics = "Optimizer: \t\t\t\t %s\n" % self.opt_name - diagnostics += "f(x_opt): \t\t\t\t %.3f\n" % self.f_opt - diagnostics += "Number of function evaluations: \t %d\n" % self.funct_eval - diagnostics += "Optimization status: \t\t\t %s\n" % self.status - diagnostics += "Time elapsed: \t\t\t\t %s\n" % self.time - return diagnostics - -class opt_tnc(Optimizer): - def __init__(self, *args, **kwargs): - Optimizer.__init__(self, *args, **kwargs) - self.opt_name = "TNC (Scipy implementation)" - - def opt(self, f_fp=None, f=None, fp=None): - """ - Run the TNC optimizer - - """ - tnc_rcstrings = ['Local minimum', 'Converged', 'XConverged', 'Maximum number of f evaluations reached', - 'Line search failed', 'Function is constant'] - - assert f_fp != None, "TNC requires f_fp" - - opt_dict = {} - if self.xtol is not None: - opt_dict['xtol'] = self.xtol - if self.ftol is not None: - opt_dict['ftol'] = self.ftol - if self.gtol is not None: - opt_dict['pgtol'] = self.gtol - - opt_result = optimize.fmin_tnc(f_fp, self.x_init, messages=self.messages, - maxfun=self.max_f_eval, **opt_dict) - self.x_opt = opt_result[0] - self.f_opt = f_fp(self.x_opt)[0] - self.funct_eval = opt_result[1] - self.status = tnc_rcstrings[opt_result[2]] - -class opt_lbfgsb(Optimizer): - def __init__(self, *args, **kwargs): - Optimizer.__init__(self, *args, **kwargs) - self.opt_name = "L-BFGS-B (Scipy implementation)" - - def opt(self, f_fp=None, f=None, fp=None): - """ - Run the optimizer - - 
""" - rcstrings = ['Converged', 'Maximum number of f evaluations reached', 'Error'] - - assert f_fp != None, "BFGS requires f_fp" - - if self.messages: - iprint = 1 - else: - iprint = -1 - - opt_dict = {} - if self.xtol is not None: - print("WARNING: l-bfgs-b doesn't have an xtol arg, so I'm going to ignore it") - if self.ftol is not None: - print("WARNING: l-bfgs-b doesn't have an ftol arg, so I'm going to ignore it") - if self.gtol is not None: - opt_dict['pgtol'] = self.gtol - if self.bfgs_factor is not None: - opt_dict['factr'] = self.bfgs_factor - - opt_result = optimize.fmin_l_bfgs_b(f_fp, self.x_init, iprint=iprint, - maxfun=self.max_iters,maxiter=self.max_iters, **opt_dict) - self.x_opt = opt_result[0] - self.f_opt = f_fp(self.x_opt)[0] - self.funct_eval = opt_result[2]['funcalls'] - self.status = rcstrings[opt_result[2]['warnflag']] - - #a more helpful error message is available in opt_result in the Error case - if opt_result[2]['warnflag']==2: - self.status = 'Error' + str(opt_result[2]['task']) - -class opt_bfgs(Optimizer): - def __init__(self, *args, **kwargs): - Optimizer.__init__(self, *args, **kwargs) - self.opt_name = "BFGS (Scipy implementation)" - - def opt(self, f_fp=None, f=None, fp=None): - """ - Run the optimizer - - """ - rcstrings = ['','Maximum number of iterations exceeded', 'Gradient and/or function calls not changing'] - - opt_dict = {} - if self.xtol is not None: - print("WARNING: bfgs doesn't have an xtol arg, so I'm going to ignore it") - if self.ftol is not None: - print("WARNING: bfgs doesn't have an ftol arg, so I'm going to ignore it") - if self.gtol is not None: - opt_dict['pgtol'] = self.gtol - - opt_result = optimize.fmin_bfgs(f, self.x_init, fp, disp=self.messages, - maxiter=self.max_iters, full_output=True, **opt_dict) - self.x_opt = opt_result[0] - self.f_opt = f_fp(self.x_opt)[0] - self.funct_eval = opt_result[4] - self.status = rcstrings[opt_result[6]] - -class opt_simplex(Optimizer): - def __init__(self, *args, **kwargs): - Optimizer.__init__(self, *args, **kwargs) - self.opt_name = "Nelder-Mead simplex routine (via Scipy)" - - def opt(self, f_fp=None, f=None, fp=None): - """ - The simplex optimizer does not require gradients. 
- """ - - statuses = ['Converged', 'Maximum number of function evaluations made', 'Maximum number of iterations reached'] - - opt_dict = {} - if self.xtol is not None: - opt_dict['xtol'] = self.xtol - if self.ftol is not None: - opt_dict['ftol'] = self.ftol - if self.gtol is not None: - print("WARNING: simplex doesn't have an gtol arg, so I'm going to ignore it") - - opt_result = optimize.fmin(f, self.x_init, (), disp=self.messages, - maxfun=self.max_f_eval, full_output=True, **opt_dict) - - self.x_opt = opt_result[0] - self.f_opt = opt_result[1] - self.funct_eval = opt_result[3] - self.status = statuses[opt_result[4]] - self.trace = None - - -class opt_rasm(Optimizer): - def __init__(self, *args, **kwargs): - Optimizer.__init__(self, *args, **kwargs) - self.opt_name = "Rasmussen's Conjugate Gradient" - - def opt(self, f_fp=None, f=None, fp=None): - """ - Run Rasmussen's Conjugate Gradient optimizer - """ - - assert f_fp != None, "Rasmussen's minimizer requires f_fp" - statuses = ['Converged', 'Line search failed', 'Maximum number of f evaluations reached', - 'NaNs in optimization'] - - opt_dict = {} - if self.xtol is not None: - print("WARNING: minimize doesn't have an xtol arg, so I'm going to ignore it") - if self.ftol is not None: - print("WARNING: minimize doesn't have an ftol arg, so I'm going to ignore it") - if self.gtol is not None: - print("WARNING: minimize doesn't have an gtol arg, so I'm going to ignore it") - - opt_result = rasm.minimize(self.x_init, f_fp, (), messages=self.messages, - maxnumfuneval=self.max_f_eval) - self.x_opt = opt_result[0] - self.f_opt = opt_result[1][-1] - self.funct_eval = opt_result[2] - self.status = statuses[opt_result[3]] - - self.trace = opt_result[1] - -class opt_SCG(Optimizer): - def __init__(self, *args, **kwargs): - if 'max_f_eval' in kwargs: - warn("max_f_eval deprecated for SCG optimizer: use max_iters instead!\nIgnoring max_f_eval!", FutureWarning) - Optimizer.__init__(self, *args, **kwargs) - - self.opt_name = "Scaled Conjugate Gradients" - - def opt(self, f_fp=None, f=None, fp=None): - assert not f is None - assert not fp is None - - opt_result = SCG(f, fp, self.x_init, display=self.messages, - maxiters=self.max_iters, - max_f_eval=self.max_f_eval, - xtol=self.xtol, ftol=self.ftol, - gtol=self.gtol) - - self.x_opt = opt_result[0] - self.trace = opt_result[1] - self.f_opt = self.trace[-1] - self.funct_eval = opt_result[2] - self.status = opt_result[3] - -class Opt_Adadelta(Optimizer): - def __init__(self, step_rate=0.1, decay=0.9, momentum=0, *args, **kwargs): - Optimizer.__init__(self, *args, **kwargs) - self.opt_name = "Adadelta (climin)" - self.step_rate=step_rate - self.decay = decay - self.momentum = momentum - - def opt(self, f_fp=None, f=None, fp=None): - assert not fp is None - - import climin - - opt = climin.adadelta.Adadelta(self.x_init, fp, step_rate=self.step_rate, decay=self.decay, momentum=self.momentum) - - for info in opt: - if info['n_iter']>=self.max_iters: - self.x_opt = opt.wrt - self.status = 'maximum number of function evaluations exceeded ' - break - -def get_optimizer(f_min): - - optimizers = {'fmin_tnc': opt_tnc, - 'simplex': opt_simplex, - 'lbfgsb': opt_lbfgsb, - 'org-bfgs': opt_bfgs, - 'scg': opt_SCG, - 'adadelta':Opt_Adadelta} - - if rasm_available: - optimizers['rasmussen'] = opt_rasm - - for opt_name in optimizers.keys(): - if opt_name.lower().find(f_min.lower()) != -1: - return optimizers[opt_name] - - raise KeyError('No optimizer was found matching the name: %s' % f_min) diff --git 
a/GPy/plotting/gpy_plot/gp_plots.py b/GPy/plotting/gpy_plot/gp_plots.py index 353dc7fb..3e265842 100644 --- a/GPy/plotting/gpy_plot/gp_plots.py +++ b/GPy/plotting/gpy_plot/gp_plots.py @@ -319,9 +319,17 @@ def plot(self, plot_limits=None, fixed_inputs=None, :param {2d|3d} projection: plot in 2d or 3d? :param bool legend: convenience, whether to put a legend on the plot or not. """ - canvas, _ = pl().new_canvas(projection=projection, **kwargs) X = get_x_y_var(self)[0] helper_data = helper_for_plot_data(self, X, plot_limits, visible_dims, fixed_inputs, resolution) + xmin, xmax = helper_data[5:7] + free_dims = helper_data[1] + + if not 'xlim' in kwargs: + kwargs['xlim'] = (xmin[0], xmax[0]) + if not 'ylim' in kwargs and len(free_dims) == 2: + kwargs['ylim'] = (xmin[1], xmax[1]) + + canvas, _ = pl().new_canvas(projection=projection, **kwargs) helper_prediction = helper_predict_with_model(self, helper_data[2], plot_raw, apply_link, np.linspace(2.5, 97.5, levels*2) if plot_density else (lower,upper), get_which_data_ycols(self, which_data_ycols), @@ -389,7 +397,7 @@ def plot_f(self, plot_limits=None, fixed_inputs=None, :param dict error_kwargs: kwargs for the error plot for the plotting library you are using :param kwargs plot_kwargs: kwargs for the data plot for the plotting library you are using """ - plot(self, plot_limits, fixed_inputs, resolution, True, + return plot(self, plot_limits, fixed_inputs, resolution, True, apply_link, which_data_ycols, which_data_rows, visible_dims, levels, samples, 0, lower, upper, plot_data, plot_inducing, From 3761d186f3debcad86bba9d43ef48977420fed2c Mon Sep 17 00:00:00 2001 From: Max Zwiessele Date: Fri, 19 Feb 2016 17:15:29 +0000 Subject: [PATCH 5/6] [plotting] limits added --- GPy/plotting/gpy_plot/gp_plots.py | 4 ++-- GPy/plotting/gpy_plot/plot_util.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/GPy/plotting/gpy_plot/gp_plots.py b/GPy/plotting/gpy_plot/gp_plots.py index 3e265842..7439bd9d 100644 --- a/GPy/plotting/gpy_plot/gp_plots.py +++ b/GPy/plotting/gpy_plot/gp_plots.py @@ -341,8 +341,8 @@ def plot(self, plot_limits=None, fixed_inputs=None, if hasattr(self, 'Z') and plot_inducing: plots.update(_plot_inducing(self, canvas, visible_dims, projection, 'Inducing')) if plot_data: - plots.update(_plot_data(self, canvas, which_data_rows, which_data_ycols, visible_dims, projection, "Data")) - plots.update(_plot_data_error(self, canvas, which_data_rows, which_data_ycols, visible_dims, projection, "Data Error")) + plots.update(_plot_data(self, canvas, which_data_rows, which_data_ycols, free_dims, projection, "Data")) + plots.update(_plot_data_error(self, canvas, which_data_rows, which_data_ycols, free_dims, projection, "Data Error")) plots.update(_plot(self, canvas, plots, helper_data, helper_prediction, levels, plot_inducing, plot_density, projection)) if plot_raw and (samples_likelihood > 0): helper_prediction = helper_predict_with_model(self, helper_data[2], False, diff --git a/GPy/plotting/gpy_plot/plot_util.py b/GPy/plotting/gpy_plot/plot_util.py index a910cd6f..4e71a3bc 100644 --- a/GPy/plotting/gpy_plot/plot_util.py +++ b/GPy/plotting/gpy_plot/plot_util.py @@ -349,7 +349,7 @@ def x_frame1D(X,plot_limits=None,resolution=None): xmin,xmax = X.min(0),X.max(0) xmin, xmax = xmin-0.25*(xmax-xmin), xmax+0.25*(xmax-xmin) elif len(plot_limits) == 2: - xmin, xmax = plot_limits + xmin, xmax = map(np.atleast_1d, plot_limits) else: raise ValueError("Bad limits for plotting") From bfb0ecdcb4b232e3b389d82a9ecc63e739aa7184 Mon Sep 17 00:00:00 2001 
From: Zhenwen Dai Date: Fri, 19 Feb 2016 17:59:50 +0000 Subject: [PATCH 6/6] add test case for hmc sampler --- GPy/testing/inference_tests.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/GPy/testing/inference_tests.py b/GPy/testing/inference_tests.py index 7a091589..267ce594 100644 --- a/GPy/testing/inference_tests.py +++ b/GPy/testing/inference_tests.py @@ -51,5 +51,20 @@ class InferenceXTestCase(unittest.TestCase): np.testing.assert_array_almost_equal(m.X, mi.X, decimal=2) +class HMCSamplerTest(unittest.TestCase): + + def test_sampling(self): + np.random.seed(1) + x = np.linspace(0.,2*np.pi,100)[:,None] + y = -np.cos(x)+np.random.randn(*x.shape)*0.3+1 + + m = GPy.models.GPRegression(x,y) + m.kern.lengthscale.set_prior(GPy.priors.Gamma.from_EV(1.,10.)) + m.kern.variance.set_prior(GPy.priors.Gamma.from_EV(1.,10.)) + m.likelihood.variance.set_prior(GPy.priors.Gamma.from_EV(1.,10.)) + + hmc = GPy.inference.mcmc.HMC(m,stepsize=1e-2) + s = hmc.sample(num_samples=3) + if __name__ == "__main__": unittest.main()
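
The new test above only smoke-tests three samples. As a minimal usage sketch of the sampler on the same toy model (not part of this patch series): it assumes hmc.sample() returns a 2-D array of hyperparameter samples, one row per sample, and the sample count and burn-in length below are illustrative choices.

    import numpy as np
    import GPy

    np.random.seed(1)
    # Same toy 1-D regression problem as in HMCSamplerTest.test_sampling.
    x = np.linspace(0., 2 * np.pi, 100)[:, None]
    y = -np.cos(x) + np.random.randn(*x.shape) * 0.3 + 1

    m = GPy.models.GPRegression(x, y)
    # HMC samples the hyperparameters, so each one needs a proper prior.
    m.kern.lengthscale.set_prior(GPy.priors.Gamma.from_EV(1., 10.))
    m.kern.variance.set_prior(GPy.priors.Gamma.from_EV(1., 10.))
    m.likelihood.variance.set_prior(GPy.priors.Gamma.from_EV(1., 10.))

    hmc = GPy.inference.mcmc.HMC(m, stepsize=1e-2)
    s = hmc.sample(num_samples=300)  # assumed shape: (num_samples, n_params)

    # Illustrative post-processing: drop a burn-in, then summarize the
    # posterior over the hyperparameters by its sample mean.
    burn_in = 100
    print(s[burn_in:].mean(axis=0))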