From ea3bfbb59715a4801f844295d53c63bda17f0e47 Mon Sep 17 00:00:00 2001
From: James Hensman
Date: Tue, 18 Aug 2015 08:00:47 +0100
Subject: [PATCH] minor bugfixes in plotting: quantiles are now computed using
 predict_kw correctly

---
 GPy/core/gp.py                           | 6 ++++--
 GPy/models/gp_var_gauss.py               | 3 ++-
 GPy/plotting/matplot_dep/models_plots.py | 3 ++-
 GPy/testing/inference_tests.py           | 2 +-
 4 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/GPy/core/gp.py b/GPy/core/gp.py
index 12fb3d27..aeee34ac 100644
--- a/GPy/core/gp.py
+++ b/GPy/core/gp.py
@@ -249,7 +249,7 @@ class GP(Model):
         mean, var = self.likelihood.predictive_values(mu, var, full_cov, Y_metadata=Y_metadata)
         return mean, var
 
-    def predict_quantiles(self, X, quantiles=(2.5, 97.5), Y_metadata=None):
+    def predict_quantiles(self, X, quantiles=(2.5, 97.5), Y_metadata=None, kern=None):
         """
         Get the predictive quantiles around the prediction at X
 
@@ -257,10 +257,12 @@ class GP(Model):
         :type X: np.ndarray (Xnew x self.input_dim)
         :param quantiles: tuple of quantiles, default is (2.5, 97.5) which is the 95% interval
         :type quantiles: tuple
+        :param kern: optional kernel to use for prediction
+        :type kern: GPy.kern.Kern
         :returns: list of quantiles for each X and predictive quantiles for interval combination
         :rtype: [np.ndarray (Xnew x self.output_dim), np.ndarray (Xnew x self.output_dim)]
         """
-        m, v = self._raw_predict(X, full_cov=False)
+        m, v = self._raw_predict(X, full_cov=False, kern=kern)
         if self.normalizer is not None:
             m, v = self.normalizer.inverse_mean(m), self.normalizer.inverse_variance(v)
         return self.likelihood.predictive_quantiles(m, v, quantiles, Y_metadata=Y_metadata)
diff --git a/GPy/models/gp_var_gauss.py b/GPy/models/gp_var_gauss.py
index 729b6bb8..9f2229f0 100644
--- a/GPy/models/gp_var_gauss.py
+++ b/GPy/models/gp_var_gauss.py
@@ -60,7 +60,8 @@ class GPVariationalGaussianApproximation(Model):
         var = np.diag(Sigma).reshape(-1,1)
         F, dF_dm, dF_dv, dF_dthetaL = self.likelihood.variational_expectations(self.Y, m, var, Y_metadata=self.Y_metadata)
-        self.likelihood.gradient = dF_dthetaL.sum(1).sum(1)
+        if dF_dthetaL is not None:
+            self.likelihood.gradient = dF_dthetaL.sum(1).sum(1)
 
         dF_da = np.dot(K, dF_dm)
         SigmaB = Sigma*self.beta
         dF_db = -np.diag(Sigma.dot(np.diag(dF_dv.flatten())).dot(SigmaB))*2
diff --git a/GPy/plotting/matplot_dep/models_plots.py b/GPy/plotting/matplot_dep/models_plots.py
index 78c80cb5..d0f6c952 100644
--- a/GPy/plotting/matplot_dep/models_plots.py
+++ b/GPy/plotting/matplot_dep/models_plots.py
@@ -110,7 +110,8 @@ def plot_fit(model, plot_limits=None, which_data_rows='all',
         else:
             Y_metadata['output_index'] = extra_data
         m, v = model.predict(Xgrid, full_cov=False, Y_metadata=Y_metadata, **predict_kw)
-        lower, upper = model.predict_quantiles(Xgrid, Y_metadata=Y_metadata)
+        fmu, fv = model._raw_predict(Xgrid, full_cov=False, **predict_kw)
+        lower, upper = model.likelihood.predictive_quantiles(fmu, fv, (2.5, 97.5), Y_metadata=Y_metadata)
 
 
         for d in which_data_ycols:
diff --git a/GPy/testing/inference_tests.py b/GPy/testing/inference_tests.py
index cd85235d..c1fce8b9 100644
--- a/GPy/testing/inference_tests.py
+++ b/GPy/testing/inference_tests.py
@@ -8,7 +8,7 @@ The test cases for various inference algorithms
 import unittest, itertools
 import numpy as np
 import GPy
-
+#np.seterr(invalid='raise')
 class InferenceXTestCase(unittest.TestCase):
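
Note (not part of the patch): a minimal usage sketch of the new `kern` argument to `GP.predict_quantiles` introduced above. The toy data, RBF kernel, and GPRegression setup below are illustrative assumptions, not taken from the commit.

# Sketch: exercising predict_quantiles(..., kern=...) on a toy regression model.
import numpy as np
import GPy

# Hypothetical 1-D toy data for demonstration only.
X = np.random.uniform(-3., 3., (20, 1))
Y = np.sin(X) + 0.05 * np.random.randn(20, 1)

kernel = GPy.kern.RBF(input_dim=1)
model = GPy.models.GPRegression(X, Y, kernel)
model.optimize()

Xnew = np.linspace(-3., 3., 50)[:, None]

# Default behaviour: 95% predictive quantiles under the model's own kernel.
lower, upper = model.predict_quantiles(Xnew, quantiles=(2.5, 97.5))

# With this patch, an explicit kernel can be forwarded to _raw_predict,
# so the quantiles are computed under that kernel instead.
lower_k, upper_k = model.predict_quantiles(Xnew, kern=kernel)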