diff --git a/GPy/core/gp.py b/GPy/core/gp.py index 6e28d5b9..d6f42c1c 100644 --- a/GPy/core/gp.py +++ b/GPy/core/gp.py @@ -8,7 +8,6 @@ from .mapping import Mapping from .. import likelihoods from .. import kern from ..inference.latent_function_inference import exact_gaussian_inference, expectation_propagation -from ..util.linalg import dtrtrs from ..util.normalizer import Standardize from paramz import ObsAr @@ -680,23 +679,11 @@ class GP(Model): mu_star, var_star = self._raw_predict(x_test) return self.likelihood.log_predictive_density_sampling(y_test, mu_star, var_star, Y_metadata=Y_metadata, num_samples=num_samples) - def posterior_covariance(self, X1, X2): + def posterior_covariance_between_points(self, X1, X2): """ Computes the posterior covariance between points. :param X1: some input observations :param X2: other input observations """ - # ndim == 3 is a model for missing data - if self.posterior.woodbury_chol.ndim != 2: - raise RuntimeError("This method does not support posterior for missing data models") - - Kx1 = self.kern.K(self.X, X1) - Kx2 = self.kern.K(self.X, X2) - K12 = self.kern.K(X1, X2) - - tmp1 = dtrtrs(self.posterior.woodbury_chol, Kx1)[0] - tmp2 = dtrtrs(self.posterior.woodbury_chol, Kx2)[0] - var = K12 - tmp1.T.dot(tmp2) - - return var + return self.posterior.covariance_between_points(self.kern, self.X, X1, X2) diff --git a/GPy/inference/latent_function_inference/posterior.py b/GPy/inference/latent_function_inference/posterior.py index 40ea5c73..4a8dea45 100644 --- a/GPy/inference/latent_function_inference/posterior.py +++ b/GPy/inference/latent_function_inference/posterior.py @@ -101,6 +101,29 @@ class Posterior(object): #self._covariance = self._K - self._K.dot(self.woodbury_inv).dot(self._K) return self._covariance + def covariance_between_points(self, kern, X, X1, X2): + """ + Computes the posterior covariance between points. 
+ + :param kern: GP kernel used to compute the covariances + :param X: input observations the posterior was conditioned on + :param X1: first set of input locations to compute covariance at + :param X2: second set of input locations to compute covariance at + """ + # ndim == 3 is a model for missing data + if self.woodbury_chol.ndim != 2: + raise RuntimeError("This method does not support posterior for missing data models") + + Kx1 = kern.K(X, X1) + Kx2 = kern.K(X, X2) + K12 = kern.K(X1, X2) + + tmp1 = dtrtrs(self.woodbury_chol, Kx1)[0] + tmp2 = dtrtrs(self.woodbury_chol, Kx2)[0] + var = K12 - tmp1.T.dot(tmp2) + + return var + @property def precision(self): """ diff --git a/GPy/testing/model_tests.py b/GPy/testing/model_tests.py index c8b10a54..c8d097a3 100644 --- a/GPy/testing/model_tests.py +++ b/GPy/testing/model_tests.py @@ -1084,7 +1084,7 @@ class GradientTests(np.testing.TestCase): Y = np.array([[1], [2]]) m = GPy.models.GPRegression(X1, Y, kernel=k) - result = m.posterior_covariance(X1, X2) + result = m.posterior_covariance_between_points(X1, X2) expected = np.array([[0.4, 2.2], [1.0, 1.0]]) / 3.0 self.assertTrue(np.allclose(result, expected)) @@ -1095,7 +1095,7 @@ class GradientTests(np.testing.TestCase): m = _create_missing_data_model(k, Q) with self.assertRaises(RuntimeError): - m.posterior_covariance(np.array([[1], [2]]), np.array([[3], [4]])) + m.posterior_covariance_between_points(np.array([[1], [2]]), np.array([[3], [4]])) def _create_missing_data_model(kernel, Q): D1, D2, D3, N, num_inducing = 13, 5, 8, 400, 3