Mirror of https://github.com/SheffieldML/GPy.git, synced 2026-05-11 04:52:37 +02:00
Moved posterior_covariance to Posterior class

commit ae3ea375f8 (parent 0e2ec01839)
3 changed files with 27 additions and 17 deletions
@@ -8,7 +8,6 @@ from .mapping import Mapping
 from .. import likelihoods
 from .. import kern
 from ..inference.latent_function_inference import exact_gaussian_inference, expectation_propagation
-from ..util.linalg import dtrtrs
 from ..util.normalizer import Standardize
 from paramz import ObsAr
 
@@ -680,23 +679,11 @@ class GP(Model):
         mu_star, var_star = self._raw_predict(x_test)
         return self.likelihood.log_predictive_density_sampling(y_test, mu_star, var_star, Y_metadata=Y_metadata, num_samples=num_samples)
 
-    def posterior_covariance(self, X1, X2):
+    def posterior_covariance_between_points(self, X1, X2):
         """
         Computes the posterior covariance between points.
 
         :param X1: some input observations
         :param X2: other input observations
         """
-        # ndim == 3 is a model for missing data
-        if self.posterior.woodbury_chol.ndim != 2:
-            raise RuntimeError("This method does not support posterior for missing data models")
-
-        Kx1 = self.kern.K(self.X, X1)
-        Kx2 = self.kern.K(self.X, X2)
-        K12 = self.kern.K(X1, X2)
-
-        tmp1 = dtrtrs(self.posterior.woodbury_chol, Kx1)[0]
-        tmp2 = dtrtrs(self.posterior.woodbury_chol, Kx2)[0]
-        var = K12 - tmp1.T.dot(tmp2)
-
-        return var
+        return self.posterior.covariance_between_points(self.kern, self.X, X1, X2)
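Note on usage: after this change the public entry point is the renamed GP.posterior_covariance_between_points, which now simply delegates to the Posterior object. A minimal usage sketch, assuming a plain GPRegression model; the data, kernel, and query points below are illustrative, not taken from the commit:

import numpy as np
import GPy

np.random.seed(0)
X = np.random.rand(20, 1)                          # training inputs (illustrative)
Y = np.sin(3 * X) + 0.05 * np.random.randn(20, 1)  # noisy 1-d targets
m = GPy.models.GPRegression(X, Y, GPy.kern.RBF(1))

X1 = np.linspace(0, 1, 5)[:, None]                 # first block of query points
X2 = np.linspace(0, 1, 7)[:, None]                 # second block of query points
C = m.posterior_covariance_between_points(X1, X2)  # delegates to m.posterior
print(C.shape)                                     # (5, 7): Cov[f(X1), f(X2)] under the posterior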
@@ -101,6 +101,29 @@ class Posterior(object):
             #self._covariance = self._K - self._K.dot(self.woodbury_inv).dot(self._K)
         return self._covariance
 
+    def covariance_between_points(self, kern, X, X1, X2):
+        """
+        Computes the posterior covariance between points.
+
+        :param kern: GP kernel
+        :param X: current input observations
+        :param X1: some input observations
+        :param X2: other input observations
+        """
+        # ndim == 3 is a model for missing data
+        if self.woodbury_chol.ndim != 2:
+            raise RuntimeError("This method does not support posterior for missing data models")
+
+        Kx1 = kern.K(X, X1)
+        Kx2 = kern.K(X, X2)
+        K12 = kern.K(X1, X2)
+
+        tmp1 = dtrtrs(self.woodbury_chol, Kx1)[0]
+        tmp2 = dtrtrs(self.woodbury_chol, Kx2)[0]
+        var = K12 - tmp1.T.dot(tmp2)
+
+        return var
+
     @property
     def precision(self):
         """
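Note on the computation: for exact Gaussian inference, woodbury_chol is the lower Cholesky factor L of K(X, X) + sigma^2 * I, and dtrtrs is the LAPACK triangular solve, so tmp1 = L^{-1} Kx1 and tmp2 = L^{-1} Kx2. The returned quantity is therefore the standard GP posterior cross-covariance K(X1, X2) - K(X, X1)^T (K(X, X) + sigma^2 I)^{-1} K(X, X2). A self-contained numpy sketch of the same computation; the rbf helper and noise value are illustrative assumptions, not part of the commit:

import numpy as np
from scipy.linalg import cholesky, solve_triangular

def rbf(A, B, lengthscale=0.5):
    # illustrative RBF kernel, standing in for kern.K
    d2 = ((A[:, None, :] - B[None, :, :]) ** 2).sum(-1)
    return np.exp(-0.5 * d2 / lengthscale ** 2)

np.random.seed(0)
X = np.random.rand(10, 1); X1 = np.random.rand(4, 1); X2 = np.random.rand(6, 1)
noise = 0.1
L = cholesky(rbf(X, X) + noise * np.eye(10), lower=True)  # plays the role of woodbury_chol

tmp1 = solve_triangular(L, rbf(X, X1), lower=True)  # L^{-1} Kx1, what dtrtrs(L, Kx1)[0] computes
tmp2 = solve_triangular(L, rbf(X, X2), lower=True)  # L^{-1} Kx2
var = rbf(X1, X2) - tmp1.T.dot(tmp2)                # K12 - Kx1^T (L L^T)^{-1} Kx2

# same result without the Cholesky shortcut
direct = rbf(X1, X2) - rbf(X, X1).T.dot(
    np.linalg.solve(rbf(X, X) + noise * np.eye(10), rbf(X, X2)))
assert np.allclose(var, direct)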
@@ -1084,7 +1084,7 @@ class GradientTests(np.testing.TestCase):
         Y = np.array([[1], [2]])
         m = GPy.models.GPRegression(X1, Y, kernel=k)
 
-        result = m.posterior_covariance(X1, X2)
+        result = m.posterior_covariance_between_points(X1, X2)
         expected = np.array([[0.4, 2.2], [1.0, 1.0]]) / 3.0
 
         self.assertTrue(np.allclose(result, expected))
@@ -1095,7 +1095,7 @@ class GradientTests(np.testing.TestCase):
         m = _create_missing_data_model(k, Q)
 
         with self.assertRaises(RuntimeError):
-            m.posterior_covariance(np.array([[1], [2]]), np.array([[3], [4]]))
+            m.posterior_covariance_between_points(np.array([[1], [2]]), np.array([[3], [4]]))
 
 def _create_missing_data_model(kernel, Q):
     D1, D2, D3, N, num_inducing = 13, 5, 8, 400, 3
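Note on the guard: for missing-data models the posterior keeps one Cholesky factor per output, so woodbury_chol.ndim == 3 and the method raises rather than returning a wrong answer. A hedged sketch of that behaviour; the hand-built Posterior below (and the assumed constructor keywords) is illustrative, not a real missing-data posterior:

import numpy as np
import GPy
from GPy.inference.latent_function_inference.posterior import Posterior

# stacked identity factors stand in for per-output Cholesky factors (ndim == 3)
chol3 = np.stack([np.eye(5), np.eye(5)])
post = Posterior(woodbury_chol=chol3, woodbury_vector=np.zeros((5, 1)), K=np.eye(5))

X = np.linspace(0, 1, 5)[:, None]
try:
    post.covariance_between_points(GPy.kern.RBF(1), X, X, X)
except RuntimeError as e:
    print(e)  # This method does not support posterior for missing data models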