mirror of https://github.com/SheffieldML/GPy.git
Added the structure to posterior.py to enable computation from the posterior mean and variance, instead of the woodbury components. This is the first step in being able to use this structure for EP and the Laplace approximation.
parent 5ec64d2279
commit 83a49f132a
3 changed files with 58 additions and 11 deletions
@@ -183,11 +183,10 @@ class GPBase(Model):
             m, v = self._raw_predict(Xgrid, which_parts=which_parts)
             lower = m - 2*np.sqrt(v)
             upper = m + 2*np.sqrt(v)
-            Y = self.likelihood.Y
+            Y = self.Y
         else:
-            m, v, lower, upper = self.predict(Xgrid, which_parts=which_parts, sampling=False) #Compute the exact mean
-            m_, v_, lower, upper = self.predict(Xgrid, which_parts=which_parts, sampling=True, num_samples=15000) #Approximate the percentiles
-            Y = self.likelihood.data
+            m, v, lower, upper = self.predict(Xgrid, which_parts=which_parts) #Compute the exact mean
+            Y = self.Y
         for d in which_data_ycols:
             gpplot(Xnew, m[:, d], lower[:, d], upper[:, d], axes=ax, edgecol=linecol, fillcol=fillcol)
             ax.plot(Xu[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', mew=1.5)
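For reference, the lower/upper bands drawn above are the usual two-standard-deviation (roughly 95%) Gaussian credible interval around the predictive mean. A minimal self-contained sketch with made-up numbers:

    import numpy as np

    m = np.array([[0.1], [0.4]])    # hypothetical predictive mean, one column per output
    v = np.array([[0.01], [0.04]])  # hypothetical predictive variance
    lower = m - 2*np.sqrt(v)        # ~2.5th percentile of a Gaussian
    upper = m + 2*np.sqrt(v)        # ~97.5th percentile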
@@ -13,7 +13,7 @@ class Posterior(object):
     schemes and the model classes.

     """
-    def __init__(self, log_marginal, dL_dK, dL_dtheta_lik, woodbury_chol, woodbury_vector, K):
+    def __init__(self, log_marginal, dL_dK, dL_dtheta_lik, woodbury_chol=None, woodbury_vector=None, K=None, mean=None, cov=None, K_chol=None):
         """
         log_marginal: log p(Y|X)
         DL_dK: d/dK log p(Y|X)
@@ -21,17 +21,51 @@ class Posterior(object):
         woodbury_chol : a lower triangular matrix L that satisfies posterior_covariance = K - K L^{-T} L^{-1} K
         woodbury_vector : a matrix (or vector, as Nx1 matrix) M which satisfies posterior_mean = K M
         K : the prior covariance (required for lazy computation of various quantities)
+        mean : the posterior mean
+        cov : the posterior covariance
+
+        Not all of the above need to be supplied! You *must* supply:
+
+          log_marginal
+          dL_dK
+          dL_dtheta_lik
+          K (for lazy computation)
+
+        You may supply either:
+
+          woodbury_chol
+          woodbury_vector
+
+        Or:
+
+          mean
+          cov
+          K_chol (for lazy computation)
+
+        From the supplied quantities, all of the others will be computed on demand (lazy computation)
+
         """
+        #obligatory
         self.log_marginal = log_marginal
         self.dL_dK = dL_dK
         self.dL_dtheta_lik = dL_dtheta_lik
-        self._woodbury_chol = woodbury_chol
-        self._woodbury_vector = woodbury_vector
         self._K = K

-        #these are computed lazily below
-        self._mean = None
-        self._covariance = None
+        if ((woodbury_chol is not None) and (woodbury_vector is not None) and (K is not None)) or ((mean is not None) and (cov is not None) and (K is not None)):
+            pass # we have sufficient to compute the posterior
+        else:
+            raise ValueError, "insufficient information to compute the posterior"
+
+        #option 1:
+        self._woodbury_chol = woodbury_chol
+        self._woodbury_vector = woodbury_vector
+
+        #option 2:
+        self._mean = mean
+        self._covariance = cov
+        self._K_chol = K_chol
+
+        #compute this lazily
         self._precision = None

     @property
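A hedged sketch of the two construction routes the new docstring allows. The toy covariance, the zero-valued gradients, and the stand-in woodbury quantities below are placeholders for illustration, not values the commit computes:

    import numpy as np

    N = 3
    K = np.eye(N) + 0.5*np.ones((N, N))        # toy prior covariance
    L = np.linalg.cholesky(K + 0.1*np.eye(N))  # stand-in woodbury_chol
    M = np.ones((N, 1))                        # stand-in woodbury_vector

    # option 1: supply the woodbury quantities
    p1 = Posterior(0.0, np.zeros((N, N)), np.zeros(1),
                   woodbury_chol=L, woodbury_vector=M, K=K)

    # option 2: supply the moments instead, plus K_chol for lazy computation
    mean = K.dot(M)                            # posterior_mean = K M
    tmp = np.linalg.solve(L, K)
    cov = K - tmp.T.dot(tmp)                   # K - K L^{-T} L^{-1} K
    p2 = Posterior(0.0, np.zeros((N, N)), np.zeros(1),
                   mean=mean, cov=cov, K=K, K_chol=np.linalg.cholesky(K))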
@@ -53,6 +87,20 @@ class Posterior(object):
             self._precision = np.linalg.inv(self.covariance)
         return self._precision

+    @property
+    def woodbury_chol(self):
+        if self._woodbury_chol is None:
+            ???
+        else:
+            return self._woodbury_chol
+
+    @property
+    def woodbury_vector(self):
+        if self._woodbury_vector is None:
+            ???
+        else:
+            return self._woodbury_vector
+

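The `???` placeholders are left unfilled in this commit. One way they might later be implemented (an assumption, not the author's code) is to invert the relations in the docstring: posterior_mean = K M gives M = K^{-1} mean, and posterior_covariance = K - K (L L^T)^{-1} K gives L L^T = K (K - cov)^{-1} K:

    import numpy as np

    def lazy_woodbury(K, mean, cov):
        # hypothetical helper: recover the woodbury quantities from the
        # posterior moments; assumes K - cov is invertible and the
        # result is positive definite
        M = np.linalg.solve(K, mean)            # woodbury_vector, since mean = K M
        W = K.dot(np.linalg.solve(K - cov, K))  # L L^T = K (K - cov)^{-1} K
        L = np.linalg.cholesky(W)               # woodbury_chol
        return L, M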
@@ -27,7 +27,7 @@ class GPRegression(GP):

         likelihood = likelihoods.Gaussian()

-        super(GPRegression, self).__init__(X, Y, kernel, likelihood, name='gp regression')
+        super(GPRegression, self).__init__(X, Y, kernel, likelihood, name='gp_regression')

     def getstate(self):
         return GP.getstate(self)