Merge branch 'devel' of https://github.com/SheffieldML/GPy into devel

This commit is contained in:
mzwiessele 2015-07-29 10:48:27 +02:00
commit d6defa6645
9 changed files with 92 additions and 92 deletions

View file

@@ -27,12 +27,19 @@ def infer_newX(model, Y_new, optimize=True, init='L2'):
class InferenceX(Model):
"""
The class for inference of new X with given new Y. (do_test_latent)
The model class for inference of new X with given new Y. (replacing the "do_test_latent" in Bayesian GPLVM)
It is a tiny inference model created from the original GP model. The kernel, likelihood (only Gaussian is supported at the moment)
and posterior distribution are taken from the original model.
For Regression models and GPLVM, a point estimate of the latent variable X will be inferred.
For Bayesian GPLVM, the variational posterior of X will be inferred.
X is inferred through a gradient optimization of the inference model.
:param model: the GPy model used in inference
:type model: GPy.core.Model
:param Y: the new observed data for inference
:type Y: numpy.ndarray
:param init: the distance metric of Y for initializing X with the nearest neighbour.
:type init: 'L2', 'NCC' and 'rand'
"""
def __init__(self, model, Y, name='inferenceX', init='L2'):
if np.isnan(Y).any() or getattr(model, 'missing_data', False):

View file

@@ -139,10 +139,6 @@ class Laplace(LatentFunctionInference):
f_hat, Ki_fhat = self.rasm_mode(K, Y, likelihood, Ki_f_init, Y_metadata=Y_metadata)
self.f_hat = f_hat
#self.Ki_fhat = Ki_fhat
#self.K = K.copy()
#Compute hessian and other variables at mode
log_marginal, woodbury_inv, dL_dK, dL_dthetaL = self.mode_computations(f_hat, Ki_fhat, K, Y, likelihood, kern, Y_metadata)
@@ -298,6 +294,11 @@ class Laplace(LatentFunctionInference):
else:
dL_dthetaL = np.zeros(likelihood.size)
#Cache some things for speedy LOO
self.Ki_W_i = Ki_W_i
self.K = K
self.W = W
self.f_hat = f_hat
return log_marginal, K_Wi_i, dL_dK, dL_dthetaL
def _compute_B_statistics(self, K, W, log_concave, *args, **kwargs):