[infer_newX] updated for missing data

This commit is contained in:
Max Zwiessele 2014-11-14 11:09:51 +00:00
parent e7aac70c0a
commit c635de54b9
2 changed files with 23 additions and 23 deletions

View file

@@ -453,11 +453,7 @@ class GP(Model):
:param optimize: whether to optimize the location of new X (True by default) :param optimize: whether to optimize the location of new X (True by default)
:type optimize: boolean :type optimize: boolean
:return: a tuple containing the posterior estimation of X and the model that optimize X :return: a tuple containing the posterior estimation of X and the model that optimize X
<<<<<<< HEAD
:rtype: (GPy.core.parameterization.variational.VariationalPosterior or numpy.ndarray, GPy.core.Model)
=======
:rtype: (:class:`~GPy.core.parameterization.variational.VariationalPosterior` or numpy.ndarray, :class:`~GPy.core.model.Model`) :rtype: (:class:`~GPy.core.parameterization.variational.VariationalPosterior` or numpy.ndarray, :class:`~GPy.core.model.Model`)
>>>>>>> 22d30d9d39c70f806fe5bcb815cce9c8eb0f8dca
""" """
from ..inference.latent_function_inference.inferenceX import infer_newX from ..inference.latent_function_inference.inferenceX import infer_newX
return infer_newX(self, Y_new, optimize=optimize) return infer_newX(self, Y_new, optimize=optimize)

View file

@@ -34,10 +34,11 @@ class InferenceX(Model):
:type Y: numpy.ndarray :type Y: numpy.ndarray
""" """
def __init__(self, model, Y, name='inferenceX', init='L2'): def __init__(self, model, Y, name='inferenceX', init='L2'):
if np.isnan(Y).any(): if np.isnan(Y).any() or getattr(model, 'missing_data', False):
assert Y.shape[0]==1, "The current implementation of inference X only support one data point at a time with missing data!" assert Y.shape[0]==1, "The current implementation of inference X only support one data point at a time with missing data!"
self.missing_data = True self.missing_data = True
self.valid_dim = np.logical_not(np.isnan(Y[0])) self.valid_dim = np.logical_not(np.isnan(Y[0]))
self.ninan = getattr(model, 'ninan', None)
else: else:
self.missing_data = False self.missing_data = False
super(InferenceX, self).__init__(name) super(InferenceX, self).__init__(name)
@@ -108,12 +109,15 @@ class InferenceX(Model):
wv = self.posterior.woodbury_vector wv = self.posterior.woodbury_vector
if self.missing_data: if self.missing_data:
wv = wv[:,self.valid_dim] wv = wv[:,self.valid_dim]
output_dim = self.valid_dim.sum() if self.ninan is not None:
self.dL_dpsi2 = beta*(output_dim*self.posterior.woodbury_inv - np.einsum('md,od->mo',wv, wv))/2. self.dL_dpsi2 = beta/2.*(self.posterior.woodbury_inv[:, :, self.valid_dim] - np.einsum('md,od->mo',wv, wv)[:, :, None])
self.dL_dpsi2 = self.dL_dpsi2.sum(-1)
else:
self.dL_dpsi2 = beta/2.*(self.valid_dim.sum() * self.posterior.woodbury_inv - np.einsum('md,od->mo',wv, wv))
self.dL_dpsi1 = beta*np.dot(self.Y[:,self.valid_dim], wv.T) self.dL_dpsi1 = beta*np.dot(self.Y[:,self.valid_dim], wv.T)
self.dL_dpsi0 = - beta/2.* np.ones(self.Y.shape[0]) self.dL_dpsi0 = - beta/2.* np.ones(self.Y.shape[0])
else: else:
self.dL_dpsi2 = beta*(output_dim*self.posterior.woodbury_inv - np.einsum('md,od->mo',wv, wv))/2. self.dL_dpsi2 = beta/2.*(output_dim*self.posterior.woodbury_inv - np.einsum('md,od->mo',wv, wv))
self.dL_dpsi1 = beta*np.dot(self.Y, wv.T) self.dL_dpsi1 = beta*np.dot(self.Y, wv.T)
self.dL_dpsi0 = -beta/2.* np.ones(self.Y.shape[0]) self.dL_dpsi0 = -beta/2.* np.ones(self.Y.shape[0])