[missing_data in sparse gp] can be extended towards missing_data handling in GP itself. Setting up GPy issue.
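For context: the missing_data machinery touched here works per output dimension. Rows whose value is NaN in a given output column are dropped before inference on that column, which is what the ninan masks in the SparseGP hunk below encode. A minimal numpy sketch of the masking idea (illustrative code, not GPy's API):

    import numpy as np

    # Toy data: 5 samples, 2 output dimensions, some entries missing (NaN).
    Y = np.array([[1.0, np.nan],
                  [2.0, 0.5],
                  [np.nan, 0.7],
                  [4.0, np.nan],
                  [5.0, 1.1]])

    # Per-dimension "not NaN" masks, analogous to self.ninan[:, d] below.
    ninan = ~np.isnan(Y)

    for d in range(Y.shape[1]):
        Yd = Y[ninan[:, d], d]  # only the observed rows of output dimension d
        print("dim %d: %d observed values" % (d, Yd.size))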

Max Zwiessele 2014-10-09 10:34:01 +01:00
parent de801c9d29
commit 829e40b25c
5 changed files with 15 additions and 11 deletions

View file

@@ -53,7 +53,9 @@ class Param(Parameterizable, ObsAr):
         return obj
 
     def __init__(self, name, input_array, default_constraint=None, *a, **kw):
+        self._in_init_ = True
         super(Param, self).__init__(name=name, default_constraint=default_constraint, *a, **kw)
+        self._in_init_ = False
 
     def build_pydot(self,G):
         import pydot

View file

@@ -18,7 +18,7 @@ import numpy as np
 import re
 import logging
 
-__updated__ = '2014-09-22'
+__updated__ = '2014-10-09'
 
 class HierarchyError(Exception):
     """
@@ -99,7 +99,7 @@ class Observable(object):
         :param bool trigger_parent: Whether to trigger the parent, after self has updated
         """
-        if not self.update_model():
+        if not self.update_model() or self._in_init_:
             #print "Warning: updates are off, updating the model will do nothing"
             return
         self._trigger_params_changed(trigger_parent)
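The two hunks above work together: Param brackets its super().__init__ call with _in_init_, and Observable's update path now also bails out while that flag is set, so a half-constructed parameter never triggers a model recompute. A stripped-down sketch of the pattern (simplified stand-ins, not the actual GPy classes):

    class Observable(object):
        _in_init_ = False  # class-level default keeps the check safe everywhere

        def update_model(self):
            return True

        def _trigger_params_changed(self, trigger_parent):
            print("recomputing model...")

        def notify(self, trigger_parent=True):
            if not self.update_model() or self._in_init_:
                # updates are off, or we are still inside __init__: do nothing
                return
            self._trigger_params_changed(trigger_parent)

    class Param(Observable):
        def __init__(self, name):
            self._in_init_ = True
            self.name = name
            self.notify()  # silently skipped during construction
            self._in_init_ = False

Constructing Param('x') prints nothing; calling notify() on the finished object recomputes as usual.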

View file

@@ -218,6 +218,7 @@ class SparseGP(GP):
             print message,
             for d in xrange(self.output_dim):
                 ninan = self.ninan[:, d]
+                print ' '*(len(message)) + '\r',
                 message = m_f(d)
                 print message,
@@ -249,9 +250,8 @@ class SparseGP(GP):
         if self.missing_data:
             self._outer_loop_for_missing_data()
         else:
-            self.posterior, self._log_marginal_likelihood, self.grad_dict, gradients, _ = self._inner_parameters_changed(self.kern, self.X, self.Z, self.likelihood, self.Y_normalized, self.Y_metadata)
-            self.kern.gradient = gradients['kerngrad']
-            self.Z.gradient = gradients['Zgrad']
+            self.posterior, self._log_marginal_likelihood, self.grad_dict, full_values, _ = self._inner_parameters_changed(self.kern, self.X, self.Z, self.likelihood, self.Y_normalized, self.Y_metadata)
+            self._outer_values_update(full_values)
 
     def _raw_predict(self, Xnew, full_cov=False, kern=None):
         """

View file

@@ -374,9 +374,6 @@ def bgplvm_simulation_missing_data(optimize=True, verbose=1,
     m = BayesianGPLVM(Ymissing, Q, init="random", num_inducing=num_inducing,
                       kernel=k, missing_data=True)
-    m.X.variance[:] = _np.random.uniform(0,.1,m.X.shape)
-    m.likelihood.variance = .01
-    m.parameters_changed()
     m.Yreal = Y
     if optimize:
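The example hands BayesianGPLVM a Ymissing matrix; missing entries are NaN-coded, matching the ninan masks above. One way such a matrix could be produced for the simulation (a hedged sketch; the variable names are guesses, not the example's actual code):

    import numpy as np

    rng = np.random.RandomState(0)
    Y = rng.randn(100, 12)             # fully observed simulation output

    # Knock out a random 30% of the entries for the missing-data variant:
    Ymissing = Y.copy()
    Ymissing[rng.rand(*Y.shape) < 0.3] = np.nan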

View file

@@ -32,7 +32,7 @@ class BayesianGPLVM(SparseGP_MPI):
         self.__IN_OPTIMIZATION__ = False
         self.logger = logging.getLogger(self.__class__.__name__)
 
-        if X == None:
+        if X is None:
             from ..util.initialization import initialize_latent
             self.logger.info("initializing latent space X with method {}".format(init))
             X, fracs = initialize_latent(init, input_dim, Y)
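The X == None fix is more than style: X is usually a numpy array here, and == broadcasts elementwise, so with current numpy the if statement raises instead of testing whether an argument was given. A quick demonstration:

    import numpy as np

    X = np.zeros((3, 2))
    try:
        if X == None:  # elementwise comparison yields a boolean array
            pass
    except ValueError as e:
        print(e)       # truth value of a multi-element array is ambiguous

    print(X is None)   # False: identity check behaves for arrays and None alike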
@@ -97,14 +97,19 @@ class BayesianGPLVM(SparseGP_MPI):
                              Z=Z, dL_dpsi0=grad_dict['dL_dpsi0'],
                              dL_dpsi1=grad_dict['dL_dpsi1'],
                              dL_dpsi2=grad_dict['dL_dpsi2'])
+        # Subsetting Variational Posterior objects makes the gradients
+        # empty. We need them to be 0 though:
+        X.mean.gradient[:] = 0
+        X.variance.gradient[:] = 0
         self.variational_prior.update_gradients_KL(X)
         current_values['meangrad'] += X.mean.gradient
         current_values['vargrad'] += X.variance.gradient
-        value_indices['meangrad'] = subset_indices['samples']
-        value_indices['vargrad'] = subset_indices['samples']
+        if subset_indices is not None:
+            value_indices['meangrad'] = subset_indices['samples']
+            value_indices['vargrad'] = subset_indices['samples']
         return posterior, log_marginal_likelihood, grad_dict, current_values, value_indices
 
     def _outer_values_update(self, full_values):
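Two details in this last hunk deserve a note: slicing a variational posterior leaves its gradient buffers unset, so they are zeroed before update_gradients_KL accumulates into them, and subset_indices is now optional (None when the full data set is processed, i.e. outside the missing-data loop). A compact numpy sketch of the zero-then-accumulate pattern (illustrative names only):

    import numpy as np

    mean_grad = np.empty(4)   # freshly allocated buffer, contents undefined
    mean_grad[:] = 0          # reset, mirroring X.mean.gradient[:] = 0

    def update_gradients_KL(grad):
        grad += 0.1           # the KL term accumulates *into* the buffer

    update_gradients_KL(mean_grad)
    print(mean_grad)          # [0.1 0.1 0.1 0.1]; without the reset, garbage would leak through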