[normalizer] first commit for normalizer in GPy

Conflicts:
	GPy/core/sparse_gp.py
	GPy/models/bayesian_gplvm.py
mzwiessele 2014-08-27 12:05:13 -07:00
parent 7ed0e70a46
commit 7ec0e75c0e
6 changed files with 101 additions and 24 deletions

GPy/core/gp.py

@@ -15,6 +15,7 @@ from parameterization.variational import VariationalPosterior
 from scipy.sparse.base import issparse
 import logging
+from GPy.util.normalizer import GaussianNorm
 logger = logging.getLogger("GP")
 class GP(Model):
@@ -27,12 +28,17 @@ class GP(Model):
     :param likelihood: a GPy likelihood
     :param :class:`~GPy.inference.latent_function_inference.LatentFunctionInference` inference_method: The inference method to use for this GP
     :rtype: model object
+    :param Norm normalizer:
+        normalize the outputs Y.
+        Prediction will be un-normalized using this normalizer.
+        If normalizer is None, we will normalize using GaussianNorm.
+        If normalizer is False, no normalization will be done.
     .. Note:: Multiple independent outputs are allowed using columns of Y
     """
-    def __init__(self, X, Y, kernel, likelihood, inference_method=None, name='gp', Y_metadata=None):
+    def __init__(self, X, Y, kernel, likelihood, inference_method=None, name='gp', Y_metadata=None, normalizer=False):
         super(GP, self).__init__(name)
         assert X.ndim == 2
@@ -44,8 +50,22 @@ class GP(Model):
         assert Y.ndim == 2
         logger.info("initializing Y")
         if issparse(Y): self.Y = Y
-        else: self.Y = ObsAr(Y)
+
+        if normalizer is None:
+            self.normalizer = GaussianNorm()
+        elif normalizer is False:
+            self.normalizer = None
+        else:
+            self.normalizer = normalizer
+
+        if self.normalizer is not None:
+            self.normalizer.scale_by(Y)
+            self.Y_normalized = ObsAr(self.normalizer.normalize(Y))
+            self.Y = Y
+        else:
+            self.Y = ObsAr(Y)
+            self.Y_normalized = self.Y

         assert Y.shape[0] == self.num_data
         _, self.output_dim = self.Y.shape
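The GaussianNorm class imported at the top of this file lives in GPy/util/normalizer.py, which is not part of this diff. From the four methods the constructor and predict call on it (scale_by, normalize, inverse_mean, inverse_variance), a minimal standardizing normalizer might look like the sketch below; the class name GaussianNormSketch and the choice of per-column mean/std statistics are assumptions, not the committed implementation.

import numpy as np

class GaussianNormSketch(object):
    """Hypothetical stand-in for GPy.util.normalizer.GaussianNorm:
    standardizes Y column-wise to zero mean and unit variance."""

    def scale_by(self, Y):
        # Record per-column statistics of the training outputs.
        Y = np.asarray(Y)
        self.mean = Y.mean(axis=0)
        self.std = Y.std(axis=0)

    def normalize(self, Y):
        # Map raw outputs into the space the inference method sees.
        return (np.asarray(Y) - self.mean) / self.std

    def inverse_mean(self, mu):
        # Undo the normalization on a predictive mean.
        return mu * self.std + self.mean

    def inverse_variance(self, var):
        # Variances pick up the squared scale: Var[a*Y + b] = a**2 * Var[Y].
        return var * self.std ** 2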
@@ -74,7 +94,7 @@ class GP(Model):
         self.add_parameter(self.likelihood)

     def parameters_changed(self):
-        self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y, self.Y_metadata)
+        self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y_normalized, self.Y_metadata)
         self.likelihood.update_gradients(self.grad_dict['dL_dthetaL'])
         self.kern.update_gradients_full(self.grad_dict['dL_dK'], self.X)
@@ -142,7 +162,11 @@ class GP(Model):
         # now push through likelihood
         mean, var = self.likelihood.predictive_values(mu, var, full_cov, Y_metadata)
-        return mean, var
+        if self.normalizer is not None:
+            return self.normalizer.inverse_mean(mean), self.normalizer.inverse_variance(var)
+        else:
+            return mean, var

     def predict_quantiles(self, X, quantiles=(2.5, 97.5), Y_metadata=None):
         m, v = self._raw_predict(X, full_cov=False)
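Taken together, inference now runs on self.Y_normalized while predict maps its results back through the normalizer, so callers keep working in the original units of Y. A hypothetical usage sketch (data, kernel, and likelihood choices are illustrative, assuming the GPy API of this era):

import numpy as np
import GPy

X = np.linspace(0, 1, 50)[:, None]
Y = 100.0 + 10.0 * np.sin(6.0 * X) + 0.5 * np.random.randn(50, 1)  # large offset and scale

# normalizer=None  -> default GaussianNorm, inference sees standardized Y
# normalizer=False -> previous behaviour (and the default): raw Y
m = GPy.core.GP(X, Y, kernel=GPy.kern.RBF(1),
                likelihood=GPy.likelihoods.Gaussian(),
                normalizer=None)
m.optimize()
mean, var = m.predict(X)  # already un-normalized back to the original Y scale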

GPy/core/sparse_gp.py

@@ -34,7 +34,7 @@ class SparseGP(GP):
     """
-    def __init__(self, X, Y, Z, kernel, likelihood, inference_method=None, name='sparse gp', Y_metadata=None):
+    def __init__(self, X, Y, Z, kernel, likelihood, inference_method=None, name='sparse gp', Y_metadata=None, normalizer=False):
         #pick a sensible inference method
         if inference_method is None:
@@ -48,7 +48,7 @@ class SparseGP(GP):
         self.Z = Param('inducing inputs', Z)
         self.num_inducing = Z.shape[0]

-        GP.__init__(self, X, Y, kernel, likelihood, inference_method=inference_method, name=name, Y_metadata=Y_metadata)
+        GP.__init__(self, X, Y, kernel, likelihood, inference_method=inference_method, name=name, Y_metadata=Y_metadata, normalizer=normalizer)

         logger.info("Adding Z as parameter")
         self.add_parameter(self.Z, index=0)
@@ -56,7 +56,7 @@ class SparseGP(GP):
         return isinstance(self.X, VariationalPosterior)

     def parameters_changed(self):
-        self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.Z, self.likelihood, self.Y, self.Y_metadata)
+        self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.Z, self.likelihood, self.Y_normalized, self.Y_metadata)
         self.likelihood.update_gradients(self.grad_dict['dL_dthetaL'])
         if isinstance(self.X, VariationalPosterior):
             #gradients wrt kernel
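SparseGP itself only forwards the new keyword to GP.__init__ and swaps self.Y for self.Y_normalized in its inference call, so sparse models inherit the same behaviour with no further changes. A hypothetical sketch (data and argument order are illustrative assumptions):

import numpy as np
import GPy

X = np.random.rand(200, 1)
Y = 50.0 + np.cos(10.0 * X) + 0.1 * np.random.randn(200, 1)
Z = np.random.rand(10, 1)  # inducing inputs

# Normalization is handled entirely by the GP base class.
m = GPy.core.SparseGP(X, Y, Z, GPy.kern.RBF(1), GPy.likelihoods.Gaussian(),
                      normalizer=None)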