2013-06-05 14:11:49 +01:00
|
|
|
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
|
|
|
|
|
# Licensed under the BSD 3-clause license (see LICENSE.txt)
|
|
|
|
|
|
|
|
|
|
import numpy as np
|
2014-01-28 13:39:59 +00:00
|
|
|
import sys
|
2014-01-22 15:06:53 +00:00
|
|
|
import warnings
|
|
|
|
|
from .. import kern
|
2014-01-24 10:26:44 +00:00
|
|
|
from ..util.linalg import dtrtrs
|
2014-01-22 15:06:53 +00:00
|
|
|
from model import Model
|
|
|
|
|
from parameterization import ObservableArray
|
2013-12-05 15:09:31 -05:00
|
|
|
from .. import likelihoods
|
2014-01-24 10:24:17 +00:00
|
|
|
from ..likelihoods.gaussian import Gaussian
|
2014-03-14 11:47:23 +00:00
|
|
|
from ..inference.latent_function_inference import exact_gaussian_inference, expectation_propagation
|
2014-02-24 15:44:11 +00:00
|
|
|
from parameterization.variational import VariationalPosterior
|
2013-06-05 14:11:49 +01:00
|
|
|
|
2014-01-22 15:06:53 +00:00
|
|
|
class GP(Model):
    """
    General purpose Gaussian process model

    :param X: input observations
    :param Y: output observations
    :param kernel: a GPy kernel, defaults to rbf+white
    :param likelihood: a GPy likelihood
    :param inference_method: an inference method object; if None, a sensible
        default is chosen from the likelihood (exact inference for Gaussian
        noise, EP otherwise)
    :param name: name for this model
    :param Y_metadata: optional dict of per-output metadata, passed through
        to the likelihood
    :rtype: model object

    .. Note:: Multiple independent outputs are allowed using columns of Y

    """
    def __init__(self, X, Y, kernel, likelihood, inference_method=None, name='gp', Y_metadata=None):
        super(GP, self).__init__(name)

        assert X.ndim == 2
        if isinstance(X, (ObservableArray, VariationalPosterior)):
            # already an observable/variational container: keep it as-is
            self.X = X
        else:
            self.X = ObservableArray(X)
        self.num_data, self.input_dim = self.X.shape

        assert Y.ndim == 2
        self.Y = ObservableArray(Y)
        assert Y.shape[0] == self.num_data
        _, self.output_dim = self.Y.shape

        self.Y_metadata = Y_metadata or {}

        assert isinstance(kernel, kern.Kern)
        #assert self.input_dim == kernel.input_dim
        self.kern = kernel

        assert isinstance(likelihood, likelihoods.Likelihood)
        self.likelihood = likelihood

        #find a sensible inference method
        if inference_method is None:
            if isinstance(likelihood, likelihoods.Gaussian) or isinstance(likelihood, likelihoods.MixedNoise):
                inference_method = exact_gaussian_inference.ExactGaussianInference()
            else:
                inference_method = expectation_propagation.EP()
            # single-string print() is valid under both Python 2 and 3
            # (the old ``print a, b`` statement form is Python-2-only)
            print("defaulting to %s for latent function inference" % inference_method)
        self.inference_method = inference_method

        self.add_parameter(self.kern)
        self.add_parameter(self.likelihood)

    def parameters_changed(self):
        """
        Recompute the posterior, log marginal likelihood and gradients
        whenever any parameter changes, then push the gradients into the
        likelihood and kernel.
        """
        self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y, self.Y_metadata)
        self.likelihood.update_gradients(self.grad_dict['dL_dthetaL'])
        self.kern.update_gradients_full(self.grad_dict['dL_dK'], self.X)

    def log_likelihood(self):
        """
        Return the log marginal likelihood computed by the inference method
        on the last call to :meth:`parameters_changed`.
        """
        return self._log_marginal_likelihood

    def _raw_predict(self, _Xnew, full_cov=False):
        """
        Internal helper function for making predictions, does not account
        for normalization or likelihood

        full_cov is a boolean which defines whether the full covariance matrix
        of the prediction is computed. If full_cov is False (default), only the
        diagonal of the covariance is returned.
        """
        Kx = self.kern.K(_Xnew, self.X).T
        #LiKx, _ = dtrtrs(self.posterior.woodbury_chol, np.asfortranarray(Kx), lower=1)
        WiKx = np.dot(self.posterior.woodbury_inv, Kx)
        mu = np.dot(Kx.T, self.posterior.woodbury_vector)
        if full_cov:
            Kxx = self.kern.K(_Xnew)
            #var = Kxx - tdot(LiKx.T)
            # posterior covariance is Kxx - Kx^T W^{-1} Kx; the ``Kxx -``
            # subtraction was previously missing, which returned the
            # explained part instead of the residual covariance (compare
            # the diagonal branch below)
            var = Kxx - np.dot(Kx.T, WiKx)
        else:
            Kxx = self.kern.Kdiag(_Xnew)
            #var = Kxx - np.sum(LiKx*LiKx, 0)
            var = Kxx - np.sum(WiKx * Kx, 0)
            # reshape only applies to the diagonal case; the full
            # covariance stays an (Nnew x Nnew) matrix
            var = var.reshape(-1, 1)
        return mu, var

    def predict(self, Xnew, full_cov=False, Y_metadata=None):
        """
        Predict the function(s) at the new point(s) Xnew.

        :param Xnew: The points at which to make a prediction
        :type Xnew: np.ndarray, Nnew x self.input_dim
        :param full_cov: whether to return the full covariance matrix, or just
            the diagonal
        :type full_cov: bool
        :param Y_metadata: optional metadata forwarded to the likelihood
        :returns: mean: posterior mean, a Numpy array, Nnew x self.output_dim
        :returns: var: posterior variance, a Numpy array, Nnew x 1 if
            full_cov=False, Nnew x Nnew otherwise

        If full_cov and self.input_dim > 1, the return shape of var is
        Nnew x Nnew x self.input_dim. If self.input_dim == 1, the return
        shape is Nnew x Nnew.
        This is to allow for different normalizations of the output dimensions.
        """
        #predict the latent function values
        mu, var = self._raw_predict(Xnew, full_cov=full_cov)

        # now push through likelihood
        mean, var = self.likelihood.predictive_values(mu, var, full_cov, Y_metadata)
        return mean, var

    def predict_quantiles(self, X, quantiles=(2.5, 97.5), Y_metadata=None):
        """
        Compute the requested quantiles (in percent) of the predictive
        distribution at the points X, via the likelihood.

        :param X: prediction inputs, np.ndarray Nnew x self.input_dim
        :param quantiles: quantiles to compute, in percent
        :type quantiles: tuple of floats
        :returns: list of quantile arrays, one per requested quantile
        """
        m, v = self._raw_predict(X, full_cov=False)
        return self.likelihood.predictive_quantiles(m, v, quantiles, Y_metadata)

    def posterior_samples_f(self, X, size=10, full_cov=True):
        """
        Samples the posterior GP at the points X.

        :param X: The points at which to take the samples.
        :type X: np.ndarray, Nnew x self.input_dim.
        :param size: the number of a posteriori samples.
        :type size: int.
        :param full_cov: whether to return the full covariance matrix, or just the diagonal.
        :type full_cov: bool.
        :returns: Ysim: set of simulations, a Numpy array (N x samples).
        """
        m, v = self._raw_predict(X, full_cov=full_cov)
        # collapse a (Nnew x Nnew x output_dim) covariance to 2d for sampling
        v = v.reshape(m.size, -1) if len(v.shape) == 3 else v
        if not full_cov:
            # diagonal-only variance: build a diagonal covariance matrix
            Ysim = np.random.multivariate_normal(m.flatten(), np.diag(v.flatten()), size).T
        else:
            Ysim = np.random.multivariate_normal(m.flatten(), v, size).T
        return Ysim

    def posterior_samples(self, X, size=10, full_cov=False, Y_metadata=None):
        """
        Samples the posterior GP at the points X, pushed through the
        likelihood (i.e. samples of Y rather than of the latent function).

        :param X: the points at which to take the samples.
        :type X: np.ndarray, Nnew x self.input_dim.
        :param size: the number of a posteriori samples.
        :type size: int.
        :param full_cov: whether to return the full covariance matrix, or just the diagonal.
        :type full_cov: bool.
        :param Y_metadata: optional metadata forwarded to the likelihood's
            sampling routine (e.g. which noise model to use for mixed noise).
        :returns: Ysim: set of simulations, a Numpy array (N x samples).
        """
        Ysim = self.posterior_samples_f(X, size, full_cov=full_cov)
        Ysim = self.likelihood.samples(Ysim, Y_metadata)
        return Ysim

    def plot_f(self, *args, **kwargs):
        """
        Plot the GP's view of the world, where the data is normalized and
        before applying a likelihood.

        This is a convenience function: arguments are passed to
        GPy.plotting.matplot_dep.models_plots.plot_f_fit
        """
        assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
        from ..plotting.matplot_dep import models_plots
        return models_plots.plot_fit_f(self, *args, **kwargs)

    def plot(self, *args, **kwargs):
        """
        Plot the posterior of the GP.
        - In one dimension, the function is plotted with a shaded region
          identifying two standard deviations.
        - In two dimsensions, a contour-plot shows the mean predicted
          function
        - In higher dimensions, use fixed_inputs to plot the GP with some of
          the inputs fixed.

        Can plot only part of the data and part of the posterior functions
        using which_data_rows which_data_ycols and which_parts

        This is a convenience function: arguments are passed to
        GPy.plotting.matplot_dep.models_plots.plot_fit
        """
        assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
        from ..plotting.matplot_dep import models_plots
        return models_plots.plot_fit(self, *args, **kwargs)

    def _getstate(self):
        """
        Get the current state of the class, here we return everything that is
        needed to recompute the model.
        """
        return Model._getstate(self) + [self.X,
                                        self.num_data,
                                        self.input_dim,
                                        self.kern,
                                        self.likelihood,
                                        self.output_dim,
                                        ]

    def _setstate(self, state):
        """
        Restore the model from the state list produced by :meth:`_getstate`.
        Items are popped in reverse of the order they were appended.
        """
        self.output_dim = state.pop()
        self.likelihood = state.pop()
        self.kern = state.pop()
        self.input_dim = state.pop()
        self.num_data = state.pop()
        self.X = state.pop()
        Model._setstate(self, state)