mirror of https://github.com/SheffieldML/GPy.git
synced 2026-05-11 13:02:38 +02:00

Changes to allow multiple output plotting

This commit is contained in:
parent 7e1e8de5e4
commit 1c2a4c5c64

6 changed files with 109 additions and 27 deletions
@@ -190,7 +190,7 @@ class GP(GPBase):
         Internal helper function for making predictions, does not account
         for normalization or likelihood
         """
-        assert isinstance(self.likelihood,EP_Mixed_Noise)
+        assert hasattr(self,'multioutput')
         index = np.ones_like(_Xnew)*output
         _Xnew = np.hstack((_Xnew,index))
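The changed assertion above reflects how single-output prediction works throughout this commit: the requested output is encoded as an extra index column appended to the test inputs. A minimal standalone numpy sketch of that tagging step (the shapes and the output id are illustrative):

    import numpy as np

    Xnew = np.linspace(0, 1, 5)[:, None]    # 5 test points, 1 input dimension (illustrative)
    output = 2                              # which output to predict (illustrative)

    index = np.ones_like(Xnew) * output     # one index entry per row
    Xnew_tagged = np.hstack((Xnew, index))  # shape (5, 2): [x, output_id]
    print(Xnew_tagged)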
@@ -208,5 +208,3 @@ class GP(GPBase):
-        if stop:
-            debug_this # @UndefinedVariable
         return mu, var
@@ -62,7 +62,7 @@ class GPBase(Model):
         fig = pb.figure(num=fignum)
         ax = fig.add_subplot(111)

-        if self.X.shape[1] == 1 and not isinstance(self.likelihood,EP_Mixed_Noise):
+        if self.X.shape[1] == 1 and not hasattr(self,'multioutput'):
             Xnew, xmin, xmax = x_frame1D(self.X, plot_limits=plot_limits)
             if samples == 0:
                 m, v = self._raw_predict(Xnew, which_parts=which_parts)
@@ -80,7 +80,7 @@ class GPBase(Model):
             ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin)
             ax.set_ylim(ymin, ymax)

-        elif self.X.shape[1] == 2 and not isinstance(self.likelihood,EP_Mixed_Noise):
+        elif self.X.shape[1] == 2 and not hasattr(self,'multioutput'):
             resolution = resolution or 50
             Xnew, xmin, xmax, xx, yy = x_frame2D(self.X, plot_limits, resolution)
             m, v = self._raw_predict(Xnew, which_parts=which_parts)
@@ -91,7 +91,7 @@ class GPBase(Model):
             ax.set_ylim(xmin[1], xmax[1])


-        elif self.X.shape[1] == 2 and isinstance(self.likelihood,EP_Mixed_Noise):
+        elif self.X.shape[1] == 2 and hasattr(self,'multioutput'):
             Xu = self.X[self.X[:,-1]==output ,0:1]
             Xnew, xmin, xmax = x_frame1D(Xu, plot_limits=plot_limits)
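The new multioutput plotting branch recovers the rows belonging to one output by filtering on the last (index) column before building the 1D plotting frame. A self-contained numpy sketch of that row selection (the data values are illustrative):

    import numpy as np

    # stacked multioutput inputs: last column is the output index
    X = np.array([[0.0, 0], [0.5, 0], [1.0, 1], [1.5, 1], [2.0, 1]])
    output = 1

    # keep only rows of the requested output, dropping the index column
    Xu = X[X[:, -1] == output, 0:1]
    print(Xu)   # [[1.0], [1.5], [2.0]]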
@@ -5,7 +5,7 @@ import numpy as np
 import pylab as pb
 from ..util.linalg import mdot, jitchol, tdot, symmetrify, backsub_both_sides, chol_inv, dtrtrs, dpotrs, dpotri
 from scipy import linalg
-from ..likelihoods import Gaussian
+from ..likelihoods import Gaussian, EP,EP_Mixed_Noise
 from gp_base import GPBase

 class SparseGP(GPBase):
@@ -314,3 +314,37 @@ class SparseGP(GPBase):
         elif self.X.shape[1] == 2:
             Zu = self.Z * self._Xscale + self._Xoffset
             ax.plot(Zu[:, 0], Zu[:, 1], 'wo')
+
+    def predict_single_output(self, Xnew, output=0, which_parts='all', full_cov=False):
+        """
+        Predict the function(s) at the new point(s) Xnew.
+
+        Arguments
+        ---------
+        :param Xnew: The points at which to make a prediction
+        :type Xnew: np.ndarray, Nnew x self.input_dim
+        :param which_parts: specifies which outputs kernel(s) to use in prediction
+        :type which_parts: ('all', list of bools)
+        :param full_cov: whether to return the full covariance matrix, or just the diagonal
+        :type full_cov: bool
+        :rtype: posterior mean, a Numpy array, Nnew x self.input_dim
+        :rtype: posterior variance, a Numpy array, Nnew x 1 if full_cov=False, Nnew x Nnew otherwise
+        :rtype: lower and upper boundaries of the 95% confidence intervals, Numpy arrays, Nnew x self.input_dim
+
+        If full_cov and self.input_dim > 1, the return shape of var is Nnew x Nnew x self.input_dim. If self.input_dim == 1, the return shape is Nnew x Nnew.
+        This is to allow for different normalizations of the output dimensions.
+        """
+        assert isinstance(self.likelihood,EP_Mixed_Noise)
+        index = np.ones_like(Xnew)*output
+        Xnew = np.hstack((Xnew,index))
+
+        # normalize X values
+        Xnew = (Xnew.copy() - self._Xoffset) / self._Xscale
+        mu, var = self._raw_predict(Xnew, full_cov=full_cov, which_parts=which_parts)
+
+        # now push through likelihood
+        mean, var, _025pm, _975pm = self.likelihood.predictive_values(mu, var, full_cov, noise_model = output)
+        return mean, var, _025pm, _975pm
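Inside predict_single_output, the tagged test points are mapped into the model's normalized input space using the stored per-column offset and scale before _raw_predict is called. A standalone sketch of that normalization step (the attribute names _Xoffset and _Xscale come from the diff; the numeric values here are made up):

    import numpy as np

    Xnew = np.array([[1.0, 0.0], [2.0, 0.0]])   # [x, output_id], already tagged
    _Xoffset = np.array([1.5, 0.0])             # per-column offset (illustrative)
    _Xscale = np.array([0.5, 1.0])              # per-column scale; 1.0 leaves the index column untouched

    Xnorm = (Xnew.copy() - _Xoffset) / _Xscale  # same transform as in the diff
    print(Xnorm)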
@@ -14,8 +14,11 @@ class EP_Mixed_Noise(likelihood):

         Arguments
         ---------
-        epsilon : Convergence criterion, maximum squared difference allowed between mean updates to stop iterations (float)
-        noise_model : a likelihood function (see likelihood_functions.py)
+        :param data_list: list of outputs
+        :param noise_model_list: a list of noise models
+        :param epsilon: Convergence criterion, maximum squared difference allowed between mean updates to stop iterations
+        :type epsilon: float
+        :param power_ep: list of power ep parameters
         """
         assert len(data_list) == len(noise_model_list)
         self.noise_model_list = noise_model_list
@@ -60,6 +63,16 @@ class EP_Mixed_Noise(likelihood):
         self.trYYT = 0.

+    def predictive_values(self,mu,var,full_cov,noise_model):
+        """
+        Predicts the output given the GP
+
+        :param mu: GP's mean
+        :param var: GP's variance
+        :param full_cov: whether to return the full covariance matrix, or just the diagonal
+        :type full_cov: False|True
+        :param noise_model: noise model to use
+        :type noise_model: integer
+        """
         if full_cov:
             raise NotImplementedError, "Cannot make correlated predictions with an EP likelihood"
         #_mu = []
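For a Gaussian noise model, predictive_values reduces to adding the observation-noise variance to the latent GP variance and reading off the 2.5% and 97.5% quantiles, which is what the _025pm and _975pm return values denote. A hedged standalone sketch of that computation (this covers the single-Gaussian case only, not the EP_Mixed_Noise dispatch on noise_model):

    import numpy as np

    def gaussian_predictive_values(mu, var, noise_variance):
        """Predictive mean, variance and 95% bounds under Gaussian noise."""
        mean = mu                           # Gaussian noise leaves the mean unchanged
        pred_var = var + noise_variance     # add observation noise to the latent variance
        std = np.sqrt(pred_var)
        _025pm = mean - 1.96 * std          # lower 95% bound
        _975pm = mean + 1.96 * std          # upper 95% bound
        return mean, pred_var, _025pm, _975pm

    mu = np.array([[0.2], [1.1]])
    var = np.array([[0.05], [0.10]])
    print(gaussian_predictive_values(mu, var, noise_variance=0.01))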
@@ -12,3 +12,4 @@ from warped_gp import WarpedGP
 from bayesian_gplvm import BayesianGPLVM
 from mrd import MRD
 from gp_multioutput import GPMultioutput
+from sparse_gp_multioutput import SparseGPMultioutput
@@ -19,7 +19,7 @@ class GPMultioutput(GP):
     :param X_list: input observations
     :param Y_list: observed values
     :param L_list: a GPy likelihood, defaults to Binomial with probit link_function
-    :param kernel: a GPy kernel, defaults to rbf
+    :param kernel_list: a GPy kernel, defaults to rbf
     :param normalize_X: whether to normalize the input data before computing (predictions will be in original scales)
     :type normalize_X: False|True
     :param normalize_Y: whether to normalize the input data before computing (predictions will be in original scales)
@@ -29,28 +29,64 @@ class GPMultioutput(GP):

     """

-    def __init__(self,X_list,Y_list=None,likelihood=None,kernel=None,normalize_X=False,normalize_Y=False,W=1):
+    def __init__(self,X_list,Y_list,noise_list=[],kernel_list=None,normalize_X=False,normalize_Y=False,W=1): #TODO W

-        if likelihood is None:
-            noise_model_list = [likelihoods.gaussian(variance=1.) for Y in Y_list]
-            likelihood = likelihoods.EP_Mixed_Noise(Y_list, noise_model_list)
+        assert len(X_list) == len(Y_list)
+        index = []
+        i = 0
+        for x,y in zip(X_list,Y_list):
+            assert x.shape[0] == y.shape[0]
+            index.append(np.repeat(i,y.size)[:,None])
+            i += 1
+        index = np.vstack(index)

-        elif Y_list is not None:
-            if not all(np.vstack(Y_list).flatten() == likelihood.data.flatten()):
-                raise Warning, 'likelihood.data and Y_list values are different.'
+        if noise_list == []:
+            likelihood_list = []
+            for Y in Y_list:
+                likelihood_list.append(likelihoods.Gaussian(Y,normalize = normalize_Y))

-        X = np.hstack([np.vstack(X_list),likelihood.index])
+        Y = np.vstack([l_.Y for l_ in likelihood_list])
+        likelihood = likelihoods.Gaussian(Y,normalize=False)
+        likelihood.index = index

-        if kernel is None:
+        X = np.hstack([np.vstack(X_list),index])
+
+        if kernel_list is None:
             original_dim = X.shape[1]-1
-            kernel = kern.rbf(original_dim) + kern.white(original_dim)
-
-        mkernel = kernel.prod(kern.coregionalise(len(X_list),W),tensor=True) #TODO W
-
-        #kern1 = kern.rbf(1) + kern.white(1)
-        #kern2 = kern.coregionalise(2,1)
-        #kern3 = kern1.prod(kern2,tensor=True)
+            kernel_list = [kern.rbf(original_dim) + kern.white(original_dim)]

+        mkernel = kernel_list[0].prod(kern.coregionalise(len(X_list),W),tensor=True)
+        for k in kernel_list[1:]:
+            mkernel += k.prod(kern.coregionalise(len(X_list),W),tensor=True)
+        self.multioutput = True
         GP.__init__(self, X, likelihood, mkernel, normalize_X=normalize_X)
         self.ensure_default_constraints()

+        """
+        if likelihood is None:
+            noise_model_list = []
+            for Y in Y_list:
+                noise_model_list.append(likelihoods.Gaussian(Y,normalize = normalize_Y))
+            #noise_model_list = [likelihoods.gaussian(variance=1.) for Y in Y_list]
+            #likelihood = likelihoods.EP_Mixed_Noise(Y_list, noise_model_list)
+
+        elif Y_list is not None:
+            if not all(np.vstack(Y_list).flatten() == likelihood.data.flatten()):
+                raise Warning, 'likelihood.data and Y_list values are different.'
+
+        X = np.hstack([np.vstack(X_list),likelihood.index])
+
+        if kernel_list is None:
+            original_dim = X.shape[1]-1
+            kernel_list = [kern.rbf(original_dim) + kern.white(original_dim)]
+
+        mkernel = kernel_list[0].prod(kern.coregionalise(len(X_list),W),tensor=True) #TODO W
+        for k in kernel_list[1:]:
+            mkernel += k.prod(kern.coregionalise(len(X_list),W),tensor=True) #TODO W
+
+        #kern1 = kern.rbf(1) + kern.white(1)
+        #kern2 = kern.coregionalise(2,1)
+        #kern3 = kern1.prod(kern2,tensor=True)
+        """