Fixing W_columns and num_outputs nomenclature

This commit is contained in:
Ricardo 2013-09-23 17:29:33 +01:00
parent c800e0687f
commit 0ae9f9aafd
4 changed files with 36 additions and 21 deletions

View file

@ -128,10 +128,9 @@ class GPBase(Model):
else: else:
raise NotImplementedError, "Cannot define a frame with more than two input dimensions" raise NotImplementedError, "Cannot define a frame with more than two input dimensions"
else: else:
assert self.num_outputs > output, 'The model has only %s outputs.' %self.num_outputs assert len(self.likelihood.noise_model_list) > output, 'The model has only %s outputs.' %self.num_outputs
if self.X.shape[1] == 2: if self.X.shape[1] == 2:
assert self.num_outputs >= output, 'The model has only %s outputs.' %self.num_outputs
Xu = self.X[self.X[:,-1]==output ,0:1] Xu = self.X[self.X[:,-1]==output ,0:1]
Xnew, xmin, xmax = x_frame1D(Xu, plot_limits=plot_limits) Xnew, xmin, xmax = x_frame1D(Xu, plot_limits=plot_limits)
@ -263,7 +262,7 @@ class GPBase(Model):
raise NotImplementedError, "Cannot define a frame with more than two input dimensions" raise NotImplementedError, "Cannot define a frame with more than two input dimensions"
else: else:
assert self.num_outputs > output, 'The model has only %s outputs.' %self.num_outputs assert len(self.likelihood.noise_model_list) > output, 'The model has only %s outputs.' %self.num_outputs
if self.X.shape[1] == 2: if self.X.shape[1] == 2:
resolution = resolution or 200 resolution = resolution or 200
Xu = self.X[self.X[:,-1]==output,:] #keep the output of interest Xu = self.X[self.X[:,-1]==output,:] #keep the output of interest
@ -287,3 +286,20 @@ class GPBase(Model):
else: else:
raise NotImplementedError, "Cannot define a frame with more than two input dimensions" raise NotImplementedError, "Cannot define a frame with more than two input dimensions"
"""
def samples_f(self,X,samples=10, which_data='all', which_parts='all',output=None):
if which_data == 'all':
which_data = slice(None)
if hasattr(self,'multioutput'):
np.hstack([X,np.ones((X.shape[0],1))*output])
m, v = self._raw_predict(X, which_parts=which_parts, full_cov=True)
v = v.reshape(m.size,-1) if len(v.shape)==3 else v
Ysim = np.random.multivariate_normal(m.flatten(), v, samples)
#gpplot(X, m, m - 2 * np.sqrt(np.diag(v)[:, None]), m + 2 * np.sqrt(np.diag(v))[:, None, ], axes=ax)
for i in range(samples):
ax.plot(X, Ysim[i, :], Tango.colorsHex['darkBlue'], linewidth=0.25)
"""

View file

@ -367,9 +367,8 @@ class SparseGP(GPBase):
ax.plot(Zu[:, 0], Zu[:, 1], 'wo') ax.plot(Zu[:, 0], Zu[:, 1], 'wo')
else: else:
pass
"""
if self.X.shape[1] == 2 and hasattr(self,'multioutput'): if self.X.shape[1] == 2 and hasattr(self,'multioutput'):
"""
Xu = self.X[self.X[:,-1]==output,:] Xu = self.X[self.X[:,-1]==output,:]
if self.has_uncertain_inputs: if self.has_uncertain_inputs:
Xu = self.X * self._Xscale + self._Xoffset # NOTE self.X are the normalized values now Xu = self.X * self._Xscale + self._Xoffset # NOTE self.X are the normalized values now
@ -380,6 +379,7 @@ class SparseGP(GPBase):
xerr=2 * np.sqrt(self.X_variance[which_data, 0]), xerr=2 * np.sqrt(self.X_variance[which_data, 0]),
ecolor='k', fmt=None, elinewidth=.5, alpha=.5) ecolor='k', fmt=None, elinewidth=.5, alpha=.5)
"""
Zu = self.Z[self.Z[:,-1]==output,:] Zu = self.Z[self.Z[:,-1]==output,:]
Zu = self.Z * self._Xscale + self._Xoffset Zu = self.Z * self._Xscale + self._Xoffset
Zu = self.Z[self.Z[:,-1]==output ,0:1] #?? Zu = self.Z[self.Z[:,-1]==output ,0:1] #??
@ -388,7 +388,6 @@ class SparseGP(GPBase):
else: else:
raise NotImplementedError, "Cannot define a frame with more than two input dimensions" raise NotImplementedError, "Cannot define a frame with more than two input dimensions"
"""
def predict_single_output(self, Xnew, output=0, which_parts='all', full_cov=False): def predict_single_output(self, Xnew, output=0, which_parts='all', full_cov=False):
""" """

View file

@ -25,14 +25,14 @@ class GPMultioutputRegression(GP):
:type normalize_X: False|True :type normalize_X: False|True
:param normalize_Y: whether to normalize the input data before computing (predictions will be in original scales) :param normalize_Y: whether to normalize the input data before computing (predictions will be in original scales)
:type normalize_Y: False|True :type normalize_Y: False|True
:param W_columns: number of tuples of the coregionalization parameters 'coregion_W' (see coregionalize kernel documentation) :param rank: number of tuples of the coregionalization parameters 'coregion_W' (see coregionalize kernel documentation)
:type W_columns: integer :type rank: integer
""" """
def __init__(self,X_list,Y_list,kernel_list=None,noise_variance_list=None,normalize_X=False,normalize_Y=False,W_columns=1): def __init__(self,X_list,Y_list,kernel_list=None,noise_variance_list=None,normalize_X=False,normalize_Y=False,rank=1):
self.num_outputs = len(Y_list) self.output_dim = len(Y_list)
assert len(X_list) == self.num_outputs, 'Number of outputs do not match length of inputs list.' assert len(X_list) == self.output_dim, 'Number of outputs do not match length of inputs list.'
#Inputs indexing #Inputs indexing
i = 0 i = 0
@ -51,7 +51,7 @@ class GPMultioutputRegression(GP):
#Coregionalization kernel definition #Coregionalization kernel definition
if kernel_list is None: if kernel_list is None:
kernel_list = [kern.rbf(original_dim)] kernel_list = [kern.rbf(original_dim)]
mkernel = kern.build_lcm(input_dim=original_dim, num_outputs=self.num_outputs, kernel_list = kernel_list, W_columns=W_columns) mkernel = kern.build_lcm(input_dim=original_dim, output_dim=self.output_dim, kernel_list = kernel_list, rank=rank)
self.multioutput = True self.multioutput = True
GP.__init__(self, X, likelihood, mkernel, normalize_X=normalize_X) GP.__init__(self, X, likelihood, mkernel, normalize_X=normalize_X)

View file

@ -30,23 +30,23 @@ class SparseGPMultioutputRegression(SparseGP):
:type Z_list: list of numpy arrays (num_inducing_output_i x input_dim), one array per output | empty list :type Z_list: list of numpy arrays (num_inducing_output_i x input_dim), one array per output | empty list
:param num_inducing: number of inducing inputs per output, defaults to 10 (ignored if Z_list is not empty) :param num_inducing: number of inducing inputs per output, defaults to 10 (ignored if Z_list is not empty)
:type num_inducing: integer :type num_inducing: integer
:param W_columns: number of tuples of the coregionalization parameters 'coregion_W' (see coregionalize kernel documentation) :param rank: number of tuples of the coregionalization parameters 'coregion_W' (see coregionalize kernel documentation)
:type W_columns: integer :type rank: integer
""" """
#NOTE not tested with uncertain inputs #NOTE not tested with uncertain inputs
def __init__(self,X_list,Y_list,kernel_list=None,noise_variance_list=None,normalize_X=False,normalize_Y=False,Z_list=[],num_inducing=10,W_columns=1): def __init__(self,X_list,Y_list,kernel_list=None,noise_variance_list=None,normalize_X=False,normalize_Y=False,Z_list=[],num_inducing=10,rank=1):
self.num_outputs = len(Y_list) self.output_dim = len(Y_list)
assert len(X_list) == self.num_outputs, 'Number of outputs do not match length of inputs list.' assert len(X_list) == self.output_dim, 'Number of outputs do not match length of inputs list.'
#Inducing inputs list #Inducing inputs list
if len(Z_list): if len(Z_list):
assert len(Z_list) == self.num_outputs, 'Number of outputs do not match length of inducing inputs list.' assert len(Z_list) == self.output_dim, 'Number of outputs do not match length of inducing inputs list.'
else: else:
if isinstance(num_inducing,np.int): if isinstance(num_inducing,np.int):
num_inducing = [num_inducing] * self.num_outputs num_inducing = [num_inducing] * self.output_dim
num_inducing = np.asarray(num_inducing) num_inducing = np.asarray(num_inducing)
assert num_inducing.size == self.num_outputs, 'Number of outputs do not match length of inducing inputs list.' assert num_inducing.size == self.output_dim, 'Number of outputs do not match length of inducing inputs list.'
for ni,X in zip(num_inducing,X_list): for ni,X in zip(num_inducing,X_list):
i = np.random.permutation(X.shape[0])[:ni] i = np.random.permutation(X.shape[0])[:ni]
Z_list.append(X[i].copy()) Z_list.append(X[i].copy())
@ -72,7 +72,7 @@ class SparseGPMultioutputRegression(SparseGP):
#Coregionalization kernel definition #Coregionalization kernel definition
if kernel_list is None: if kernel_list is None:
kernel_list = [kern.rbf(original_dim)] kernel_list = [kern.rbf(original_dim)]
mkernel = kern.build_lcm(input_dim=original_dim, num_outputs=self.num_outputs, kernel_list = kernel_list, W_columns=W_columns) mkernel = kern.build_lcm(input_dim=original_dim, output_dim=self.output_dim, kernel_list = kernel_list, rank=rank)
self.multioutput = True self.multioutput = True
SparseGP.__init__(self, X, likelihood, mkernel, Z=Z, normalize_X=normalize_X) SparseGP.__init__(self, X, likelihood, mkernel, Z=Z, normalize_X=normalize_X)