Nparam changes to num_params

This commit is contained in:
James Hensman 2013-10-08 14:49:18 +01:00
parent 39eb0368d8
commit a59d980327
10 changed files with 14 additions and 14 deletions

View file

@@ -126,7 +126,7 @@ class FITC(SparseGP):
self._dpsi1_dX += self.kern.dK_dX(_dpsi1.T,self.Z,self.X[i:i+1,:]) self._dpsi1_dX += self.kern.dK_dX(_dpsi1.T,self.Z,self.X[i:i+1,:])
# the partial derivative vector for the likelihood # the partial derivative vector for the likelihood
if self.likelihood.Nparams == 0: if self.likelihood.num_params == 0:
# save computation here. # save computation here.
self.partial_for_likelihood = None self.partial_for_likelihood = None
elif self.likelihood.is_heteroscedastic: elif self.likelihood.is_heteroscedastic:

View file

@@ -156,7 +156,7 @@ class SparseGP(GPBase):
# the partial derivative vector for the likelihood # the partial derivative vector for the likelihood
if self.likelihood.Nparams == 0: if self.likelihood.num_params == 0:
# save computation here. # save computation here.
self.partial_for_likelihood = None self.partial_for_likelihood = None
elif self.likelihood.is_heteroscedastic: elif self.likelihood.is_heteroscedastic:

View file

@@ -113,7 +113,7 @@ class PeriodicMatern32(Kernpart):
@silence_errors @silence_errors
def dK_dtheta(self,dL_dK,X,X2,target): def dK_dtheta(self,dL_dK,X,X2,target):
"""derivative of the covariance matrix with respect to the parameters (shape is Nxnum_inducingxNparam)""" """derivative of the covariance matrix with respect to the parameters (shape is num_data x num_inducing x num_params)"""
if X2 is None: X2 = X if X2 is None: X2 = X
FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X) FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X)
FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2) FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2)

View file

@@ -115,7 +115,7 @@ class PeriodicMatern52(Kernpart):
@silence_errors @silence_errors
def dK_dtheta(self,dL_dK,X,X2,target): def dK_dtheta(self,dL_dK,X,X2,target):
"""derivative of the covariance matrix with respect to the parameters (shape is Nxnum_inducingxNparam)""" """derivative of the covariance matrix with respect to the parameters (shape is num_data x num_inducing x num_params)"""
if X2 is None: X2 = X if X2 is None: X2 = X
FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X) FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X)
FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2) FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2)

View file

@@ -111,7 +111,7 @@ class PeriodicExponential(Kernpart):
@silence_errors @silence_errors
def dK_dtheta(self,dL_dK,X,X2,target): def dK_dtheta(self,dL_dK,X,X2,target):
"""derivative of the covariance matrix with respect to the parameters (shape is Nxnum_inducingxNparam)""" """derivative of the covariance matrix with respect to the parameters (shape is N x num_inducing x num_params)"""
if X2 is None: X2 = X if X2 is None: X2 = X
FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X) FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X)
FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2) FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2)

View file

@@ -18,7 +18,7 @@ class EP(likelihood):
self.data = data self.data = data
self.num_data, self.output_dim = self.data.shape self.num_data, self.output_dim = self.data.shape
self.is_heteroscedastic = True self.is_heteroscedastic = True
self.Nparams = 0 self.num_params = 0
self._transf_data = self.noise_model._preprocess_values(data) self._transf_data = self.noise_model._preprocess_values(data)
#Initial values - Likelihood approximation parameters: #Initial values - Likelihood approximation parameters:

View file

@@ -31,7 +31,7 @@ class EP_Mixed_Noise(likelihood):
self.data = np.vstack(data_list) self.data = np.vstack(data_list)
self.N, self.output_dim = self.data.shape self.N, self.output_dim = self.data.shape
self.is_heteroscedastic = True self.is_heteroscedastic = True
self.Nparams = 0#FIXME self.num_params = 0#FIXME
self._transf_data = np.vstack([noise_model._preprocess_values(data) for noise_model,data in zip(noise_model_list,data_list)]) self._transf_data = np.vstack([noise_model._preprocess_values(data) for noise_model,data in zip(noise_model_list,data_list)])
#TODO non-gaussian index #TODO non-gaussian index

View file

@@ -15,7 +15,7 @@ class Gaussian(likelihood):
""" """
def __init__(self, data, variance=1., normalize=False): def __init__(self, data, variance=1., normalize=False):
self.is_heteroscedastic = False self.is_heteroscedastic = False
self.Nparams = 1 self.num_params = 1
self.Z = 0. # a correction factor which accounts for the approximation made self.Z = 0. # a correction factor which accounts for the approximation made
N, self.output_dim = data.shape N, self.output_dim = data.shape

View file

@@ -23,14 +23,14 @@ class Gaussian_Mixed_Noise(likelihood):
:type normalize: False|True :type normalize: False|True
""" """
def __init__(self, data_list, noise_params=None, normalize=True): def __init__(self, data_list, noise_params=None, normalize=True):
self.Nparams = len(data_list) self.num_params = len(data_list)
self.n_list = [data.size for data in data_list] self.n_list = [data.size for data in data_list]
self.index = np.vstack([np.repeat(i,n)[:,None] for i,n in zip(range(self.Nparams),self.n_list)]) self.index = np.vstack([np.repeat(i,n)[:,None] for i,n in zip(range(self.num_params),self.n_list)])
if noise_params is None: if noise_params is None:
noise_params = [1.] * self.Nparams noise_params = [1.] * self.num_params
else: else:
assert self.Nparams == len(noise_params), 'Number of noise parameters does not match the number of noise models.' assert self.num_params == len(noise_params), 'Number of noise parameters does not match the number of noise models.'
self.noise_model_list = [Gaussian(Y,variance=v,normalize = normalize) for Y,v in zip(data_list,noise_params)] self.noise_model_list = [Gaussian(Y,variance=v,normalize = normalize) for Y,v in zip(data_list,noise_params)]
self.n_params = [noise_model._get_params().size for noise_model in self.noise_model_list] self.n_params = [noise_model._get_params().size for noise_model in self.noise_model_list]

View file

@@ -211,8 +211,8 @@ class MRD(Model):
# g.Z = Z.reshape(self.num_inducing, self.input_dim) # g.Z = Z.reshape(self.num_inducing, self.input_dim)
# #
# def _set_kern_params(self, g, p): # def _set_kern_params(self, g, p):
# g.kern._set_params(p[:g.kern.Nparam]) # g.kern._set_params(p[:g.kern.num_params])
# g.likelihood._set_params(p[g.kern.Nparam:]) # g.likelihood._set_params(p[g.kern.num_params:])
def _set_params(self, x): def _set_params(self, x):
start = 0; end = self.NQ start = 0; end = self.NQ