Nparams > num_params and Nparam_transformed > num_params_transformed

This commit is contained in:
Max Zwiessele 2013-06-05 15:29:18 +01:00
parent 35c2a8b521
commit db78b233b8
25 changed files with 119 additions and 121 deletions

View file

@ -33,8 +33,8 @@ class GP(GPBase):
self._set_params(self._get_params())
def _set_params(self, p):
self.kern._set_params_transformed(p[:self.kern.Nparam_transformed()])
self.likelihood._set_params(p[self.kern.Nparam_transformed():])
self.kern._set_params_transformed(p[:self.kern.num_params_transformed()])
self.likelihood._set_params(p[self.kern.num_params_transformed():])
self.K = self.kern.K(self.X)
self.K += self.likelihood.covariance_matrix

View file

@ -24,7 +24,7 @@ class model(parameterised):
self.optimization_runs = []
self.sampling_runs = []
self.preferred_optimizer = 'scg'
#self._set_params(self._get_params()) has been taken out as it should only be called on leaf nodes
# self._set_params(self._get_params()) has been taken out as it should only be called on leaf nodes
def _get_params(self):
raise NotImplementedError, "this needs to be implemented to use the model class"
def _set_params(self, x):
@ -65,7 +65,7 @@ class model(parameterised):
if len(tie_matches) > 1:
raise ValueError, "cannot place Prior across multiple ties"
elif len(tie_matches) == 1:
which = which[:1] # just place a Prior object on the first parameter
which = which[:1] # just place a Prior object on the first parameter
# check constraints are okay
@ -147,10 +147,10 @@ class model(parameterised):
if self.priors is not None:
[np.put(x, i, p.rvs(1)) for i, p in enumerate(self.priors) if not p is None]
self._set_params(x)
self._set_params_transformed(self._get_params_transformed()) # makes sure all of the tied parameters get the same init (since there's only one prior object...)
self._set_params_transformed(self._get_params_transformed()) # makes sure all of the tied parameters get the same init (since there's only one prior object...)
def optimize_restarts(self, Nrestarts=10, robust=False, verbose=True, parallel=False, num_processes=None, **kwargs):
def optimize_restarts(self, num_restarts=10, robust=False, verbose=True, parallel=False, num_processes=None, **kwargs):
"""
Perform random restarts of the model, and set the model to the best
seen solution.
@ -179,19 +179,19 @@ class model(parameterised):
try:
jobs = []
pool = mp.Pool(processes=num_processes)
for i in range(Nrestarts):
for i in range(num_restarts):
self.randomize()
job = pool.apply_async(opt_wrapper, args=(self,), kwds=kwargs)
jobs.append(job)
pool.close() # signal that no more data coming in
pool.join() # wait for all the tasks to complete
pool.close() # signal that no more data coming in
pool.join() # wait for all the tasks to complete
except KeyboardInterrupt:
print "Ctrl+c received, terminating and joining pool."
pool.terminate()
pool.join()
for i in range(Nrestarts):
for i in range(num_restarts):
try:
if not parallel:
self.randomize()
@ -200,10 +200,10 @@ class model(parameterised):
self.optimization_runs.append(jobs[i].get())
if verbose:
print("Optimization restart {0}/{1}, f = {2}".format(i + 1, Nrestarts, self.optimization_runs[-1].f_opt))
print("Optimization restart {0}/{1}, f = {2}".format(i + 1, num_restarts, self.optimization_runs[-1].f_opt))
except Exception as e:
if robust:
print("Warning - optimization restart {0}/{1} failed".format(i + 1, Nrestarts))
print("Warning - optimization restart {0}/{1} failed".format(i + 1, num_restarts))
else:
raise e
@ -222,7 +222,7 @@ class model(parameterised):
currently_constrained = self.all_constrained_indices()
to_make_positive = []
for s in positive_strings:
for i in self.grep_param_names(".*"+s):
for i in self.grep_param_names(".*" + s):
if not (i in currently_constrained):
to_make_positive.append(i)
if len(to_make_positive):
@ -240,13 +240,13 @@ class model(parameterised):
Gets the gradients from the likelihood and the priors.
"""
self._set_params_transformed(x)
obj_grads = - self._transform_gradients(self._log_likelihood_gradients() + self._log_prior_gradients())
obj_grads = -self._transform_gradients(self._log_likelihood_gradients() + self._log_prior_gradients())
return obj_grads
def objective_and_gradients(self, x):
self._set_params_transformed(x)
obj_f = -self.log_likelihood() - self.log_prior()
obj_grads = - self._transform_gradients(self._log_likelihood_gradients() + self._log_prior_gradients())
obj_grads = -self._transform_gradients(self._log_likelihood_gradients() + self._log_prior_gradients())
return obj_f, obj_grads
def optimize(self, optimizer=None, start=None, **kwargs):
@ -315,7 +315,7 @@ class model(parameterised):
if self.priors is not None:
strs = [str(p) if p is not None else '' for p in self.priors]
else:
strs = ['']*len(self._get_params())
strs = [''] * len(self._get_params())
width = np.array(max([len(p) for p in strs] + [5])) + 4
log_like = self.log_likelihood()
@ -474,8 +474,8 @@ class model(parameterised):
ll_change = new_ll - last_ll
if ll_change < 0:
self.likelihood = last_approximation # restore previous likelihood approximation
self._set_params(last_params) # restore model parameters
self.likelihood = last_approximation # restore previous likelihood approximation
self._set_params(last_params) # restore model parameters
print "Log-likelihood decrement: %s \nLast likelihood update discarded." % ll_change
stop = True
else:

View file

@ -6,8 +6,6 @@ import numpy as np
import re
import copy
import cPickle
import os
from ..util.squashers import sigmoid
import warnings
import transformations
@ -113,7 +111,7 @@ class parameterised(object):
if hasattr(self, 'prior'):
pass
self._set_params_transformed(self._get_params_transformed()) # sets tied parameters to single value
self._set_params_transformed(self._get_params_transformed()) # sets tied parameters to single value
def untie_everything(self):
"""Unties all parameters by setting tied_indices to an empty list."""
@ -145,7 +143,7 @@ class parameterised(object):
else:
return np.nonzero([regexp.match(name) for name in names])[0]
def Nparam_transformed(self):
def num_params_transformed(self):
removed = 0
for tie in self.tied_indices:
removed += tie.size - 1
@ -159,18 +157,18 @@ class parameterised(object):
"""Unconstrain matching parameters. does not untie parameters"""
matches = self.grep_param_names(regexp)
#tranformed contraints:
# tranformed contraints:
for match in matches:
self.constrained_indices = [i[i<>match] for i in self.constrained_indices]
self.constrained_indices = [i[i <> match] for i in self.constrained_indices]
#remove empty constraints
tmp = zip(*[(i,t) for i,t in zip(self.constrained_indices,self.constraints) if len(i)])
# remove empty constraints
tmp = zip(*[(i, t) for i, t in zip(self.constrained_indices, self.constraints) if len(i)])
if tmp:
self.constrained_indices, self.constraints = zip(*[(i,t) for i,t in zip(self.constrained_indices,self.constraints) if len(i)])
self.constrained_indices, self.constraints = zip(*[(i, t) for i, t in zip(self.constrained_indices, self.constraints) if len(i)])
self.constrained_indices, self.constraints = list(self.constrained_indices), list(self.constraints)
# fixed:
self.fixed_values = [np.delete(values, np.nonzero(np.sum(indices[:, None] == matches[None, :], 1))[0]) for indices,values in zip(self.fixed_indices,self.fixed_values)]
self.fixed_values = [np.delete(values, np.nonzero(np.sum(indices[:, None] == matches[None, :], 1))[0]) for indices, values in zip(self.fixed_indices, self.fixed_values)]
self.fixed_indices = [np.delete(indices, np.nonzero(np.sum(indices[:, None] == matches[None, :], 1))[0]) for indices in self.fixed_indices]
# remove empty elements
@ -189,7 +187,7 @@ class parameterised(object):
""" Set positive constraints. """
self.constrain(regexp, transformations.logexp())
def constrain_bounded(self, regexp,lower, upper):
def constrain_bounded(self, regexp, lower, upper):
""" Set bounded constraints. """
self.constrain(regexp, transformations.logistic(lower, upper))
@ -199,8 +197,8 @@ class parameterised(object):
else:
return np.empty(shape=(0,))
def constrain(self,regexp,transform):
assert isinstance(transform,transformations.transformation)
def constrain(self, regexp, transform):
assert isinstance(transform, transformations.transformation)
matches = self.grep_param_names(regexp)
overlap = set(matches).intersection(set(self.all_constrained_indices()))
@ -251,7 +249,7 @@ class parameterised(object):
def _get_params_transformed(self):
"""use self._get_params to get the 'true' parameters of the model, which are then tied, constrained and fixed"""
x = self._get_params()
[np.put(x,i,t.finv(x[i])) for i,t in zip(self.constrained_indices,self.constraints)]
[np.put(x, i, t.finv(x[i])) for i, t in zip(self.constrained_indices, self.constraints)]
to_remove = self.fixed_indices + [t[1:] for t in self.tied_indices]
if len(to_remove):
@ -263,7 +261,7 @@ class parameterised(object):
""" takes the vector x, which is then modified (by untying, reparameterising or inserting fixed values), and then call self._set_params"""
self._set_params(self._untransform_params(x))
def _untransform_params(self,x):
def _untransform_params(self, x):
"""
The transformation required for _set_params_transformed.
@ -290,9 +288,9 @@ class parameterised(object):
[np.put(xx, i, v) for i, v in zip(self.fixed_indices, self.fixed_values)]
[np.put(xx, i, v) for i, v in [(t[1:], xx[t[0]]) for t in self.tied_indices] ]
[np.put(xx,i,t.f(xx[i])) for i,t in zip(self.constrained_indices, self.constraints)]
if hasattr(self,'debug'):
stop
[np.put(xx, i, t.f(xx[i])) for i, t in zip(self.constrained_indices, self.constraints)]
if hasattr(self, 'debug'):
stop # @UndefinedVariable
return xx
@ -316,7 +314,7 @@ class parameterised(object):
remove = np.hstack((remove, np.hstack(self.fixed_indices)))
# add markers to show that some variables are constrained
for i,t in zip(self.constrained_indices,self.constraints):
for i, t in zip(self.constrained_indices, self.constraints):
for ii in i:
n[ii] = n[ii] + t.__str__()
@ -333,10 +331,10 @@ class parameterised(object):
if not N:
return "This object has no free parameters."
header = ['Name', 'Value', 'Constraints', 'Ties']
values = self._get_params() # map(str,self._get_params())
values = self._get_params() # map(str,self._get_params())
# sort out the constraints
constraints = [''] * len(names)
for i,t in zip(self.constrained_indices,self.constraints):
for i, t in zip(self.constrained_indices, self.constraints):
for ii in i:
constraints[ii] = t.__str__()
for i in self.fixed_indices:
@ -354,7 +352,7 @@ class parameterised(object):
max_constraint = max([len(constraints[i]) for i in range(len(constraints))] + [len(header[2])])
max_ties = max([len(ties[i]) for i in range(len(ties))] + [len(header[3])])
cols = np.array([max_names, max_values, max_constraint, max_ties]) + 4
columns = cols.sum()
# columns = cols.sum()
header_string = ["{h:^{col}}".format(h=header[i], col=cols[i]) for i in range(len(cols))]
header_string = map(lambda x: '|'.join(x), [header_string])

View file

@ -153,8 +153,8 @@ class SparseGP(GPBase):
def _set_params(self, p):
self.Z = p[:self.num_inducing * self.output_dim].reshape(self.num_inducing, self.input_dim)
self.kern._set_params(p[self.Z.size:self.Z.size + self.kern.Nparam])
self.likelihood._set_params(p[self.Z.size + self.kern.Nparam:])
self.kern._set_params(p[self.Z.size:self.Z.size + self.kern.num_params])
self.likelihood._set_params(p[self.Z.size + self.kern.num_params:])
self._compute_kernel_matrices()
self._computations()

View file

@ -33,7 +33,7 @@ def tuto_GP_regression():
m.optimize()
m.optimize_restarts(Nrestarts = 10)
m.optimize_restarts(num_restarts = 10)
###########################
# 2-dimensional example #

View file

@ -21,7 +21,7 @@ class Brownian(kernpart):
def __init__(self,input_dim,variance=1.):
self.input_dim = input_dim
assert self.input_dim==1, "Brownian motion in 1D only"
self.Nparam = 1.
self.num_params = 1.
self.name = 'Brownian'
self._set_params(np.array([variance]).flatten())

View file

@ -32,7 +32,7 @@ class Matern32(kernpart):
self.input_dim = input_dim
self.ARD = ARD
if ARD == False:
self.Nparam = 2
self.num_params = 2
self.name = 'Mat32'
if lengthscale is not None:
lengthscale = np.asarray(lengthscale)
@ -40,7 +40,7 @@ class Matern32(kernpart):
else:
lengthscale = np.ones(1)
else:
self.Nparam = self.input_dim + 1
self.num_params = self.input_dim + 1
self.name = 'Mat32'
if lengthscale is not None:
lengthscale = np.asarray(lengthscale)
@ -55,13 +55,13 @@ class Matern32(kernpart):
def _set_params(self,x):
"""set the value of the parameters."""
assert x.size == self.Nparam
assert x.size == self.num_params
self.variance = x[0]
self.lengthscale = x[1:]
def _get_param_names(self):
"""return parameter names."""
if self.Nparam == 2:
if self.num_params == 2:
return ['variance','lengthscale']
else:
return ['variance']+['lengthscale_%i'%i for i in range(self.lengthscale.size)]

View file

@ -30,7 +30,7 @@ class Matern52(kernpart):
self.input_dim = input_dim
self.ARD = ARD
if ARD == False:
self.Nparam = 2
self.num_params = 2
self.name = 'Mat52'
if lengthscale is not None:
lengthscale = np.asarray(lengthscale)
@ -38,7 +38,7 @@ class Matern52(kernpart):
else:
lengthscale = np.ones(1)
else:
self.Nparam = self.input_dim + 1
self.num_params = self.input_dim + 1
self.name = 'Mat52'
if lengthscale is not None:
lengthscale = np.asarray(lengthscale)
@ -53,13 +53,13 @@ class Matern52(kernpart):
def _set_params(self,x):
"""set the value of the parameters."""
assert x.size == self.Nparam
assert x.size == self.num_params
self.variance = x[0]
self.lengthscale = x[1:]
def _get_param_names(self):
"""return parameter names."""
if self.Nparam == 2:
if self.num_params == 2:
return ['variance','lengthscale']
else:
return ['variance']+['lengthscale_%i'%i for i in range(self.lengthscale.size)]

View file

@ -15,7 +15,7 @@ class bias(kernpart):
:type variance: float
"""
self.input_dim = input_dim
self.Nparam = 1
self.num_params = 1
self.name = 'bias'
self._set_params(np.array([variance]).flatten())

View file

@ -26,14 +26,14 @@ class Coregionalise(kernpart):
else:
assert kappa.shape==(self.Nout,)
self.kappa = kappa
self.Nparam = self.Nout*(self.R + 1)
self.num_params = self.Nout*(self.R + 1)
self._set_params(np.hstack([self.W.flatten(),self.kappa]))
def _get_params(self):
return np.hstack([self.W.flatten(),self.kappa])
def _set_params(self,x):
assert x.size == self.Nparam
assert x.size == self.num_params
self.kappa = x[-self.Nout:]
self.W = x[:-self.Nout].reshape(self.Nout,self.R)
self.B = np.dot(self.W,self.W.T) + np.diag(self.kappa)

View file

@ -27,7 +27,7 @@ class kern(parameterised):
"""
self.parts = parts
self.Nparts = len(parts)
self.Nparam = sum([p.Nparam for p in self.parts])
self.num_params = sum([p.num_params for p in self.parts])
self.input_dim = input_dim
@ -80,8 +80,8 @@ class kern(parameterised):
self.param_slices = []
count = 0
for p in self.parts:
self.param_slices.append(slice(count, count + p.Nparam))
count += p.Nparam
self.param_slices.append(slice(count, count + p.num_params))
count += p.num_params
def __add__(self, other):
"""
@ -104,21 +104,21 @@ class kern(parameterised):
newkern = kern(D, self.parts + other.parts, self_input_slices + other_input_slices)
# transfer constraints:
newkern.constrained_indices = self.constrained_indices + [x + self.Nparam for x in other.constrained_indices]
newkern.constrained_indices = self.constrained_indices + [x + self.num_params for x in other.constrained_indices]
newkern.constraints = self.constraints + other.constraints
newkern.fixed_indices = self.fixed_indices + [self.Nparam + x for x in other.fixed_indices]
newkern.fixed_indices = self.fixed_indices + [self.num_params + x for x in other.fixed_indices]
newkern.fixed_values = self.fixed_values + other.fixed_values
newkern.constraints = self.constraints + other.constraints
newkern.tied_indices = self.tied_indices + [self.Nparam + x for x in other.tied_indices]
newkern.tied_indices = self.tied_indices + [self.num_params + x for x in other.tied_indices]
else:
assert self.input_dim == other.input_dim
newkern = kern(self.input_dim, self.parts + other.parts, self.input_slices + other.input_slices)
# transfer constraints:
newkern.constrained_indices = self.constrained_indices + [i + self.Nparam for i in other.constrained_indices]
newkern.constrained_indices = self.constrained_indices + [i + self.num_params for i in other.constrained_indices]
newkern.constraints = self.constraints + other.constraints
newkern.fixed_indices = self.fixed_indices + [self.Nparam + x for x in other.fixed_indices]
newkern.fixed_indices = self.fixed_indices + [self.num_params + x for x in other.fixed_indices]
newkern.fixed_values = self.fixed_values + other.fixed_values
newkern.tied_indices = self.tied_indices + [self.Nparam + x for x in other.tied_indices]
newkern.tied_indices = self.tied_indices + [self.num_params + x for x in other.tied_indices]
return newkern
def __mul__(self, other):
@ -158,13 +158,13 @@ class kern(parameterised):
K1_param = []
n = 0
for k1 in K1.parts:
K1_param += [range(n, n + k1.Nparam)]
n += k1.Nparam
K1_param += [range(n, n + k1.num_params)]
n += k1.num_params
n = 0
K2_param = []
for k2 in K2.parts:
K2_param += [range(K1.Nparam + n, K1.Nparam + n + k2.Nparam)]
n += k2.Nparam
K2_param += [range(K1.num_params + n, K1.num_params + n + k2.num_params)]
n += k2.num_params
index_param = []
for p1 in K1_param:
for p2 in K2_param:
@ -172,12 +172,12 @@ class kern(parameterised):
index_param = np.array(index_param)
# Get the ties and constrains of the kernels before the multiplication
prev_ties = K1.tied_indices + [arr + K1.Nparam for arr in K2.tied_indices]
prev_ties = K1.tied_indices + [arr + K1.num_params for arr in K2.tied_indices]
prev_constr_ind = [K1.constrained_indices] + [K1.Nparam + i for i in K2.constrained_indices]
prev_constr_ind = [K1.constrained_indices] + [K1.num_params + i for i in K2.constrained_indices]
prev_constr = K1.constraints + K2.constraints
# prev_constr_fix = K1.fixed_indices + [arr + K1.Nparam for arr in K2.fixed_indices]
# prev_constr_fix = K1.fixed_indices + [arr + K1.num_params for arr in K2.fixed_indices]
# prev_constr_fix_values = K1.fixed_values + K2.fixed_values
# follow the previous ties
@ -186,7 +186,7 @@ class kern(parameterised):
index_param[np.where(index_param == j)[0]] = arr[0]
# ties and constrains
for i in range(K1.Nparam + K2.Nparam):
for i in range(K1.num_params + K2.num_params):
index = np.where(index_param == i)[0]
if index.size > 1:
self.tie_params(index)
@ -230,7 +230,7 @@ class kern(parameterised):
:type X2: np.ndarray (M x input_dim)
"""
assert X.shape[1] == self.input_dim
target = np.zeros(self.Nparam)
target = np.zeros(self.num_params)
if X2 is None:
[p.dK_dtheta(dL_dK, X[:, i_s], None, target[ps]) for p, i_s, ps, in zip(self.parts, self.input_slices, self.param_slices)]
else:
@ -259,7 +259,7 @@ class kern(parameterised):
def dKdiag_dtheta(self, dL_dKdiag, X):
assert X.shape[1] == self.input_dim
assert dL_dKdiag.size == X.shape[0]
target = np.zeros(self.Nparam)
target = np.zeros(self.num_params)
[p.dKdiag_dtheta(dL_dKdiag, X[:, i_s], target[ps]) for p, i_s, ps in zip(self.parts, self.input_slices, self.param_slices)]
return self._transform_gradients(target)
@ -275,7 +275,7 @@ class kern(parameterised):
return target
def dpsi0_dtheta(self, dL_dpsi0, Z, mu, S):
target = np.zeros(self.Nparam)
target = np.zeros(self.num_params)
[p.dpsi0_dtheta(dL_dpsi0, Z[:, i_s], mu[:, i_s], S[:, i_s], target[ps]) for p, ps, i_s in zip(self.parts, self.param_slices, self.input_slices)]
return self._transform_gradients(target)
@ -290,7 +290,7 @@ class kern(parameterised):
return target
def dpsi1_dtheta(self, dL_dpsi1, Z, mu, S):
target = np.zeros((self.Nparam))
target = np.zeros((self.num_params))
[p.dpsi1_dtheta(dL_dpsi1, Z[:, i_s], mu[:, i_s], S[:, i_s], target[ps]) for p, ps, i_s in zip(self.parts, self.param_slices, self.input_slices)]
return self._transform_gradients(target)
@ -333,7 +333,7 @@ class kern(parameterised):
return target
def dpsi2_dtheta(self, dL_dpsi2, Z, mu, S):
target = np.zeros(self.Nparam)
target = np.zeros(self.num_params)
[p.dpsi2_dtheta(dL_dpsi2, Z[:, i_s], mu[:, i_s], S[:, i_s], target[ps]) for p, i_s, ps in zip(self.parts, self.input_slices, self.param_slices)]
# compute the "cross" terms

View file

@ -13,7 +13,7 @@ class kernpart(object):
Do not instantiate.
"""
self.input_dim = input_dim
self.Nparam = 1
self.num_params = 1
self.name = 'unnamed'
def _get_params(self):

View file

@ -28,7 +28,7 @@ class linear(kernpart):
self.input_dim = input_dim
self.ARD = ARD
if ARD == False:
self.Nparam = 1
self.num_params = 1
self.name = 'linear'
if variances is not None:
variances = np.asarray(variances)
@ -37,7 +37,7 @@ class linear(kernpart):
variances = np.ones(1)
self._Xcache, self._X2cache = np.empty(shape=(2,))
else:
self.Nparam = self.input_dim
self.num_params = self.input_dim
self.name = 'linear'
if variances is not None:
variances = np.asarray(variances)
@ -54,12 +54,12 @@ class linear(kernpart):
return self.variances
def _set_params(self, x):
assert x.size == (self.Nparam)
assert x.size == (self.num_params)
self.variances = x
self.variances2 = np.square(self.variances)
def _get_param_names(self):
if self.Nparam == 1:
if self.num_params == 1:
return ['variance']
else:
return ['variance_%i' % i for i in range(self.variances.size)]

View file

@ -35,7 +35,7 @@ class periodic_Matern32(kernpart):
else:
lengthscale = np.ones(1)
self.lower,self.upper = lower, upper
self.Nparam = 3
self.num_params = 3
self.n_freq = n_freq
self.n_basis = 2*n_freq
self._set_params(np.hstack((variance,lengthscale,period)))

View file

@ -35,7 +35,7 @@ class periodic_Matern52(kernpart):
else:
lengthscale = np.ones(1)
self.lower,self.upper = lower, upper
self.Nparam = 3
self.num_params = 3
self.n_freq = n_freq
self.n_basis = 2*n_freq
self._set_params(np.hstack((variance,lengthscale,period)))

View file

@ -35,7 +35,7 @@ class periodic_exponential(kernpart):
else:
lengthscale = np.ones(1)
self.lower,self.upper = lower, upper
self.Nparam = 3
self.num_params = 3
self.n_freq = n_freq
self.n_basis = 2*n_freq
self._set_params(np.hstack((variance,lengthscale,period)))

View file

@ -17,7 +17,7 @@ class prod(kernpart):
"""
def __init__(self,k1,k2,tensor=False):
self.Nparam = k1.Nparam + k2.Nparam
self.num_params = k1.num_params + k2.num_params
self.name = k1.name + '<times>' + k2.name
self.k1 = k1
self.k2 = k2
@ -40,8 +40,8 @@ class prod(kernpart):
def _set_params(self,x):
"""set the value of the parameters."""
self.k1._set_params(x[:self.k1.Nparam])
self.k2._set_params(x[self.k1.Nparam:])
self.k1._set_params(x[:self.k1.num_params])
self.k2._set_params(x[self.k1.num_params:])
def _get_param_names(self):
"""return parameter names."""
@ -55,11 +55,11 @@ class prod(kernpart):
"""derivative of the covariance matrix with respect to the parameters."""
self._K_computations(X,X2)
if X2 is None:
self.k1.dK_dtheta(dL_dK*self._K2, X[:,self.slice1], None, target[:self.k1.Nparam])
self.k2.dK_dtheta(dL_dK*self._K1, X[:,self.slice2], None, target[self.k1.Nparam:])
self.k1.dK_dtheta(dL_dK*self._K2, X[:,self.slice1], None, target[:self.k1.num_params])
self.k2.dK_dtheta(dL_dK*self._K1, X[:,self.slice2], None, target[self.k1.num_params:])
else:
self.k1.dK_dtheta(dL_dK*self._K2, X[:,self.slice1], X2[:,self.slice1], target[:self.k1.Nparam])
self.k2.dK_dtheta(dL_dK*self._K1, X[:,self.slice2], X2[:,self.slice2], target[self.k1.Nparam:])
self.k1.dK_dtheta(dL_dK*self._K2, X[:,self.slice1], X2[:,self.slice1], target[:self.k1.num_params])
self.k2.dK_dtheta(dL_dK*self._K1, X[:,self.slice2], X2[:,self.slice2], target[self.k1.num_params:])
def Kdiag(self,X,target):
"""Compute the diagonal of the covariance matrix associated to X."""
@ -74,8 +74,8 @@ class prod(kernpart):
K2 = np.zeros(X.shape[0])
self.k1.Kdiag(X[:,self.slice1],K1)
self.k2.Kdiag(X[:,self.slice2],K2)
self.k1.dKdiag_dtheta(dL_dKdiag*K2,X[:,self.slice1],target[:self.k1.Nparam])
self.k2.dKdiag_dtheta(dL_dKdiag*K1,X[:,self.slice2],target[self.k1.Nparam:])
self.k1.dKdiag_dtheta(dL_dKdiag*K2,X[:,self.slice1],target[:self.k1.num_params])
self.k2.dKdiag_dtheta(dL_dKdiag*K1,X[:,self.slice2],target[self.k1.num_params:])
def dK_dX(self,dL_dK,X,X2,target):
"""derivative of the covariance matrix with respect to X."""

View file

@ -17,7 +17,7 @@ class prod_orthogonal(kernpart):
"""
def __init__(self,k1,k2):
self.input_dim = k1.input_dim + k2.input_dim
self.Nparam = k1.Nparam + k2.Nparam
self.num_params = k1.num_params + k2.num_params
self.name = k1.name + '<times>' + k2.name
self.k1 = k1
self.k2 = k2
@ -30,8 +30,8 @@ class prod_orthogonal(kernpart):
def _set_params(self,x):
"""set the value of the parameters."""
self.k1._set_params(x[:self.k1.Nparam])
self.k2._set_params(x[self.k1.Nparam:])
self.k1._set_params(x[:self.k1.num_params])
self.k2._set_params(x[self.k1.num_params:])
def _get_param_names(self):
"""return parameter names."""
@ -45,11 +45,11 @@ class prod_orthogonal(kernpart):
"""derivative of the covariance matrix with respect to the parameters."""
self._K_computations(X,X2)
if X2 is None:
self.k1.dK_dtheta(dL_dK*self._K2, X[:,:self.k1.input_dim], None, target[:self.k1.Nparam])
self.k2.dK_dtheta(dL_dK*self._K1, X[:,self.k1.input_dim:], None, target[self.k1.Nparam:])
self.k1.dK_dtheta(dL_dK*self._K2, X[:,:self.k1.input_dim], None, target[:self.k1.num_params])
self.k2.dK_dtheta(dL_dK*self._K1, X[:,self.k1.input_dim:], None, target[self.k1.num_params:])
else:
self.k1.dK_dtheta(dL_dK*self._K2, X[:,:self.k1.input_dim], X2[:,:self.k1.input_dim], target[:self.k1.Nparam])
self.k2.dK_dtheta(dL_dK*self._K1, X[:,self.k1.input_dim:], X2[:,self.k1.input_dim:], target[self.k1.Nparam:])
self.k1.dK_dtheta(dL_dK*self._K2, X[:,:self.k1.input_dim], X2[:,:self.k1.input_dim], target[:self.k1.num_params])
self.k2.dK_dtheta(dL_dK*self._K1, X[:,self.k1.input_dim:], X2[:,self.k1.input_dim:], target[self.k1.num_params:])
def Kdiag(self,X,target):
"""Compute the diagonal of the covariance matrix associated to X."""
@ -64,8 +64,8 @@ class prod_orthogonal(kernpart):
K2 = np.zeros(X.shape[0])
self.k1.Kdiag(X[:,:self.k1.input_dim],K1)
self.k2.Kdiag(X[:,self.k1.input_dim:],K2)
self.k1.dKdiag_dtheta(dL_dKdiag*K2,X[:,:self.k1.input_dim],target[:self.k1.Nparam])
self.k2.dKdiag_dtheta(dL_dKdiag*K1,X[:,self.k1.input_dim:],target[self.k1.Nparam:])
self.k1.dKdiag_dtheta(dL_dKdiag*K2,X[:,:self.k1.input_dim],target[:self.k1.num_params])
self.k2.dKdiag_dtheta(dL_dKdiag*K1,X[:,self.k1.input_dim:],target[self.k1.num_params:])
def dK_dX(self,dL_dK,X,X2,target):
"""derivative of the covariance matrix with respect to X."""

View file

@ -27,7 +27,7 @@ class rational_quadratic(kernpart):
def __init__(self,input_dim,variance=1.,lengthscale=1.,power=1.):
assert input_dim == 1, "For this kernel we assume input_dim=1"
self.input_dim = input_dim
self.Nparam = 3
self.num_params = 3
self.name = 'rat_quad'
self.variance = variance
self.lengthscale = lengthscale

View file

@ -36,14 +36,14 @@ class rbf(kernpart):
self.name = 'rbf'
self.ARD = ARD
if not ARD:
self.Nparam = 2
self.num_params = 2
if lengthscale is not None:
lengthscale = np.asarray(lengthscale)
assert lengthscale.size == 1, "Only one lengthscale needed for non-ARD kernel"
else:
lengthscale = np.ones(1)
else:
self.Nparam = self.input_dim + 1
self.num_params = self.input_dim + 1
if lengthscale is not None:
lengthscale = np.asarray(lengthscale)
assert lengthscale.size == self.input_dim, "bad number of lengthscales"
@ -67,7 +67,7 @@ class rbf(kernpart):
return np.hstack((self.variance, self.lengthscale))
def _set_params(self, x):
assert x.size == (self.Nparam)
assert x.size == (self.num_params)
self.variance = x[0]
self.lengthscale = x[1:]
self.lengthscale2 = np.square(self.lengthscale)
@ -76,7 +76,7 @@ class rbf(kernpart):
self._Z, self._mu, self._S = np.empty(shape=(3, 1)) # cached versions of Z,mu,S
def _get_param_names(self):
if self.Nparam == 2:
if self.num_params == 2:
return ['variance', 'lengthscale']
else:
return ['variance'] + ['lengthscale_%i' % i for i in range(self.lengthscale.size)]

View file

@ -14,9 +14,9 @@ class rbfcos(kernpart):
print "Warning: the rbfcos kernel requires a lot of memory for high dimensional inputs"
self.ARD = ARD
#set the default frequencies and bandwidths, appropriate Nparam
#set the default frequencies and bandwidths, appropriate num_params
if ARD:
self.Nparam = 2*self.input_dim + 1
self.num_params = 2*self.input_dim + 1
if frequencies is not None:
frequencies = np.asarray(frequencies)
assert frequencies.size == self.input_dim, "bad number of frequencies"
@ -28,7 +28,7 @@ class rbfcos(kernpart):
else:
bandwidths = np.ones(self.input_dim)
else:
self.Nparam = 3
self.num_params = 3
if frequencies is not None:
frequencies = np.asarray(frequencies)
assert frequencies.size == 1, "Exactly one frequency needed for non-ARD kernel"
@ -51,7 +51,7 @@ class rbfcos(kernpart):
return np.hstack((self.variance,self.frequencies, self.bandwidths))
def _set_params(self,x):
assert x.size==(self.Nparam)
assert x.size==(self.num_params)
if self.ARD:
self.variance = x[0]
self.frequencies = x[1:1+self.input_dim]
@ -60,7 +60,7 @@ class rbfcos(kernpart):
self.variance, self.frequencies, self.bandwidths = x
def _get_param_names(self):
if self.Nparam == 3:
if self.num_params == 3:
return ['variance','frequency','bandwidth']
else:
return ['variance']+['frequency_%i'%i for i in range(self.input_dim)]+['bandwidth_%i'%i for i in range(self.input_dim)]
@ -106,7 +106,7 @@ class rbfcos(kernpart):
self._dist2 = np.square(self._dist)
#ensure the next section is computed:
self._params = np.empty(self.Nparam)
self._params = np.empty(self.num_params)
if not np.all(self._params == self._get_params()):
self._params == self._get_params().copy()

View file

@ -23,7 +23,7 @@ class spline(kernpart):
def __init__(self,input_dim,variance=1.,lengthscale=1.):
self.input_dim = input_dim
assert self.input_dim==1
self.Nparam = 1
self.num_params = 1
self.name = 'spline'
self._set_params(np.squeeze(variance))

View file

@ -21,7 +21,7 @@ class symmetric(kernpart):
assert transform.shape == (k.input_dim, k.input_dim)
self.transform = transform
self.input_dim = k.input_dim
self.Nparam = k.Nparam
self.num_params = k.num_params
self.name = k.name + '_symm'
self.k = k
self._set_params(k._get_params())

View file

@ -38,12 +38,12 @@ class spkern(kernpart):
self.input_dim = len(self._sp_x)
assert self.input_dim == input_dim
self._sp_theta = sorted([e for e in sp_vars if not (e.name[0]=='x' or e.name[0]=='z')],key=lambda e:e.name)
self.Nparam = len(self._sp_theta)
self.num_params = len(self._sp_theta)
#deal with param
if param is None:
param = np.ones(self.Nparam)
assert param.size==self.Nparam
param = np.ones(self.num_params)
assert param.size==self.num_params
self._set_params(param)
#Differentiate!
@ -115,7 +115,7 @@ class spkern(kernpart):
#Here's some code to do the looping for K
arglist = ", ".join(["X[i*input_dim+%s]"%x.name[1:] for x in self._sp_x]\
+ ["Z[j*input_dim+%s]"%z.name[1:] for z in self._sp_z]\
+ ["param[%i]"%i for i in range(self.Nparam)])
+ ["param[%i]"%i for i in range(self.num_params)])
self._K_code =\
"""

View file

@ -15,7 +15,7 @@ class white(kernpart):
"""
def __init__(self,input_dim,variance=1.):
self.input_dim = input_dim
self.Nparam = 1
self.num_params = 1
self.name = 'white'
self._set_params(np.array([variance]).flatten())
self._psi1 = 0 # TODO: more elegance here