small changes in the way covariance functions handle lengthscale as input

This commit is contained in:
Nicolas 2013-01-31 17:19:15 +00:00
parent 3a052cd5eb
commit a510524620
5 changed files with 39 additions and 40 deletions

View file

@@ -14,14 +14,14 @@ class Matern32(kernpart):
.. math::
k(r) = \sigma^2 (1 + \sqrt{3} r) \exp(- \sqrt{3} r) \qquad \qquad \\text{ where } r = \sqrt{\sum_{i=1}^D \\frac{(x_i-y_i)^2}{\ell_i^2} }
k(r) = \sigma^2 (1 + \sqrt{3} r) \exp(- \sqrt{3} r) \\qquad \\qquad \\text{ where } r = \sqrt{\sum_{i=1}^D \\frac{(x_i-y_i)^2}{\ell_i^2} }
:param D: the number of input dimensions
:type D: int
:param variance: the variance :math:`\sigma^2`
:type variance: float
:param lengthscale: the vector of lengthscale :math:`\ell_i`
:type lengthscale: np.ndarray of size (1,) or (D,) depending on ARD
:type lengthscale: array or list of the appropriate size (or float if there is only one lengthscale parameter)
:param ARD: Auto Relevance Determination. If equal to "False", the kernel is isotropic (ie. one single lengthscale parameter \ell), otherwise there is one lengthscale parameter per dimension.
:type ARD: Boolean
:rtype: kernel object
@@ -35,17 +35,19 @@ class Matern32(kernpart):
self.Nparam = 2
self.name = 'Mat32'
if lengthscale is not None:
assert lengthscale.shape == (1,)
lengthscale = np.asarray(lengthscale)
assert lengthscale.size == 1, "Only one lengthscale needed for non-ARD kernel"
else:
lengthscale = np.ones(1)
else:
self.Nparam = self.D + 1
self.name = 'Mat32_ARD'
self.name = 'Mat32'
if lengthscale is not None:
assert lengthscale.shape == (self.D,)
lengthscale = np.asarray(lengthscale)
assert lengthscale.size == self.D, "bad number of lengthscales"
else:
lengthscale = np.ones(self.D)
self._set_params(np.hstack((variance,lengthscale)))
self._set_params(np.hstack((variance,lengthscale.flatten())))
def _get_params(self):
"""return the value of the parameters."""
@@ -116,9 +118,9 @@ class Matern32(kernpart):
:param F1: vector of derivatives of F
:type F1: np.array
:param F2: vector of second derivatives of F
:type F2: np.array
:type F2: np.array
:param lower,upper: boundaries of the input domain
:type lower,upper: floats
:type lower,upper: floats
"""
assert self.D == 1
def L(x,i):
@@ -133,4 +135,3 @@ class Matern32(kernpart):
#print "OLD \n", np.dot(F1lower,F1lower.T), "\n \n"
#return(G)
return(self.lengthscale**3/(12.*np.sqrt(3)*self.variance) * G + 1./self.variance*np.dot(Flower,Flower.T) + self.lengthscale**2/(3.*self.variance)*np.dot(F1lower,F1lower.T))

View file

@@ -13,14 +13,14 @@ class Matern52(kernpart):
.. math::
k(r) = \sigma^2 (1 + \sqrt{5} r + \\frac53 r^2) \exp(- \sqrt{5} r) \qquad \qquad \\text{ where } r = \sqrt{\sum_{i=1}^D \\frac{(x_i-y_i)^2}{\ell_i^2} }
k(r) = \sigma^2 (1 + \sqrt{5} r + \\frac53 r^2) \exp(- \sqrt{5} r) \\qquad \\qquad \\text{ where } r = \sqrt{\sum_{i=1}^D \\frac{(x_i-y_i)^2}{\ell_i^2} }
:param D: the number of input dimensions
:type D: int
:param variance: the variance :math:`\sigma^2`
:type variance: float
:param lengthscale: the vector of lengthscale :math:`\ell_i`
:type lengthscale: np.ndarray of size (1,) or (D,) depending on ARD
:type lengthscale: array or list of the appropriate size (or float if there is only one lengthscale parameter)
:param ARD: Auto Relevance Determination. If equal to "False", the kernel is isotropic (ie. one single lengthscale parameter \ell), otherwise there is one lengthscale parameter per dimension.
:type ARD: Boolean
:rtype: kernel object
@@ -33,17 +33,19 @@ class Matern52(kernpart):
self.Nparam = 2
self.name = 'Mat52'
if lengthscale is not None:
assert lengthscale.shape == (1,)
lengthscale = np.asarray(lengthscale)
assert lengthscale.size == 1, "Only one lengthscale needed for non-ARD kernel"
else:
lengthscale = np.ones(1)
else:
self.Nparam = self.D + 1
self.name = 'Mat52_ARD'
self.name = 'Mat52'
if lengthscale is not None:
assert lengthscale.shape == (self.D,)
lengthscale = np.asarray(lengthscale)
assert lengthscale.size == self.D, "bad number of lengthscales"
else:
lengthscale = np.ones(self.D)
self._set_params(np.hstack((variance,lengthscale)))
self._set_params(np.hstack((variance,lengthscale.flatten())))
def _get_params(self):
"""return the value of the parameters."""

View file

@@ -13,14 +13,14 @@ class exponential(kernpart):
.. math::
k(r) = \sigma^2 \exp(- r) \qquad \qquad \\text{ where } r = \sqrt{\sum_{i=1}^D \\frac{(x_i-y_i)^2}{\ell_i^2} }
k(r) = \sigma^2 \exp(- r) \\qquad \\qquad \\text{ where } r = \sqrt{\sum_{i=1}^D \\frac{(x_i-y_i)^2}{\ell_i^2} }
:param D: the number of input dimensions
:type D: int
:param variance: the variance :math:`\sigma^2`
:type variance: float
:param lengthscale: the vector of lengthscale :math:`\ell_i`
:type lengthscale: np.ndarray of size (1,) or (D,) depending on ARD
:type lengthscale: array or list of the appropriate size (or float if there is only one lengthscale parameter)
:param ARD: Auto Relevance Determination. If equal to "False", the kernel is isotropic (ie. one single lengthscale parameter \ell), otherwise there is one lengthscale parameter per dimension.
:type ARD: Boolean
:rtype: kernel object
@@ -33,17 +33,19 @@ class exponential(kernpart):
self.Nparam = 2
self.name = 'exp'
if lengthscale is not None:
assert lengthscale.shape == (1,)
lengthscale = np.asarray(lengthscale)
assert lengthscale.size == 1, "Only one lengthscale needed for non-ARD kernel"
else:
lengthscale = np.ones(1)
else:
self.Nparam = self.D + 1
self.name = 'exp_ARD'
self.name = 'exp'
if lengthscale is not None:
assert lengthscale.shape == (self.D,)
lengthscale = np.asarray(lengthscale)
assert lengthscale.size == self.D, "bad number of lengthscales"
else:
lengthscale = np.ones(self.D)
self._set_params(np.hstack((variance,lengthscale)))
self._set_params(np.hstack((variance,lengthscale.flatten())))
def _get_params(self):
"""return the value of the parameters."""
@@ -87,7 +89,7 @@ class exponential(kernpart):
dl = self.variance*dvar*dist2M.sum(-1)*invdist
target[1] += np.sum(dl*partial)
def dKdiag_dtheta(self,partial,X,target):
def dKdiag_dtheta(self,partial,X,target):
"""derivative of the diagonal of the covariance matrix with respect to the parameters."""
#NB: derivative of diagonal elements wrt lengthscale is 0
target[0] += np.sum(partial)
@@ -110,9 +112,9 @@ class exponential(kernpart):
:param F: vector of functions
:type F: np.array
:param F1: vector of derivatives of F
:type F1: np.array
:type F1: np.array
:param lower,upper: boundaries of the input domain
:type lower,upper: floats
:type lower,upper: floats
"""
assert self.D == 1
def L(x,i):
@@ -124,8 +126,3 @@ class exponential(kernpart):
G[i,j] = G[j,i] = integrate.quad(lambda x : L(x,i)*L(x,j),lower,upper)[0]
Flower = np.array([f(lower) for f in F])[:,None]
return(self.lengthscale/2./self.variance * G + 1./self.variance * np.dot(Flower,Flower.T))

View file

@@ -15,8 +15,8 @@ class linear(kernpart):
:param D: the number of input dimensions
:type D: int
:param variances: the vector of variances :math:`\sigma^2_i`
:type variances: np.ndarray of size (1,) or (D,) depending on ARD
:param ARD: Auto Relevance Determination. If equal to "False", the kernel is isotropic (ie. one single variance parameter \sigma^2), otherwise there is one variance parameter per dimension.
:type variances: array or list of the appropriate size (or float if there is only one variance parameter)
:param ARD: Auto Relevance Determination. If equal to "False", the kernel has only one variance parameter \sigma^2, otherwise there is one variance parameter per dimension.
:type ARD: Boolean
:rtype: kernel object
"""
@@ -28,21 +28,20 @@ class linear(kernpart):
self.Nparam = 1
self.name = 'linear'
if variances is not None:
if isinstance(variances, float):
variances = np.array([variances])
assert variances.shape == (1,)
variances = np.asarray(variances)
assert variances.size == 1, "Only one variance needed for non-ARD kernel"
else:
variances = np.ones(1)
self._Xcache, self._X2cache = np.empty(shape=(2,))
else:
self.Nparam = self.D
self.name = 'linear_ARD'
self.name = 'linear'
if variances is not None:
assert variances.shape == (self.D,)
variances = np.asarray(variances)
assert variances.size == self.D, "bad number of lengthscales"
else:
variances = np.ones(self.D)
self._set_params(variances)
self._set_params(variances.flatten())
def _get_params(self):
return self.variances

View file

@@ -12,7 +12,7 @@ class rbf(kernpart):
.. math::
k(r) = \sigma^2 \exp(- \frac{1}{2}r^2) \qquad \qquad \\text{ where } r^2 = \sum_{i=1}^d \frac{ (x_i-x^\prime_i)^2}{\ell_i^2}}
k(r) = \sigma^2 \exp(- \frac{1}{2}r^2) \\qquad \\qquad \\text{ where } r^2 = \sum_{i=1}^d \frac{ (x_i-x^\prime_i)^2}{\ell_i^2}}
where \ell_i is the lengthscale, \sigma^2 the variance and d the dimensionality of the input.
@@ -21,7 +21,7 @@ class rbf(kernpart):
:param variance: the variance of the kernel
:type variance: float
:param lengthscale: the vector of lengthscale of the kernel
:type lengthscale: np.ndarray od size (1,) or (D,) depending on ARD
:type lengthscale: array or list of the appropriate size (or float if there is only one lengthscale parameter)
:param ARD: Auto Relevance Determination. If equal to "False", the kernel is isotropic (ie. one single lengthscale parameter \ell), otherwise there is one lengthscale parameter per dimension.
:type ARD: Boolean
:rtype: kernel object