Added first draft of polynomial kernel.

This commit is contained in:
Neil Lawrence 2013-08-17 09:07:09 +02:00
parent a570198029
commit 3f6c01898d
4 changed files with 27 additions and 5 deletions

View file

@@ -69,6 +69,26 @@ def mlp(input_dim,variance=1., weight_variance=None,bias_variance=100.,ARD=False
part = parts.mlp.MLP(input_dim,variance,weight_variance,bias_variance,ARD)
return kern(input_dim, [part])
def poly(input_dim,variance=1., weight_variance=None,bias_variance=1.,degree=2, ARD=False):
    """
    Construct a polynomial kernel.

    :param input_dim: dimensionality of the kernel, obligatory
    :type input_dim: int
    :param variance: the variance of the kernel
    :type variance: float
    :param weight_variance: the variance of the input weights
    :type weight_variance: float, or vector of weight variances for input weights (ARD)
    :param bias_variance: the variance of the biases.
    :type bias_variance: float
    :param degree: the degree of the polynomial
    :type degree: int
    :param ARD: Auto Relevance Determination (allows for ARD version of covariance)
    :type ARD: Boolean
    """
    # Build the single POLY part and wrap it in a kern container,
    # mirroring the construction pattern of the sibling factories (e.g. mlp).
    part = parts.poly.POLY(input_dim,variance,weight_variance,bias_variance,degree,ARD)
    return kern(input_dim, [part])
def white(input_dim,variance=1.):
"""
Construct a white kernel.

View file

@@ -12,6 +12,7 @@ import mlp
import periodic_exponential
import periodic_Matern32
import periodic_Matern52
import poly
import prod_orthogonal
import prod
import rational_quadratic

View file

@@ -56,5 +56,5 @@ class Kernpart(object):
raise NotImplementedError
def dpsi2_dmuS(self,dL_dpsi2,Z,mu,S,target_mu,target_S):
raise NotImplementedError
def dK_dX(self,X,X2,target):
def dK_dX(self, dL_dK, X, X2, target):
raise NotImplementedError

View file

@@ -111,10 +111,10 @@ class MLP(Kernpart):
gX = np.zeros((X2.shape[0], X.shape[1], X.shape[0]))
for i in range(X.shape[0]):
gX[:, :, i] = self._dK_dX_point(X, X2, i)
gX[:, :, i] = self._dK_dX_point(dL_dK, X, X2, target, i)
def _dK_dX_point(self, X, X2, i):
def _dK_dX_point(self, dL_dK, X, X2, target, i):
"""Gradient with respect to one point of X"""
inner_prod = self._K_inner_prod[i, :].T
@@ -127,10 +127,11 @@ class MLP(Kernpart):
#arg = numer/denom
gX = np.zeros(X2.shape)
denom3 = denom*denom*denom
gX = np.zeros((X2.shape[0], X2.shape[1]))
for j in range(X2.shape[1]):
gX[:, j] =X2[:, j]/denom - vec2*X[i, j]*numer/denom3
gX[:, j] = four_over_tau*self.weight_variance*self.variance*gX[:, j]/np.sqrt(1-arg*arg)
target[i, :]
def _K_computations(self, X, X2):