From 3f6c01898d4f64107e6f1d4d52e8bef0e83ebcfe Mon Sep 17 00:00:00 2001 From: Neil Lawrence Date: Sat, 17 Aug 2013 09:07:09 +0200 Subject: [PATCH] Added first draft of polynomial kernel. --- GPy/kern/constructors.py | 20 ++++++++++++++++++++ GPy/kern/parts/__init__.py | 1 + GPy/kern/parts/kernpart.py | 2 +- GPy/kern/parts/mlp.py | 9 +++++---- 4 files changed, 27 insertions(+), 5 deletions(-) diff --git a/GPy/kern/constructors.py b/GPy/kern/constructors.py index 9e976997..6fd3aa51 100644 --- a/GPy/kern/constructors.py +++ b/GPy/kern/constructors.py @@ -69,6 +69,26 @@ def mlp(input_dim,variance=1., weight_variance=None,bias_variance=100.,ARD=False part = parts.mlp.MLP(input_dim,variance,weight_variance,bias_variance,ARD) return kern(input_dim, [part]) +def poly(input_dim,variance=1., weight_variance=None,bias_variance=1.,degree=2, ARD=False): + """ + Construct a polynomial kernel + + :param input_dim: dimensionality of the kernel, obligatory + :type input_dim: int + :param variance: the variance of the kernel + :type variance: float + :param weight_variance: the variances of the prior over input weights + :type weight_variance: vector of weight variances for input weights. + :param bias_variance: the variance of the biases. + :type bias_variance: float + :param degree: the degree of the polynomial + :type degree: int + :param ARD: Auto Relevance Determination (allows for ARD version of covariance) + :type ARD: Boolean + """ + part = parts.poly.POLY(input_dim,variance,weight_variance,bias_variance,degree,ARD) + return kern(input_dim, [part]) + def white(input_dim,variance=1.): """ Construct a white kernel. 
diff --git a/GPy/kern/parts/__init__.py b/GPy/kern/parts/__init__.py index 3b57fdc3..cf8df575 100644 --- a/GPy/kern/parts/__init__.py +++ b/GPy/kern/parts/__init__.py @@ -12,6 +12,7 @@ import mlp import periodic_exponential import periodic_Matern32 import periodic_Matern52 +import poly import prod_orthogonal import prod import rational_quadratic diff --git a/GPy/kern/parts/kernpart.py b/GPy/kern/parts/kernpart.py index bb836d1c..7fa54b42 100644 --- a/GPy/kern/parts/kernpart.py +++ b/GPy/kern/parts/kernpart.py @@ -56,5 +56,5 @@ class Kernpart(object): raise NotImplementedError def dpsi2_dmuS(self,dL_dpsi2,Z,mu,S,target_mu,target_S): raise NotImplementedError - def dK_dX(self,X,X2,target): + def dK_dX(self, dL_dK, X, X2, target): raise NotImplementedError diff --git a/GPy/kern/parts/mlp.py b/GPy/kern/parts/mlp.py index e2531b23..2b8d6799 100644 --- a/GPy/kern/parts/mlp.py +++ b/GPy/kern/parts/mlp.py @@ -111,10 +111,10 @@ class MLP(Kernpart): gX = np.zeros((X2.shape[0], X.shape[1], X.shape[0])) for i in range(X.shape[0]): - gX[:, :, i] = self._dK_dX_point(X, X2, i) + gX[:, :, i] = self._dK_dX_point(dL_dK, X, X2, target, i) - def _dK_dX_point(self, X, X2, i): + def _dK_dX_point(self, dL_dK, X, X2, target, i): """Gradient with respect to one point of X""" inner_prod = self._K_inner_prod[i, :].T @@ -127,10 +127,11 @@ class MLP(Kernpart): #arg = numer/denom gX = np.zeros(X2.shape) denom3 = denom*denom*denom + gX = np.zeros((X2.shape[0], X2.shape[1])) for j in range(X2.shape[1]): - gX[:, j]=X2[:, j]/denom - vec2*X[i, j]*numer/denom3 + gX[:, j] =X2[:, j]/denom - vec2*X[i, j]*numer/denom3 gX[:, j] = four_over_tau*self.weight_variance*self.variance*gX[:, j]/np.sqrt(1-arg*arg) - + target[i, :] def _K_computations(self, X, X2):