From beebf6933a3681c573ffd8e8b7f978e549e2c959 Mon Sep 17 00:00:00 2001
From: James Hensman
Date: Wed, 7 May 2014 14:53:10 +0100
Subject: [PATCH] added polynomial kernel

---
 GPy/kern/__init__.py  |  1 +
 GPy/kern/_src/poly.py | 42 ++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 43 insertions(+)
 create mode 100644 GPy/kern/_src/poly.py

diff --git a/GPy/kern/__init__.py b/GPy/kern/__init__.py
index ccf73b97..1ed5e805 100644
--- a/GPy/kern/__init__.py
+++ b/GPy/kern/__init__.py
@@ -10,6 +10,7 @@ from _src.independent_outputs import IndependentOutputs, Hierarchical
 from _src.coregionalize import Coregionalize
 from _src.ssrbf import SSRBF # TODO: ZD: did you remove this?
 from _src.ODE_UY import ODE_UY
+from _src.poly import Poly
 #from _src.ODE_UYC import ODE_UYC ADD THIS FILE TO THE REPO!!
 #from _src.ODE_st import ODE_st
 # TODO: put this in an init file somewhere
diff --git a/GPy/kern/_src/poly.py b/GPy/kern/_src/poly.py
new file mode 100644
index 00000000..d40f805c
--- /dev/null
+++ b/GPy/kern/_src/poly.py
@@ -0,0 +1,42 @@
+# Copyright (c) 2014, James Hensman
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+import numpy as np
+from kern import Kern
+from ...util.misc import param_to_array
+from ...core.parameterization import Param
+from ...core.parameterization.transformations import Logexp
+class Poly(Kern):
+    """
+    Polynomial kernel
+    """
+
+    def __init__(self, input_dim, variance=1., order=3., active_dims=None, name='poly'):
+        super(Poly, self).__init__(input_dim, active_dims, name)
+        self.variance = Param('variance', variance, Logexp())
+        self.add_parameter(self.variance)
+        self.order=order
+
+    def K(self, X, X2=None):
+        return (self._dot_product(X, X2) + 1.)**self.order * self.variance
+
+    def _dot_product(self, X, X2=None):
+        if X2 is None:
+            return np.dot(X, X.T)
+        else:
+            return np.dot(X, X2.T)
+
+    def Kdiag(self, X):
+        return self.variance*(np.square(X).sum(1) + 1.)**self.order
+
+    def update_gradients_full(self, dL_dK, X, X2=None):
+        self.variance.gradient = np.sum(dL_dK * (self._dot_product(X, X2) + 1.)**self.order)
+
+    def update_gradients_diag(self, dL_dKdiag, X):
+        raise NotImplementedError
+
+    def gradients_X(self, dL_dK, X, X2=None):
+        raise NotImplementedError
+
+    def gradients_X_diag(self, dL_dKdiag, X):
+        raise NotImplementedError