From 6823a1df2eb3ee12173d2e84c6e21870f1d6fd31 Mon Sep 17 00:00:00 2001
From: Zhenwen Dai
Date: Wed, 24 Sep 2014 13:45:39 +0100
Subject: [PATCH] extend prod kernel for handling more than 2 kernels

---
 GPy/kern/_src/prod.py | 25 ++++++++++++-------------
 1 file changed, 12 insertions(+), 13 deletions(-)

diff --git a/GPy/kern/_src/prod.py b/GPy/kern/_src/prod.py
index b8f92f27..ba7f9450 100644
--- a/GPy/kern/_src/prod.py
+++ b/GPy/kern/_src/prod.py
@@ -18,7 +18,6 @@ class Prod(CombinationKernel):
     """

     def __init__(self, kernels, name='mul'):
-        assert len(kernels) == 2, 'only implemented for two kernels as of yet'
         super(Prod, self).__init__(kernels, name)

     @Cache_this(limit=2, force_kwargs=['which_parts'])
@@ -37,25 +36,25 @@ class Prod(CombinationKernel):
         return reduce(np.multiply, (p.Kdiag(X) for p in which_parts))

     def update_gradients_full(self, dL_dK, X, X2=None):
-        for k1,k2 in itertools.combinations(self.parts, 2):
-            k1.update_gradients_full(dL_dK*k2.K(X, X2), X, X2)
-            k2.update_gradients_full(dL_dK*k1.K(X, X2), X, X2)
+        k = self.K(X,X2)*dL_dK
+        for p in self.parts:
+            p.update_gradients_full(k/p.K(X,X2),X,X2)

     def update_gradients_diag(self, dL_dKdiag, X):
-        for k1,k2 in itertools.combinations(self.parts, 2):
-            k1.update_gradients_diag(dL_dKdiag*k2.Kdiag(X), X)
-            k2.update_gradients_diag(dL_dKdiag*k1.Kdiag(X), X)
+        k = self.Kdiag(X)*dL_dKdiag
+        for p in self.parts:
+            p.update_gradients_diag(k/p.Kdiag(X),X)

     def gradients_X(self, dL_dK, X, X2=None):
         target = np.zeros(X.shape)
-        for k1,k2 in itertools.combinations(self.parts, 2):
-            target += k1.gradients_X(dL_dK*k2.K(X, X2), X, X2)
-            target += k2.gradients_X(dL_dK*k1.K(X, X2), X, X2)
+        k = self.K(X,X2)*dL_dK
+        for p in self.parts:
+            target += p.gradients_X(k/p.K(X,X2),X,X2)
         return target

     def gradients_X_diag(self, dL_dKdiag, X):
         target = np.zeros(X.shape)
-        for k1,k2 in itertools.combinations(self.parts, 2):
-            target += k1.gradients_X_diag(dL_dKdiag*k2.Kdiag(X), X)
-            target += k2.gradients_X_diag(dL_dKdiag*k1.Kdiag(X), X)
+        k = self.Kdiag(X)*dL_dKdiag
+        for p in self.parts:
+            target += p.gradients_X_diag(k/p.Kdiag(X),X)
         return target