From 76598bac412d7e34f7dc2bee58e0b3f6c4b8c077 Mon Sep 17 00:00:00 2001 From: James Hensman Date: Thu, 16 Oct 2014 13:44:34 +0100 Subject: [PATCH 1/3] commented out stochastics.py -- not added --- GPy/core/sparse_gp.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/GPy/core/sparse_gp.py b/GPy/core/sparse_gp.py index 120f0d94..e451b684 100644 --- a/GPy/core/sparse_gp.py +++ b/GPy/core/sparse_gp.py @@ -10,8 +10,8 @@ from parameterization.variational import VariationalPosterior import logging from GPy.inference.latent_function_inference.posterior import Posterior -from GPy.inference.optimization.stochastics import SparseGPStochastics,\ - SparseGPMissing +#no stochastics.py file added! from GPy.inference.optimization.stochastics import SparseGPStochastics,\ + #SparseGPMissing logger = logging.getLogger("sparse gp") class SparseGP(GP): From 3b2d16a0e2734bd45aa3992f26a3a16d9bf2409a Mon Sep 17 00:00:00 2001 From: James Hensman Date: Thu, 16 Oct 2014 14:02:02 +0100 Subject: [PATCH 2/3] docstrings --- GPy/kern/_src/stationary.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/GPy/kern/_src/stationary.py b/GPy/kern/_src/stationary.py index 1685e7c7..1558f0a5 100644 --- a/GPy/kern/_src/stationary.py +++ b/GPy/kern/_src/stationary.py @@ -8,9 +8,9 @@ from ...core.parameterization.transformations import Logexp from ...util.linalg import tdot from ... import util import numpy as np -from scipy import integrate -from ...util.caching import Cache_this +from scipy import integrate, weave from ...util.config import config # for assesing whether to use weave +from ...util.caching import Cache_this class Stationary(Kern): """ @@ -132,10 +132,22 @@ class Stationary(Kern): return ret def update_gradients_diag(self, dL_dKdiag, X): + """ + Given the derivative of the objective with respect to the diagonal of + the covariance matrix, compute the derivative wrt the parameters of + this kernel and store in the .gradient field.
+ + See also update_gradients_full + """ self.variance.gradient = np.sum(dL_dKdiag) self.lengthscale.gradient = 0. def update_gradients_full(self, dL_dK, X, X2=None): + """ + Given the derivative of the objective wrt the covariance matrix + (dL_dK), compute the gradient wrt the parameters of this kernel, + and store in the parameters object as e.g. self.variance.gradient + """ self.variance.gradient = np.einsum('ij,ij,i', self.K(X, X2), dL_dK, 1./self.variance) #now the lengthscale gradient(s) @@ -173,6 +185,7 @@ class Stationary(Kern): return 1./np.where(dist != 0., dist, np.inf) def weave_lengthscale_grads(self, tmp, X, X2): + """Use scipy.weave to compute derivatives wrt the lengthscales""" N,M = tmp.shape Q = X.shape[1] if hasattr(X, 'values'):X = X.values @@ -190,7 +203,6 @@ class Stationary(Kern): grads[q] = gradq; } """ - from scipy import weave weave.inline(code, ['tmp', 'X', 'X2', 'grads', 'N', 'M', 'Q'], type_converters=weave.converters.blitz, support_code="#include ") return -grads/self.lengthscale**3 From 50963ae28df0e1c0956fdee02b793a02f081d768 Mon Sep 17 00:00:00 2001 From: James Hensman Date: Thu, 16 Oct 2014 14:09:29 +0100 Subject: [PATCH 3/3] fixing more issues caused by removal of symbolic.py --- GPy/likelihoods/sstudent_t.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GPy/likelihoods/sstudent_t.py b/GPy/likelihoods/sstudent_t.py index e8fb03ce..790c1437 100644 --- a/GPy/likelihoods/sstudent_t.py +++ b/GPy/likelihoods/sstudent_t.py @@ -4,7 +4,7 @@ import sympy as sym from sympy.utilities.lambdify import lambdify -from GPy.util.symbolic import gammaln +# does not exist! JH from GPy.util.symbolic import gammaln import numpy as np import link_functions