From 53081c704de370169400002dfcab8b21ebce82e2 Mon Sep 17 00:00:00 2001 From: James Hensman Date: Tue, 24 Mar 2015 13:58:41 +0000 Subject: [PATCH 1/3] derivatives of likelihood things now working for svgp --- GPy/inference/latent_function_inference/svgp.py | 2 ++ GPy/likelihoods/likelihood.py | 6 +++++- GPy/likelihoods/student_t.py | 5 +++-- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/GPy/inference/latent_function_inference/svgp.py b/GPy/inference/latent_function_inference/svgp.py index 1974991b..5888bead 100644 --- a/GPy/inference/latent_function_inference/svgp.py +++ b/GPy/inference/latent_function_inference/svgp.py @@ -47,6 +47,8 @@ class SVGP(LatentFunctionInference): #rescale the F term if working on a batch F, dF_dmu, dF_dv = F*batch_scale, dF_dmu*batch_scale, dF_dv*batch_scale + if dF_dthetaL is not None: + dF_dthetaL = dF_dthetaL.sum(1)*batch_scale #derivatives of expected likelihood Adv = A.T[:,:,None]*dF_dv[None,:,:] # As if dF_Dv is diagonal diff --git a/GPy/likelihoods/likelihood.py b/GPy/likelihoods/likelihood.py index b1e78b93..0bf9fc6f 100644 --- a/GPy/likelihoods/likelihood.py +++ b/GPy/likelihoods/likelihood.py @@ -177,7 +177,11 @@ class Likelihood(Parameterized): if np.any(np.isnan(dF_dm)) or np.any(np.isinf(dF_dm)): stop - dF_dtheta = None # Not yet implemented + if self.size: + dF_dtheta = self.dlogpdf_dtheta(X, Y[:,None]) # Ntheta x (orig size) x N_{quad_points} + dF_dtheta = np.dot(dF_dtheta, gh_w) + else: + dF_dtheta = None # Not yet implemented return F.reshape(*shape), dF_dm.reshape(*shape), dF_dv.reshape(*shape), dF_dtheta def predictive_mean(self, mu, variance, Y_metadata=None): diff --git a/GPy/likelihoods/student_t.py b/GPy/likelihoods/student_t.py index dbd4d94f..c805d1dd 100644 --- a/GPy/likelihoods/student_t.py +++ b/GPy/likelihoods/student_t.py @@ -180,7 +180,8 @@ class StudentT(Likelihood): :rtype: float """ e = y - inv_link_f - dlogpdf_dvar = self.v*(e**2 - self.sigma2)/(2*self.sigma2*(self.sigma2*self.v + e**2)) 
+ e2 = np.square(e) + dlogpdf_dvar = self.v*(e2 - self.sigma2)/(2*self.sigma2*(self.sigma2*self.v + e2)) return dlogpdf_dvar def dlogpdf_dlink_dvar(self, inv_link_f, y, Y_metadata=None): @@ -226,7 +227,7 @@ class StudentT(Likelihood): def dlogpdf_link_dtheta(self, f, y, Y_metadata=None): dlogpdf_dvar = self.dlogpdf_link_dvar(f, y, Y_metadata=Y_metadata) dlogpdf_dv = np.zeros_like(dlogpdf_dvar) #FIXME: Not done yet - return np.hstack((dlogpdf_dvar, dlogpdf_dv)) + return np.array((dlogpdf_dvar, dlogpdf_dv)) def dlogpdf_dlink_dtheta(self, f, y, Y_metadata=None): dlogpdf_dlink_dvar = self.dlogpdf_dlink_dvar(f, y, Y_metadata=Y_metadata) From 534e0df6066c50893991f94ebc8f71b8e7fe81de Mon Sep 17 00:00:00 2001 From: James Hensman Date: Tue, 24 Mar 2015 14:11:50 +0000 Subject: [PATCH 2/3] some tests for the svgp, and some changes to the likelihoods --- GPy/likelihoods/bernoulli.py | 2 +- GPy/likelihoods/student_t.py | 4 ++-- GPy/testing/svgp_tests.py | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 37 insertions(+), 3 deletions(-) create mode 100644 GPy/testing/svgp_tests.py diff --git a/GPy/likelihoods/bernoulli.py b/GPy/likelihoods/bernoulli.py index 26de274b..f5690aa4 100644 --- a/GPy/likelihoods/bernoulli.py +++ b/GPy/likelihoods/bernoulli.py @@ -77,7 +77,7 @@ class Bernoulli(Likelihood): return Z_hat, mu_hat, sigma2_hat - def variational_expectations(self, Y, m, v, gh_points=None): + def variational_expectations(self, Y, m, v, gh_points=None, Y_metadata=None): if isinstance(self.gp_link, link_functions.Probit): if gh_points is None: diff --git a/GPy/likelihoods/student_t.py b/GPy/likelihoods/student_t.py index c805d1dd..97c2286e 100644 --- a/GPy/likelihoods/student_t.py +++ b/GPy/likelihoods/student_t.py @@ -35,8 +35,8 @@ class StudentT(Likelihood): self.log_concave = False - def parameters_changed(self): - self.variance = (self.v / float(self.v - 2)) * self.sigma2 + #def parameters_changed(self): + #self.variance = (self.v / float(self.v - 2)) * self.sigma2 def 
update_gradients(self, grads): """ diff --git a/GPy/testing/svgp_tests.py b/GPy/testing/svgp_tests.py new file mode 100644 index 00000000..6dc0fa56 --- /dev/null +++ b/GPy/testing/svgp_tests.py @@ -0,0 +1,34 @@ +import numpy as np +import scipy as sp +import GPy + +class SVGP_nonconvex(np.testing.TestCase): + """ + Inference in the SVGP with a student-T likelihood + """ + def setUp(self): + X = np.linspace(0,10,100).reshape(-1,1) + Z = np.linspace(0,10,10).reshape(-1,1) + Y = np.sin(X) + np.random.randn(*X.shape)*0.1 + Y[50] += 3 + + lik = GPy.likelihoods.StudentT(deg_free=2) + k = GPy.kern.RBF(1, lengthscale=5.) + GPy.kern.White(1, 1e-6) + self.m = GPy.core.SVGP(X, Y, Z=Z, likelihood=lik, kernel=k) + def test_grad(self): + assert self.m.checkgrad(step=1e-4) + +class SVGP_classification(np.testing.TestCase): + """ + Inference in the SVGP with a Bernoulli likelihood + """ + def setUp(self): + X = np.linspace(0,10,100).reshape(-1,1) + Z = np.linspace(0,10,10).reshape(-1,1) + Y = np.where((np.sin(X) + np.random.randn(*X.shape)*0.1)>0, 1,0) + + lik = GPy.likelihoods.Bernoulli() + k = GPy.kern.RBF(1, lengthscale=5.) 
+ GPy.kern.White(1, 1e-6) + self.m = GPy.core.SVGP(X, Y, Z=Z, likelihood=lik, kernel=k) + def test_grad(self): + assert self.m.checkgrad(step=1e-4) From e74bfd81c6139206fe2cd1f686db19397f865398 Mon Sep 17 00:00:00 2001 From: James Hensman Date: Wed, 25 Mar 2015 14:22:39 +0000 Subject: [PATCH 3/3] added some clarifying comments with NDL --- .../latent_function_inference/expectation_propagation.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/GPy/inference/latent_function_inference/expectation_propagation.py b/GPy/inference/latent_function_inference/expectation_propagation.py index 26144974..647823bd 100644 --- a/GPy/inference/latent_function_inference/expectation_propagation.py +++ b/GPy/inference/latent_function_inference/expectation_propagation.py @@ -40,8 +40,11 @@ class EP(LatentFunctionInference): K = kern.K(X) if self._ep_approximation is None: + + #if we don't yet have the results of running EP, run EP and store the computed factors in self._ep_approximation mu, Sigma, mu_tilde, tau_tilde, Z_hat = self._ep_approximation = self.expectation_propagation(K, Y, likelihood, Y_metadata) else: + #if we've already run EP, just use the existing approximation stored in self._ep_approximation mu, Sigma, mu_tilde, tau_tilde, Z_hat = self._ep_approximation Wi, LW, LWi, W_logdet = pdinv(K + np.diag(1./tau_tilde))