mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-08 11:32:39 +02:00
Merge branch 'devel' of github.com:/sheffieldml/GPy into mean_functions
This commit is contained in:
commit
254157ce04
6 changed files with 50 additions and 6 deletions
|
|
@ -41,8 +41,11 @@ class EP(LatentFunctionInference):
|
|||
K = kern.K(X)
|
||||
|
||||
if self._ep_approximation is None:
|
||||
|
||||
#if we don't yet have the results of running EP, run EP and store the computed factors in self._ep_approximation
|
||||
mu, Sigma, mu_tilde, tau_tilde, Z_hat = self._ep_approximation = self.expectation_propagation(K, Y, likelihood, Y_metadata)
|
||||
else:
|
||||
#if we've already run EP, just use the existing approximation stored in self._ep_approximation
|
||||
mu, Sigma, mu_tilde, tau_tilde, Z_hat = self._ep_approximation
|
||||
|
||||
Wi, LW, LWi, W_logdet = pdinv(K + np.diag(1./tau_tilde))
|
||||
|
|
|
|||
|
|
@ -48,6 +48,8 @@ class SVGP(LatentFunctionInference):
|
|||
|
||||
#rescale the F term if working on a batch
|
||||
F, dF_dmu, dF_dv = F*batch_scale, dF_dmu*batch_scale, dF_dv*batch_scale
|
||||
if dF_dthetaL is not None:
|
||||
dF_dthetaL = dF_dthetaL.sum(1)*batch_scale
|
||||
|
||||
#derivatives of expected likelihood
|
||||
Adv = A.T[:,:,None]*dF_dv[None,:,:] # As if dF_Dv is diagonal
|
||||
|
|
|
|||
|
|
@ -77,7 +77,7 @@ class Bernoulli(Likelihood):
|
|||
|
||||
return Z_hat, mu_hat, sigma2_hat
|
||||
|
||||
def variational_expectations(self, Y, m, v, gh_points=None):
|
||||
def variational_expectations(self, Y, m, v, gh_points=None, Y_metadata=None):
|
||||
if isinstance(self.gp_link, link_functions.Probit):
|
||||
|
||||
if gh_points is None:
|
||||
|
|
|
|||
|
|
@ -177,7 +177,11 @@ class Likelihood(Parameterized):
|
|||
if np.any(np.isnan(dF_dm)) or np.any(np.isinf(dF_dm)):
|
||||
stop
|
||||
|
||||
dF_dtheta = None # Not yet implemented
|
||||
if self.size:
|
||||
dF_dtheta = self.dlogpdf_dtheta(X, Y[:,None]) # Ntheta x (orig size) x N_{quad_points}
|
||||
dF_dtheta = np.dot(dF_dtheta, gh_w)
|
||||
else:
|
||||
dF_dtheta = None # Not yet implemented
|
||||
return F.reshape(*shape), dF_dm.reshape(*shape), dF_dv.reshape(*shape), dF_dtheta
|
||||
|
||||
def predictive_mean(self, mu, variance, Y_metadata=None):
|
||||
|
|
|
|||
|
|
@ -35,8 +35,8 @@ class StudentT(Likelihood):
|
|||
|
||||
self.log_concave = False
|
||||
|
||||
def parameters_changed(self):
|
||||
self.variance = (self.v / float(self.v - 2)) * self.sigma2
|
||||
#def parameters_changed(self):
|
||||
#self.variance = (self.v / float(self.v - 2)) * self.sigma2
|
||||
|
||||
def update_gradients(self, grads):
|
||||
"""
|
||||
|
|
@ -180,7 +180,8 @@ class StudentT(Likelihood):
|
|||
:rtype: float
|
||||
"""
|
||||
e = y - inv_link_f
|
||||
dlogpdf_dvar = self.v*(e**2 - self.sigma2)/(2*self.sigma2*(self.sigma2*self.v + e**2))
|
||||
e2 = np.square(e)
|
||||
dlogpdf_dvar = self.v*(e2 - self.sigma2)/(2*self.sigma2*(self.sigma2*self.v + e2))
|
||||
return dlogpdf_dvar
|
||||
|
||||
def dlogpdf_dlink_dvar(self, inv_link_f, y, Y_metadata=None):
|
||||
|
|
@ -226,7 +227,7 @@ class StudentT(Likelihood):
|
|||
def dlogpdf_link_dtheta(self, f, y, Y_metadata=None):
|
||||
dlogpdf_dvar = self.dlogpdf_link_dvar(f, y, Y_metadata=Y_metadata)
|
||||
dlogpdf_dv = np.zeros_like(dlogpdf_dvar) #FIXME: Not done yet
|
||||
return np.hstack((dlogpdf_dvar, dlogpdf_dv))
|
||||
return np.array((dlogpdf_dvar, dlogpdf_dv))
|
||||
|
||||
def dlogpdf_dlink_dtheta(self, f, y, Y_metadata=None):
|
||||
dlogpdf_dlink_dvar = self.dlogpdf_dlink_dvar(f, y, Y_metadata=Y_metadata)
|
||||
|
|
|
|||
34
GPy/testing/svgp_tests.py
Normal file
34
GPy/testing/svgp_tests.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
import unittest

import numpy as np
import scipy as sp

import GPy
|
||||
|
||||
class SVGP_nonconvex(unittest.TestCase):
    """
    Gradient check for SVGP inference with a Student-T likelihood.

    The Student-T likelihood is heavy-tailed and non-log-concave, so this
    exercises the robustness of the variational bound and its gradients.

    Note: the original subclassed ``np.testing.TestCase``, which was an
    alias of ``unittest.TestCase`` and has been removed from modern NumPy;
    subclassing ``unittest.TestCase`` directly is behaviourally identical.
    """
    def setUp(self):
        # Seed the RNG so the generated data — and hence the gradient
        # check — is deterministic across runs (the original was flaky).
        np.random.seed(0)
        X = np.linspace(0, 10, 100).reshape(-1, 1)
        Z = np.linspace(0, 10, 10).reshape(-1, 1)
        Y = np.sin(X) + np.random.randn(*X.shape) * 0.1
        # Inject a single outlier: the Student-T likelihood should absorb it.
        Y[50] += 3

        lik = GPy.likelihoods.StudentT(deg_free=2)
        # Small White-kernel jitter keeps the kernel matrix well conditioned.
        k = GPy.kern.RBF(1, lengthscale=5.) + GPy.kern.White(1, 1e-6)
        self.m = GPy.core.SVGP(X, Y, Z=Z, likelihood=lik, kernel=k)

    def test_grad(self):
        # checkgrad compares analytic gradients against finite differences.
        assert self.m.checkgrad(step=1e-4)
|
||||
|
||||
class SVGP_classification(unittest.TestCase):
    """
    Gradient check for SVGP inference with a Bernoulli likelihood
    (binary classification via quadrature expectations).

    Note: the original subclassed ``np.testing.TestCase``, which was an
    alias of ``unittest.TestCase`` and has been removed from modern NumPy;
    subclassing ``unittest.TestCase`` directly is behaviourally identical.
    """
    def setUp(self):
        # Seed the RNG so the generated labels — and hence the gradient
        # check — are deterministic across runs (the original was flaky).
        np.random.seed(0)
        X = np.linspace(0, 10, 100).reshape(-1, 1)
        Z = np.linspace(0, 10, 10).reshape(-1, 1)
        # Binary labels from a thresholded noisy sinusoid.
        Y = np.where((np.sin(X) + np.random.randn(*X.shape) * 0.1) > 0, 1, 0)

        lik = GPy.likelihoods.Bernoulli()
        # Small White-kernel jitter keeps the kernel matrix well conditioned.
        k = GPy.kern.RBF(1, lengthscale=5.) + GPy.kern.White(1, 1e-6)
        self.m = GPy.core.SVGP(X, Y, Z=Z, likelihood=lik, kernel=k)

    def test_grad(self):
        # checkgrad compares analytic gradients against finite differences.
        assert self.m.checkgrad(step=1e-4)
|
||||
Loading…
Add table
Add a link
Reference in a new issue