moved the *2 of the psi2 statistics into kern and corrected the bias+linear cross term

Max Zwiessele 2013-04-23 15:22:30 +01:00
parent 2c3a53b174
commit f01be172be
3 changed files with 282 additions and 259 deletions
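
For context: the psi statistics are kernel expectations under the variational posterior q(X) = prod_n N(x_n | mu_n, S_n) used in GPy's Bayesian GPLVM, with psi1[n, m] = E[k(x_n, z_m)] and psi2[m, m'] summing E[k(z_m, x_n) k(x_n, z_m')] over the data points. For a sum kernel such as linear + bias, psi2 picks up a cross term between the two parts; that is the term this commit corrects. A minimal numpy sketch of the decomposition, checked against Monte Carlo (not GPy's implementation; a unit-variance linear kernel and a constant bias value c are assumptions):

import numpy as np

rng = np.random.default_rng(0)
N, M, Q = 5, 4, 3
mu = rng.normal(size=(N, Q))            # variational means of q(X)
S = rng.uniform(0.1, 1.0, size=(N, Q))  # diagonal variational variances
Z = rng.normal(size=(M, Q))             # inducing inputs
c = 0.7                                 # bias kernel: k_bias(x, x') = c

# Closed-form pieces for the linear kernel k_lin(x, x') = x @ x'
psi1_lin = mu @ Z.T                                # (N, M): E[k_lin(x_n, z_m)]
ExxT = mu[:, :, None] * mu[:, None, :]             # E[x x^T] = mu mu^T + diag(S)
ExxT = ExxT + np.apply_along_axis(np.diag, 1, S)   # (N, Q, Q)
psi2_lin = np.einsum('mq,nqr,kr->mk', Z, ExxT, Z)  # (M, M), summed over n

# psi2 of the sum kernel = linear part + bias/linear cross term + bias part
p1 = psi1_lin.sum(0)
psi2_sum = psi2_lin + c * (p1[:, None] + p1[None, :]) + N * c ** 2

# Monte Carlo estimate of the same expectation
Xs = mu + np.sqrt(S) * rng.normal(size=(100000, N, Q))  # samples from q(X)
K = Xs @ Z.T + c                                        # k(x_n, z_m) per sample
psi2_mc = np.einsum('tnm,tnk->mk', K, K) / Xs.shape[0]
print(np.abs(psi2_sum - psi2_mc).max())                 # small (Monte Carlo error)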


@@ -12,18 +12,17 @@ import itertools
 from GPy.core import model
 class PsiStatModel(model):
-    def __init__(self, which, X, X_variance, Z, M, kernel, mu_or_S, dL_=numpy.ones((1, 1))):
+    def __init__(self, which, X, X_variance, Z, M, kernel):
         self.which = which
-        self.dL_ = dL_
         self.X = X
         self.X_variance = X_variance
         self.Z = Z
         self.N, self.Q = X.shape
         self.M, Q = Z.shape
-        self.mu_or_S = mu_or_S
         assert self.Q == Q, "shape missmatch: Z:{!s} X:{!s}".format(Z.shape, X.shape)
         self.kern = kernel
         super(PsiStatModel, self).__init__()
+        self.psi_ = self.kern.__getattribute__(self.which)(self.Z, self.X, self.X_variance)
     def _get_param_names(self):
         Xnames = ["{}_{}_{}".format(what, i, j) for what, i, j in itertools.product(['X', 'X_variance'], range(self.N), range(self.Q))]
         Znames = ["Z_{}_{}".format(i, j) for i, j in itertools.product(range(self.M), range(self.Q))]
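
Editorial note: self.kern.__getattribute__(self.which) is a dynamic lookup of kern.psi0 / kern.psi1 / kern.psi2 by name; getattr(self.kern, self.which) would be the more idiomatic spelling. A toy illustration of the dispatch pattern (hypothetical class, not GPy code):

class Kern(object):
    def psi0(self, Z, mu, S):
        return 0.0
    def psi1(self, Z, mu, S):
        return 1.0

k = Kern()
which = "psi1"
print(getattr(k, which)(None, None, None))  # prints 1.0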
@@ -41,13 +40,12 @@ class PsiStatModel(model):
     def log_likelihood(self):
         return self.kern.__getattribute__(self.which)(self.Z, self.X, self.X_variance).sum()
     def _log_likelihood_gradients(self):
-        psi_ = self.kern.__getattribute__(self.which)(self.Z, self.X, self.X_variance)
-        psimu, psiS = self.kern.__getattribute__("d" + self.which + "_dmuS")(numpy.ones_like(psi_), self.Z, self.X, self.X_variance)
+        psimu, psiS = self.kern.__getattribute__("d" + self.which + "_dmuS")(numpy.ones_like(self.psi_), self.Z, self.X, self.X_variance)
         try:
-            psiZ = self.kern.__getattribute__("d" + self.which + "_dZ")(numpy.ones_like(psi_), self.Z, self.X, self.X_variance)
+            psiZ = self.kern.__getattribute__("d" + self.which + "_dZ")(numpy.ones_like(self.psi_), self.Z, self.X, self.X_variance)
         except AttributeError:
             psiZ = numpy.zeros(self.M * self.Q)
-        thetagrad = self.kern.__getattribute__("d" + self.which + "_dtheta")(numpy.ones_like(psi_), self.Z, self.X, self.X_variance).flatten()
+        thetagrad = self.kern.__getattribute__("d" + self.which + "_dtheta")(numpy.ones_like(self.psi_), self.Z, self.X, self.X_variance).flatten()
         return numpy.hstack((psimu.flatten(), psiS.flatten(), psiZ.flatten(), thetagrad))
 class Test(unittest.TestCase):
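
Editorial note: since log_likelihood is just the summed psi statistic, the upstream gradient dL/dpsi handed to each d<psi>_d* routine is a tensor of ones, which is exactly what numpy.ones_like(self.psi_) expresses; m.checkgrad() then compares these analytic gradients against finite differences. A generic sketch of that comparison (hypothetical helper, not GPy's internals):

import numpy as np

def numeric_grad(f, x, eps=1e-6):
    # Central finite differences over a flat parameter vector,
    # the same idea checkgrad() uses to validate analytic gradients.
    g = np.zeros_like(x)
    for i in range(x.size):
        step = np.zeros_like(x)
        step.flat[i] = eps
        g.flat[i] = (f(x + step) - f(x - step)) / (2 * eps)
    return g

x = np.arange(3.0)
assert np.allclose(numeric_grad(lambda v: (v ** 2).sum(), x), 2 * x)  # d/dx sum(x^2) = 2x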
@@ -72,15 +70,35 @@ class Test(unittest.TestCase):
     def testPsi1(self):
         for k in self.kernels:
-            m = PsiStatModel('psi0', X=self.X, X_variance=self.X_var, Z=self.Z,
+            m = PsiStatModel('psi1', X=self.X, X_variance=self.X_var, Z=self.Z,
                              M=self.M, kernel=k)
             assert m.checkgrad(), "{} x psi1".format("+".join(map(lambda x: x.name, k.parts)))
-    def testPsi2(self):
-        for k in self.kernels:
-            m = PsiStatModel('psi0', X=self.X, X_variance=self.X_var, Z=self.Z,
-                             M=self.M, kernel=k)
-            assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k.parts)))
+    def testPsi2_lin(self):
+        k = self.kernels[0]
+        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
+                         M=self.M, kernel=k)
+        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k.parts)))
+    def testPsi2_lin_bia(self):
+        k = self.kernels[3]
+        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
+                         M=self.M, kernel=k)
+        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k.parts)))
+    def testPsi2_rbf(self):
+        k = self.kernels[1]
+        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
+                         M=self.M, kernel=k)
+        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k.parts)))
+    def testPsi2_rbf_bia(self):
+        k = self.kernels[-1]
+        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
+                         M=self.M, kernel=k)
+        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k.parts)))
+    def testPsi2_bia(self):
+        k = self.kernels[2]
+        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
+                         M=self.M, kernel=k)
+        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k.parts)))
 if __name__ == "__main__":
@@ -94,9 +112,13 @@ if __name__ == "__main__":
     Y = X.dot(numpy.random.randn(Q, D))
     kernel = GPy.kern.linear(Q) # GPy.kern.bias(Q) # GPy.kern.linear(Q) + GPy.kern.rbf(Q)
     m0 = PsiStatModel('psi0', X=X, X_variance=X_var, Z=Z,
-                      M=M, kernel=kernel, mu_or_S=0, dL_=numpy.ones((1)))
+                      M=M, kernel=GPy.kern.linear(Q))
     m1 = PsiStatModel('psi0', X=X, X_variance=X_var, Z=Z,
-                      M=M, kernel=kernel, mu_or_S=0, dL_=numpy.ones((1)))
+                      M=M, kernel=GPy.kern.bias(Q))
     m2 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
-                      M=M, kernel=kernel, mu_or_S=0, dL_=numpy.ones((1, 1, 1)))
+                      M=M, kernel=GPy.kern.rbf(Q))
+    m3 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
+                      M=M, kernel=GPy.kern.linear(Q) + GPy.kern.bias(Q))
+    m4 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
+                      M=M, kernel=GPy.kern.rbf(Q) + GPy.kern.bias(Q))
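
A plausible way to finish the __main__ block, mirroring the unit tests above (assumes only the m0..m4 models defined in this hunk and the same checkgrad() API the tests use; indented to sit inside the if __name__ guard):

    for name, m in [("m0", m0), ("m1", m1), ("m2", m2), ("m3", m3), ("m4", m4)]:
        assert m.checkgrad(), "gradient check failed for " + name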