Mirror of https://github.com/SheffieldML/GPy.git, synced 2026-04-27 22:06:22 +02:00

Commit cddde60988 (parent 5bab8ca976): migrate tests to pytest

5 changed files with 212 additions and 110 deletions
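The migration follows one mechanical pattern throughout: `unittest` assertion helpers such as `self.assertTrue(m.checkgrad())` become bare `assert m.checkgrad()` statements, which pytest's assertion rewriting turns into readable failure reports, and the touched files are reflowed into black-style formatting (double quotes, one argument per line in long calls). A minimal sketch of the pattern, assuming a standard GPy install; the test name is illustrative, not taken from the diff:

import numpy
import GPy


def test_rbf_gplvm_gradient():  # illustrative test name, not from the diff
    Y = numpy.random.randn(10, 4)  # toy data, as in the fixtures below
    m = GPy.models.GPLVM(Y, 2, kernel=GPy.kern.RBF(2))
    m.randomize()
    # before: self.assertTrue(m.checkgrad())
    # after: a bare assert that pytest rewrites into a readable failure report
    assert m.checkgrad()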
@@ -17,7 +17,7 @@ class BCGPLVMTests(unittest.TestCase):
         mapping = GPy.mappings.Kernel(output_dim=input_dim, X=Y, kernel=bk)
         m = GPy.models.BCGPLVM(Y, input_dim, kernel = k, mapping=mapping)
         m.randomize()
-        self.assertTrue(m.checkgrad())
+        assert m.checkgrad()
 
     def test_linear_backconstraint(self):
         num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4
@@ -30,7 +30,7 @@ class BCGPLVMTests(unittest.TestCase):
         mapping = GPy.mappings.Linear(output_dim=input_dim, input_dim=output_dim)
         m = GPy.models.BCGPLVM(Y, input_dim, kernel = k, mapping=mapping)
         m.randomize()
-        self.assertTrue(m.checkgrad())
+        assert m.checkgrad()
 
     def test_mlp_backconstraint(self):
         num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4
@@ -43,7 +43,7 @@ class BCGPLVMTests(unittest.TestCase):
         mapping = GPy.mappings.MLP(output_dim=input_dim, input_dim=output_dim, hidden_dim=[5, 4, 7])
         m = GPy.models.BCGPLVM(Y, input_dim, kernel = k, mapping=mapping)
         m.randomize()
-        self.assertTrue(m.checkgrad())
+        assert m.checkgrad()
 
 if __name__ == "__main__":
     print "Running unit tests, please be (very) patient..."
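One thing this commit does not touch: the `print "..."` in the `__main__` context line above is Python 2 syntax, which is a SyntaxError under Python 3 and would stop the module from being imported (and therefore collected by pytest) at all. A Python 3-compatible version of the block, assuming it goes on to call `unittest.main()` as is conventional, might look like:

if __name__ == "__main__":
    # print() function call instead of the Python 2 print statement
    print("Running unit tests, please be (very) patient...")
    unittest.main()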
@@ -15,7 +15,7 @@ class GPLVMTests(unittest.TestCase):
         k = GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
         m = GPy.models.GPLVM(Y, input_dim, kernel = k)
         m.randomize()
-        self.assertTrue(m.checkgrad())
+        assert m.checkgrad()
 
     def test_linear_kern(self):
         num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4
@@ -26,7 +26,7 @@ class GPLVMTests(unittest.TestCase):
         k = GPy.kern.Linear(input_dim) + GPy.kern.White(input_dim, 0.00001)
         m = GPy.models.GPLVM(Y, input_dim, kernel = k)
         m.randomize()
-        self.assertTrue(m.checkgrad())
+        assert m.checkgrad()
 
     def test_rbf_kern(self):
         num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4
@@ -37,7 +37,7 @@ class GPLVMTests(unittest.TestCase):
         k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
         m = GPy.models.GPLVM(Y, input_dim, kernel = k)
         m.randomize()
-        self.assertTrue(m.checkgrad())
+        assert m.checkgrad()
 
 if __name__ == "__main__":
     print "Running unit tests, please be (very) patient..."
@@ -1,8 +1,8 @@
-'''
+"""
 Created on 22 Apr 2013
 
 @author: maxz
-'''
+"""
 import unittest
 import numpy
 
@@ -13,42 +13,66 @@ from GPy.core.parameterization.param import Param
 from GPy.core.parameterization.transformations import Logexp
 from GPy.core.parameterization.variational import NormalPosterior
 
 
 class PsiStatModel(Model):
     def __init__(self, which, X, X_variance, Z, num_inducing, kernel):
-        super(PsiStatModel, self).__init__(name='psi stat test')
+        super(PsiStatModel, self).__init__(name="psi stat test")
         self.which = which
         self.X = Param("X", X)
-        self.X_variance = Param('X_variance', X_variance, Logexp())
+        self.X_variance = Param("X_variance", X_variance, Logexp())
         self.q = NormalPosterior(self.X, self.X_variance)
         self.Z = Param("Z", Z)
         self.N, self.input_dim = X.shape
         self.num_inducing, input_dim = Z.shape
-        assert self.input_dim == input_dim, "shape missmatch: Z:{!s} X:{!s}".format(Z.shape, X.shape)
+        assert self.input_dim == input_dim, "shape missmatch: Z:{!s} X:{!s}".format(
+            Z.shape, X.shape
+        )
         self.kern = kernel
         self.psi_ = self.kern.__getattribute__(self.which)(self.Z, self.q)
         self.add_parameters(self.q, self.Z, self.kern)
 
     def log_likelihood(self):
-        return self.kern.__getattribute__(self.which)(self.Z, self.X, self.X_variance).sum()
+        return self.kern.__getattribute__(self.which)(
+            self.Z, self.X, self.X_variance
+        ).sum()
 
     def parameters_changed(self):
-        psimu, psiS = self.kern.__getattribute__("d" + self.which + "_dmuS")(numpy.ones_like(self.psi_), self.Z, self.q)
+        psimu, psiS = self.kern.__getattribute__("d" + self.which + "_dmuS")(
+            numpy.ones_like(self.psi_), self.Z, self.q
+        )
         self.X.gradient = psimu
         self.X_variance.gradient = psiS
-        #psimu, psiS = numpy.ones(self.N * self.input_dim), numpy.ones(self.N * self.input_dim)
-        try: psiZ = self.kern.__getattribute__("d" + self.which + "_dZ")(numpy.ones_like(self.psi_), self.Z, self.q)
-        except AttributeError: psiZ = numpy.zeros_like(self.Z)
+        # psimu, psiS = numpy.ones(self.N * self.input_dim), numpy.ones(self.N * self.input_dim)
+        try:
+            psiZ = self.kern.__getattribute__("d" + self.which + "_dZ")(
+                numpy.ones_like(self.psi_), self.Z, self.q
+            )
+        except AttributeError:
+            psiZ = numpy.zeros_like(self.Z)
         self.Z.gradient = psiZ
-        #psiZ = numpy.ones(self.num_inducing * self.input_dim)
-        N,M = self.X.shape[0], self.Z.shape[0]
-        dL_dpsi0, dL_dpsi1, dL_dpsi2 = numpy.zeros([N]), numpy.zeros([N,M]), numpy.zeros([N,M,M])
-        if self.which == 'psi0': dL_dpsi0 += 1
-        if self.which == 'psi1': dL_dpsi1 += 1
-        if self.which == 'psi2': dL_dpsi2 += 1
-        self.kern.update_gradients_variational(numpy.zeros([1,1]),
-                                               dL_dpsi0,
-                                               dL_dpsi1,
-                                               dL_dpsi2, self.X, self.X_variance, self.Z)
+        # psiZ = numpy.ones(self.num_inducing * self.input_dim)
+        N, M = self.X.shape[0], self.Z.shape[0]
+        dL_dpsi0, dL_dpsi1, dL_dpsi2 = (
+            numpy.zeros([N]),
+            numpy.zeros([N, M]),
+            numpy.zeros([N, M, M]),
+        )
+        if self.which == "psi0":
+            dL_dpsi0 += 1
+        if self.which == "psi1":
+            dL_dpsi1 += 1
+        if self.which == "psi2":
+            dL_dpsi2 += 1
+        self.kern.update_gradients_variational(
+            numpy.zeros([1, 1]),
+            dL_dpsi0,
+            dL_dpsi1,
+            dL_dpsi2,
+            self.X,
+            self.X_variance,
+            self.Z,
+        )
 
 
 class DPsiStatTest(unittest.TestCase):
     input_dim = 5
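All of this scaffolding exists so that GPy's `checkgrad` can compare the analytic psi-statistic gradients set in `parameters_changed` against finite differences of `log_likelihood`. A minimal standalone sketch of that finite-difference check, independent of GPy (the function names here are illustrative, not GPy API):

import numpy


def numeric_grad(f, x, eps=1e-6):
    # central finite differences of a scalar function f at the point x
    g = numpy.zeros_like(x)
    for i in range(x.size):
        step = numpy.zeros_like(x)
        step.flat[i] = eps
        g.flat[i] = (f(x + step) - f(x - step)) / (2 * eps)
    return g


def check_grad(f, grad_f, x, tol=1e-4):
    # compare an analytic gradient against finite differences, as checkgrad does
    return numpy.allclose(grad_f(x), numeric_grad(f, x), atol=tol)


# usage: a quadratic whose gradient is known in closed form
f = lambda x: float((x ** 2).sum())
grad_f = lambda x: 2 * x
assert check_grad(f, grad_f, numpy.random.randn(5))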
@@ -56,128 +80,206 @@ class DPsiStatTest(unittest.TestCase):
     num_inducing = 10
     input_dim = 20
     X = numpy.random.randn(N, input_dim)
-    X_var = .5 * numpy.ones_like(X) + .4 * numpy.clip(numpy.random.randn(*X.shape), 0, 1)
+    X_var = 0.5 * numpy.ones_like(X) + 0.4 * numpy.clip(
+        numpy.random.randn(*X.shape), 0, 1
+    )
     Z = numpy.random.permutation(X)[:num_inducing]
     Y = X.dot(numpy.random.randn(input_dim, input_dim))
-    # kernels = [GPy.kern.Linear(input_dim, ARD=True, variances=numpy.random.rand(input_dim)), GPy.kern.RBF(input_dim, ARD=True), GPy.kern.Bias(input_dim)]
+    # kernels = [GPy.kern.Linear(input_dim, ARD=True, variances=numpy.random.rand(input_dim)), GPy.kern.RBF(input_dim, ARD=True), GPy.kern.Bias(input_dim)]
 
     kernels = [
-            GPy.kern.Linear(input_dim),
-            GPy.kern.RBF(input_dim),
-            #GPy.kern.Bias(input_dim),
-            #GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim),
-            #GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim)
-            ]
+        GPy.kern.Linear(input_dim),
+        GPy.kern.RBF(input_dim),
+        # GPy.kern.Bias(input_dim),
+        # GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim),
+        # GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim)
+    ]
 
     def testPsi0(self):
         for k in self.kernels:
-            m = PsiStatModel('psi0', X=self.X, X_variance=self.X_var, Z=self.Z,\
-                             num_inducing=self.num_inducing, kernel=k)
+            m = PsiStatModel(
+                "psi0",
+                X=self.X,
+                X_variance=self.X_var,
+                Z=self.Z,
+                num_inducing=self.num_inducing,
+                kernel=k,
+            )
             m.randomize()
-            assert m.checkgrad(), "{} x psi0".format("+".join(map(lambda x: x.name, k._parameters_)))
+            assert m.checkgrad(), "{} x psi0".format(
+                "+".join(map(lambda x: x.name, k._parameters_))
+            )
 
     def testPsi1(self):
         for k in self.kernels:
-            m = PsiStatModel('psi1', X=self.X, X_variance=self.X_var, Z=self.Z,
-                             num_inducing=self.num_inducing, kernel=k)
+            m = PsiStatModel(
+                "psi1",
+                X=self.X,
+                X_variance=self.X_var,
+                Z=self.Z,
+                num_inducing=self.num_inducing,
+                kernel=k,
+            )
             m.randomize()
-            assert m.checkgrad(), "{} x psi1".format("+".join(map(lambda x: x.name, k._parameters_)))
+            assert m.checkgrad(), "{} x psi1".format(
+                "+".join(map(lambda x: x.name, k._parameters_))
+            )
 
     def testPsi2_lin(self):
         k = self.kernels[0]
-        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
-                         num_inducing=self.num_inducing, kernel=k)
+        m = PsiStatModel(
+            "psi2",
+            X=self.X,
+            X_variance=self.X_var,
+            Z=self.Z,
+            num_inducing=self.num_inducing,
+            kernel=k,
+        )
         m.randomize()
-        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k._parameters_)))
+        assert m.checkgrad(), "{} x psi2".format(
+            "+".join(map(lambda x: x.name, k._parameters_))
+        )
 
     def testPsi2_lin_bia(self):
         k = self.kernels[3]
-        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
-                         num_inducing=self.num_inducing, kernel=k)
+        m = PsiStatModel(
+            "psi2",
+            X=self.X,
+            X_variance=self.X_var,
+            Z=self.Z,
+            num_inducing=self.num_inducing,
+            kernel=k,
+        )
         m.randomize()
-        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k._parameters_)))
+        assert m.checkgrad(), "{} x psi2".format(
+            "+".join(map(lambda x: x.name, k._parameters_))
+        )
 
     def testPsi2_rbf(self):
         k = self.kernels[1]
-        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
-                         num_inducing=self.num_inducing, kernel=k)
+        m = PsiStatModel(
+            "psi2",
+            X=self.X,
+            X_variance=self.X_var,
+            Z=self.Z,
+            num_inducing=self.num_inducing,
+            kernel=k,
+        )
         m.randomize()
-        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k._parameters_)))
+        assert m.checkgrad(), "{} x psi2".format(
+            "+".join(map(lambda x: x.name, k._parameters_))
+        )
 
     def testPsi2_rbf_bia(self):
         k = self.kernels[-1]
-        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
-                         num_inducing=self.num_inducing, kernel=k)
+        m = PsiStatModel(
+            "psi2",
+            X=self.X,
+            X_variance=self.X_var,
+            Z=self.Z,
+            num_inducing=self.num_inducing,
+            kernel=k,
+        )
         m.randomize()
-        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k._parameters_)))
+        assert m.checkgrad(), "{} x psi2".format(
+            "+".join(map(lambda x: x.name, k._parameters_))
+        )
 
     def testPsi2_bia(self):
         k = self.kernels[2]
-        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
-                         num_inducing=self.num_inducing, kernel=k)
+        m = PsiStatModel(
+            "psi2",
+            X=self.X,
+            X_variance=self.X_var,
+            Z=self.Z,
+            num_inducing=self.num_inducing,
+            kernel=k,
+        )
         m.randomize()
-        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k._parameters_)))
+        assert m.checkgrad(), "{} x psi2".format(
+            "+".join(map(lambda x: x.name, k._parameters_))
+        )
 
 
 if __name__ == "__main__":
     import sys
-    interactive = 'i' in sys.argv
+
+    interactive = "i" in sys.argv
     if interactive:
-        # N, num_inducing, input_dim, input_dim = 30, 5, 4, 30
-        # X = numpy.random.rand(N, input_dim)
-        # k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        # K = k.K(X)
-        # Y = numpy.random.multivariate_normal(numpy.zeros(N), K, input_dim).T
-        # Y -= Y.mean(axis=0)
-        # k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
-        # m = GPy.models.Bayesian_GPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
-        # m.randomize()
-        # # self.assertTrue(m.checkgrad())
+        # N, num_inducing, input_dim, input_dim = 30, 5, 4, 30
+        # X = numpy.random.rand(N, input_dim)
+        # k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        # K = k.K(X)
+        # Y = numpy.random.multivariate_normal(numpy.zeros(N), K, input_dim).T
+        # Y -= Y.mean(axis=0)
+        # k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        # m = GPy.models.Bayesian_GPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
+        # m.randomize()
+        # # assert m.checkgrad()
         numpy.random.seed(0)
         input_dim = 3
         N = 3
         num_inducing = 2
         D = 15
         X = numpy.random.randn(N, input_dim)
-        X_var = .5 * numpy.ones_like(X) + .1 * numpy.clip(numpy.random.randn(*X.shape), 0, 1)
+        X_var = 0.5 * numpy.ones_like(X) + 0.1 * numpy.clip(
+            numpy.random.randn(*X.shape), 0, 1
+        )
         Z = numpy.random.permutation(X)[:num_inducing]
         Y = X.dot(numpy.random.randn(input_dim, D))
-        # kernel = GPy.kern.Bias(input_dim)
-        #
-        # kernels = [GPy.kern.Linear(input_dim), GPy.kern.RBF(input_dim), GPy.kern.Bias(input_dim),
-        #            GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim),
-        #            GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim)]
+        # kernel = GPy.kern.Bias(input_dim)
+        #
+        # kernels = [GPy.kern.Linear(input_dim), GPy.kern.RBF(input_dim), GPy.kern.Bias(input_dim),
+        #            GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim),
+        #            GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim)]
 
-        # for k in kernels:
-        # m = PsiStatModel('psi1', X=X, X_variance=X_var, Z=Z,
-        # num_inducing=num_inducing, kernel=k)
-        # assert m.checkgrad(), "{} x psi1".format("+".join(map(lambda x: x.name, k.parts)))
-        #
-        m0 = PsiStatModel('psi0', X=X, X_variance=X_var, Z=Z,
-                          num_inducing=num_inducing, kernel=GPy.kern.RBF(input_dim)+GPy.kern.Bias(input_dim))
-        # m1 = PsiStatModel('psi1', X=X, X_variance=X_var, Z=Z,
-        # num_inducing=num_inducing, kernel=kernel)
-        # m1 = PsiStatModel('psi1', X=X, X_variance=X_var, Z=Z,
-        # num_inducing=num_inducing, kernel=kernel)
-        # m2 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
-        # num_inducing=num_inducing, kernel=GPy.kern.RBF(input_dim))
-        # m3 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
-        # num_inducing=num_inducing, kernel=GPy.kern.Linear(input_dim, ARD=True, variances=numpy.random.rand(input_dim)))
+        # for k in kernels:
+        #     m = PsiStatModel('psi1', X=X, X_variance=X_var, Z=Z,
+        #                      num_inducing=num_inducing, kernel=k)
+        #     assert m.checkgrad(), "{} x psi1".format("+".join(map(lambda x: x.name, k.parts)))
+        #
+        m0 = PsiStatModel(
+            "psi0",
+            X=X,
+            X_variance=X_var,
+            Z=Z,
+            num_inducing=num_inducing,
+            kernel=GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim),
+        )
+        # m1 = PsiStatModel('psi1', X=X, X_variance=X_var, Z=Z,
+        #                   num_inducing=num_inducing, kernel=kernel)
+        # m1 = PsiStatModel('psi1', X=X, X_variance=X_var, Z=Z,
+        #                   num_inducing=num_inducing, kernel=kernel)
+        # m2 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
+        #                   num_inducing=num_inducing, kernel=GPy.kern.RBF(input_dim))
+        # m3 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
+        #                   num_inducing=num_inducing, kernel=GPy.kern.Linear(input_dim, ARD=True, variances=numpy.random.rand(input_dim)))
         # + GPy.kern.Bias(input_dim))
-        # m = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
-        # num_inducing=num_inducing,
-        # kernel=(
-        # GPy.kern.RBF(input_dim, ARD=1)
-        # +GPy.kern.Linear(input_dim, ARD=1)
-        # +GPy.kern.Bias(input_dim))
-        # )
-        # m.ensure_default_constraints()
-        m2 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
-                          num_inducing=num_inducing, kernel=(
-                              GPy.kern.RBF(input_dim, numpy.random.rand(), numpy.random.rand(input_dim), ARD=1)
-                              #+GPy.kern.Linear(input_dim, numpy.random.rand(input_dim), ARD=1)
-                              #+GPy.kern.RBF(input_dim, numpy.random.rand(), numpy.random.rand(input_dim), ARD=1)
-                              #+GPy.kern.RBF(input_dim, numpy.random.rand(), numpy.random.rand(), ARD=0)
-                              +GPy.kern.Bias(input_dim)
-                              +GPy.kern.White(input_dim)
-                          )
-                          )
-        #m2.ensure_default_constraints()
+        # m = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
+        #                  num_inducing=num_inducing,
+        #                  kernel=(
+        #                      GPy.kern.RBF(input_dim, ARD=1)
+        #                      +GPy.kern.Linear(input_dim, ARD=1)
+        #                      +GPy.kern.Bias(input_dim))
+        #                  )
+        # m.ensure_default_constraints()
+        m2 = PsiStatModel(
+            "psi2",
+            X=X,
+            X_variance=X_var,
+            Z=Z,
+            num_inducing=num_inducing,
+            kernel=(
+                GPy.kern.RBF(
+                    input_dim, numpy.random.rand(), numpy.random.rand(input_dim), ARD=1
+                )
+                # +GPy.kern.Linear(input_dim, numpy.random.rand(input_dim), ARD=1)
+                # +GPy.kern.RBF(input_dim, numpy.random.rand(), numpy.random.rand(input_dim), ARD=1)
+                # +GPy.kern.RBF(input_dim, numpy.random.rand(), numpy.random.rand(), ARD=0)
+                + GPy.kern.Bias(input_dim)
+                + GPy.kern.White(input_dim)
+            ),
+        )
+        # m2.ensure_default_constraints()
     else:
         unittest.main()
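When one of these asserts fails, pytest prints the message operand, so the report names the failing kernel combination, e.g. `AssertionError: rbf+bias x psi2`. The `"+".join(map(lambda x: x.name, k._parameters_))` idiom builds that label from the summed kernel's parts; a standalone equivalent, with a hypothetical stand-in for the kernel parts:

class FakePart:  # hypothetical stand-in for a kernel part with a .name attribute
    def __init__(self, name):
        self.name = name


parts = [FakePart("rbf"), FakePart("bias")]
label_map = "+".join(map(lambda x: x.name, parts))  # style used in the diff
label_gen = "+".join(p.name for p in parts)  # equivalent generator-expression form
assert label_map == label_gen == "rbf+bias"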
@@ -16,7 +16,7 @@ class sparse_GPLVMTests(unittest.TestCase):
         k = GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
         m = SparseGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
         m.randomize()
-        self.assertTrue(m.checkgrad())
+        assert m.checkgrad()
 
     def test_linear_kern(self):
         N, num_inducing, input_dim, D = 10, 3, 2, 4
@@ -27,7 +27,7 @@ class sparse_GPLVMTests(unittest.TestCase):
         k = GPy.kern.Linear(input_dim) + GPy.kern.White(input_dim, 0.00001)
         m = SparseGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
         m.randomize()
-        self.assertTrue(m.checkgrad())
+        assert m.checkgrad()
 
     def test_rbf_kern(self):
         N, num_inducing, input_dim, D = 10, 3, 2, 4
@@ -38,7 +38,7 @@ class sparse_GPLVMTests(unittest.TestCase):
         k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
         m = SparseGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
         m.randomize()
-        self.assertTrue(m.checkgrad())
+        assert m.checkgrad()
 
 if __name__ == "__main__":
     print "Running unit tests, please be (very) patient..."
@@ -35,7 +35,7 @@ class ListDictTestCase:
 
 class TestPickleSupport(ListDictTestCase):
     # TODO: why is this test skipped?
-    @pytest.mark.skip("") # TODO
+    @pytest.mark.skip(reason="")
     def test_load_pickle(self):
         import os
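`@pytest.mark.skip(reason="")` is valid, but the empty reason carries no information; the `reason` keyword is what pytest shows in `-rs` skip summaries. A minimal sketch with a reason filled in (the wording is illustrative; the original TODO about why the test is skipped stands):

import pytest


@pytest.mark.skip(reason="TODO: document why this test is skipped")  # illustrative reason
def test_load_pickle():
    ...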