Mirror of https://github.com/SheffieldML/GPy.git, synced 2026-05-04 17:22:39 +02:00

Commit 601175de2d: "very weird merge conflict, including in files that I did not change"
73 changed files with 2234 additions and 1567 deletions
@ -1,85 +0,0 @@ (entire file deleted)
# Copyright (c) 2012, Nicolo Fusi
# Licensed under the BSD 3-clause license (see LICENSE.txt)

import unittest
import numpy as np
import GPy
from ..models import BayesianGPLVM


class BGPLVMTests(unittest.TestCase):
    def test_bias_kern(self):
        N, num_inducing, input_dim, D = 10, 3, 2, 4
        X = np.random.rand(N, input_dim)
        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
        K = k.K(X)
        Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
        Y -= Y.mean(axis=0)
        k = GPy.kern.bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
        m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
        m.randomize()
        self.assertTrue(m.checkgrad())

    def test_linear_kern(self):
        N, num_inducing, input_dim, D = 10, 3, 2, 4
        X = np.random.rand(N, input_dim)
        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
        K = k.K(X)
        Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
        Y -= Y.mean(axis=0)
        k = GPy.kern.Linear(input_dim) + GPy.kern.White(input_dim, 0.00001)
        m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
        m.randomize()
        self.assertTrue(m.checkgrad())

    def test_rbf_kern(self):
        N, num_inducing, input_dim, D = 10, 3, 2, 4
        X = np.random.rand(N, input_dim)
        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
        K = k.K(X)
        Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
        Y -= Y.mean(axis=0)
        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
        m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
        m.randomize()
        self.assertTrue(m.checkgrad())

    def test_rbf_bias_kern(self):
        N, num_inducing, input_dim, D = 10, 3, 2, 4
        X = np.random.rand(N, input_dim)
        k = GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
        K = k.K(X)
        Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
        Y -= Y.mean(axis=0)
        k = GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
        m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
        m.randomize()
        self.assertTrue(m.checkgrad())

    def test_rbf_line_kern(self):
        N, num_inducing, input_dim, D = 10, 3, 2, 4
        X = np.random.rand(N, input_dim)
        k = GPy.kern.RBF(input_dim) + GPy.kern.Linear(input_dim) + GPy.kern.White(input_dim, 0.00001)
        K = k.K(X)
        Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
        Y -= Y.mean(axis=0)
        k = GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
        m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
        m.randomize()
        self.assertTrue(m.checkgrad())

    def test_linear_bias_kern(self):
        N, num_inducing, input_dim, D = 30, 5, 4, 30
        X = np.random.rand(N, input_dim)
        k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
        K = k.K(X)
        Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
        Y -= Y.mean(axis=0)
        k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
        m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
        m.randomize()
        self.assertTrue(m.checkgrad())


if __name__ == "__main__":
    print "Running unit tests, please be (very) patient..."
    unittest.main()
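All of the deleted tests above build their data the same way: sample latent points, form a kernel matrix, then draw each output dimension from a zero-mean multivariate normal. A minimal NumPy-only sketch of that sampling pattern (the helper name and jitter value are illustrative, not part of GPy):

import numpy as np

def sample_gp_outputs(K, num_outputs, jitter=1e-5):
    """Draw num_outputs function samples from a zero-mean GP with covariance K."""
    N = K.shape[0]
    # a small diagonal jitter keeps the covariance numerically PSD,
    # the same role the White kernel plays in the tests above
    K_stable = K + jitter * np.eye(N)
    # multivariate_normal returns one row per sample, so transpose to (N, num_outputs)
    Y = np.random.multivariate_normal(np.zeros(N), K_stable, num_outputs).T
    return Y - Y.mean(axis=0)  # centre each output column, as the tests do

# usage: an RBF kernel matrix built by hand
X = np.random.rand(10, 2)
sqdist = ((X[:, None, :] - X[None, :, :]) ** 2).sum(-1)
K = np.exp(-0.5 * sqdist)
Y = sample_gp_outputs(K, num_outputs=4)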
@ -17,24 +17,33 @@ class Test(unittest.TestCase):
        self.param_index.add(one, [3])
        self.param_index.add(two, [0,5])
        self.param_index.add(three, [2,4,7])
+       self.view = ParameterIndexOperationsView(self.param_index, 2, 6)

    def test_clear(self):
        self.param_index.clear()
        self.assertDictEqual(self.param_index._properties, {})

    def test_remove(self):
        self.param_index.remove(three, np.r_[3:10])
        self.assertListEqual(self.param_index[three].tolist(), [2])
        self.param_index.remove(one, [1])
        self.assertListEqual(self.param_index[one].tolist(), [3])
-       self.assertListEqual(self.param_index.remove('not in there', []).tolist(), [])
+       self.param_index.remove(one, [9])
+       self.assertListEqual(self.param_index[one].tolist(), [3])
+       self.assertListEqual(self.param_index.remove('not in there', [2,3,4]).tolist(), [])

    def test_shift_left(self):
        self.param_index.shift_left(1, 2)
+       self.view.shift_left(0, 2)
        self.assertListEqual(self.param_index[three].tolist(), [2,5])
        self.assertListEqual(self.param_index[two].tolist(), [0,3])
-       self.assertListEqual(self.param_index[one].tolist(), [1])
+       self.assertListEqual(self.param_index[one].tolist(), [])

    def test_shift_right(self):
        self.param_index.shift_right(5, 2)
+       self.view.shift_right(3, 2)
        self.assertListEqual(self.param_index[three].tolist(), [2,4,9])
        self.assertListEqual(self.param_index[two].tolist(), [0,7])
        self.assertListEqual(self.param_index[one].tolist(), [3])

    def test_index_view(self):
        #=======================================================================

@ -44,17 +53,17 @@ class Test(unittest.TestCase):
        # three three three
        # view: [0 1 2 3 4 5 ]
        #=======================================================================
-       view = ParameterIndexOperationsView(self.param_index, 2, 6)
-       self.assertSetEqual(set(view.properties()), set([one, two, three]))
-       for v,p in zip(view.properties_for(np.r_[:6]), self.param_index.properties_for(np.r_[2:2+6])):
+       self.view = ParameterIndexOperationsView(self.param_index, 2, 6)
+       self.assertSetEqual(set(self.view.properties()), set([one, two, three]))
+       for v,p in zip(self.view.properties_for(np.r_[:6]), self.param_index.properties_for(np.r_[2:2+6])):
            self.assertEqual(v, p)
-       self.assertSetEqual(set(view[two]), set([3]))
+       self.assertSetEqual(set(self.view[two]), set([3]))
        self.assertSetEqual(set(self.param_index[two]), set([0, 5]))
-       view.add(two, np.array([0]))
-       self.assertSetEqual(set(view[two]), set([0,3]))
+       self.view.add(two, np.array([0]))
+       self.assertSetEqual(set(self.view[two]), set([0,3]))
        self.assertSetEqual(set(self.param_index[two]), set([0, 2, 5]))
-       view.clear()
-       for v,p in zip(view.properties_for(np.r_[:6]), self.param_index.properties_for(np.r_[2:2+6])):
-           self.assertEqual(v, p)
+       self.view.clear()
+       for v,p in zip(self.view.properties_for(np.r_[:6]), self.param_index.properties_for(np.r_[2:2+6])):
+           self.assertEqual(v, [])
        param_index = ParameterIndexOperations()

@ -62,11 +71,17 @@ class Test(unittest.TestCase):
        param_index.add(two, [0,5])
        param_index.add(three, [2,4,7])
        view2 = ParameterIndexOperationsView(param_index, 2, 6)
-       view.update(view2)
+       self.view.update(view2)
        for [i,v],[i2,v2] in zip(sorted(param_index.items()), sorted(self.param_index.items())):
            self.assertEqual(i, i2)
            self.assertTrue(np.all(v == v2))

+   def test_misc(self):
+       for k,v in self.param_index.copy()._properties.iteritems():
+           self.assertListEqual(self.param_index[k].tolist(), v.tolist())
+       self.assertEqual(self.param_index.size, 6)
+       self.assertEqual(self.view.size, 5)

if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.test_index_view']
    unittest.main()
@ -6,7 +6,9 @@ import numpy as np
|
|||
import GPy
|
||||
import sys
|
||||
|
||||
verbose = True
|
||||
verbose = 0
|
||||
|
||||
|
||||
|
||||
class Kern_check_model(GPy.core.Model):
|
||||
"""
|
||||
|
|
@ -31,9 +33,10 @@ class Kern_check_model(GPy.core.Model):
|
|||
self.X2 = X2
|
||||
self.dL_dK = dL_dK
|
||||
|
||||
def is_positive_definite(self):
|
||||
def is_positive_semi_definite(self):
|
||||
v = np.linalg.eig(self.kernel.K(self.X))[0]
|
||||
if any(v<-10*sys.float_info.epsilon):
|
||||
if any(v.real<=-1e-10):
|
||||
print v.real.min()
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
|
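The change above swaps a strict positive-definiteness check for a positive semi-definiteness check with a small absolute tolerance on the real parts of the eigenvalues. A standalone sketch of the same idea (a hypothetical helper, not GPy's API; it uses np.linalg.eigvalsh for symmetric matrices where the code above uses np.linalg.eig and inspects the real parts):

import numpy as np

def is_positive_semi_definite(K, tol=1e-10):
    # Eigenvalues of a PSD matrix are >= 0; floating-point round-off can
    # push them slightly negative, so anything above -tol is accepted.
    eigvals = np.linalg.eigvalsh(K)
    return bool(eigvals.min() >= -tol)

A = np.random.randn(5, 3)
assert is_positive_semi_definite(A.dot(A.T))      # Gram matrices are PSD
assert not is_positive_semi_definite(-np.eye(5))  # clearly not PSD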
@ -87,11 +90,11 @@ class Kern_check_dKdiag_dX(Kern_check_dK_dX):
        return (np.diag(self.dL_dK)*self.kernel.Kdiag(self.X)).sum()

    def parameters_changed(self):
-       self.X.gradient = self.kernel.gradients_X_diag(self.dL_dK, self.X)
+       self.X.gradient = self.kernel.gradients_X_diag(self.dL_dK.diagonal(), self.X)



-def kern_test(kern, X=None, X2=None, output_ind=None, verbose=False):
+def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verbose=False, fixed_X_dims=None):
    """
    This function runs on kernels to check the correctness of their
    implementation. It checks that the covariance function is positive definite
@ -106,18 +109,18 @@ def kern_test(kern, X=None, X2=None, output_ind=None, verbose=False):

    """
    pass_checks = True
-   if X==None:
+   if X is None:
        X = np.random.randn(10, kern.input_dim)
        if output_ind is not None:
            X[:, output_ind] = np.random.randint(kern.output_dim, X.shape[0])
-   if X2==None:
+   if X2 is None:
        X2 = np.random.randn(20, kern.input_dim)
        if output_ind is not None:
            X2[:, output_ind] = np.random.randint(kern.output_dim, X2.shape[0])

    if verbose:
        print("Checking covariance function is positive definite.")
-   result = Kern_check_model(kern, X=X).is_positive_definite()
+   result = Kern_check_model(kern, X=X).is_positive_semi_definite()
    if result and verbose:
        print("Check passed.")
    if not result:
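The checks in this function all bottom out in checkgrad, which compares analytic gradients against finite differences. A minimal central-difference checker in plain NumPy (illustrative only; GPy's checkgrad operates on whole models rather than bare functions):

import numpy as np

def checkgrad(f, grad_f, x, step=1e-6, rtol=1e-4):
    """Compare an analytic gradient with central finite differences.

    f maps a 1-D array to a scalar; grad_f returns its analytic gradient.
    """
    numeric = np.empty_like(x)
    for i in range(x.size):
        e = np.zeros_like(x)
        e[i] = step
        # central difference: (f(x+h) - f(x-h)) / 2h
        numeric[i] = (f(x + e) - f(x - e)) / (2 * step)
    analytic = grad_f(x)
    return np.allclose(numeric, analytic, rtol=rtol, atol=1e-8)

# usage: f(x) = sum(x**2) has gradient 2x
assert checkgrad(lambda x: (x ** 2).sum(), lambda x: 2 * x, np.random.randn(5))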
@ -161,7 +164,10 @@ def kern_test(kern, X=None, X2=None, output_ind=None, verbose=False):
    if verbose:
        print("Checking gradients of K(X, X) wrt X.")
    try:
-       result = Kern_check_dK_dX(kern, X=X, X2=None).checkgrad(verbose=verbose)
+       testmodel = Kern_check_dK_dX(kern, X=X, X2=None)
+       if fixed_X_dims is not None:
+           testmodel.X[:,fixed_X_dims].fix()
+       result = testmodel.checkgrad(verbose=verbose)
    except NotImplementedError:
        result=True
    if verbose:

@ -170,14 +176,17 @@ def kern_test(kern, X=None, X2=None, output_ind=None, verbose=False):
        print("Check passed.")
    if not result:
        print("Gradient of K(X, X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
-       Kern_check_dK_dX(kern, X=X, X2=None).checkgrad(verbose=True)
+       testmodel.checkgrad(verbose=True)
        pass_checks = False
        return False

    if verbose:
        print("Checking gradients of K(X, X2) wrt X.")
    try:
-       result = Kern_check_dK_dX(kern, X=X, X2=X2).checkgrad(verbose=verbose)
+       testmodel = Kern_check_dK_dX(kern, X=X, X2=X2)
+       if fixed_X_dims is not None:
+           testmodel.X[:,fixed_X_dims].fix()
+       result = testmodel.checkgrad(verbose=verbose)
    except NotImplementedError:
        result=True
    if verbose:

@ -185,8 +194,8 @@ def kern_test(kern, X=None, X2=None, output_ind=None, verbose=False):
    if result and verbose:
        print("Check passed.")
    if not result:
-       print("Gradient of K(X, X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
-       Kern_check_dK_dX(kern, X=X, X2=X2).checkgrad(verbose=True)
+       print("Gradient of K(X, X2) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:")
+       testmodel.checkgrad(verbose=True)
        pass_checks = False
        return False
@ -210,27 +219,137 @@ def kern_test(kern, X=None, X2=None, output_ind=None, verbose=False):


-class KernelTestsContinuous(unittest.TestCase):
+class KernelGradientTestsContinuous(unittest.TestCase):
    def setUp(self):
-       self.X = np.random.randn(100,2)
-       self.X2 = np.random.randn(110,2)
+       self.N, self.D = 100, 5
+       self.X = np.random.randn(self.N,self.D)
+       self.X2 = np.random.randn(self.N+10,self.D)

-       continuous_kerns = ['RBF', 'Linear']
-       self.kernclasses = [getattr(GPy.kern, s) for s in continuous_kerns]

    def test_Matern32(self):
-       k = GPy.kern.Matern32(2)
-       self.assertTrue(kern_test(k, X=self.X, X2=self.X2, verbose=verbose))
+       k = GPy.kern.Matern32(self.D)
+       k.randomize()
+       self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

+   def test_Prod(self):
+       k = GPy.kern.Matern32(2, active_dims=[2,3]) * GPy.kern.RBF(2, active_dims=[0,4]) + GPy.kern.Linear(self.D)
+       k.randomize()
+       self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

+   def test_Add(self):
+       k = GPy.kern.Matern32(2, active_dims=[2,3]) + GPy.kern.RBF(2, active_dims=[0,4]) + GPy.kern.Linear(self.D)
+       k.randomize()
+       self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

    def test_Matern52(self):
-       k = GPy.kern.Matern52(2)
-       self.assertTrue(kern_test(k, X=self.X, X2=self.X2, verbose=verbose))
+       k = GPy.kern.Matern52(self.D)
+       k.randomize()
+       self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

-   #TODO: turn off grad checkingwrt X for indexed kernels liek coregionalize
+   def test_RBF(self):
+       k = GPy.kern.RBF(self.D)
+       k.randomize()
+       self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

+   def test_Linear(self):
+       k = GPy.kern.Linear(self.D)
+       k.randomize()
+       self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))

+   #TODO: turn off grad checkingwrt X for indexed kernels like coregionalize
+# class KernelGradientTestsContinuous1D(unittest.TestCase):
+#     def setUp(self):
+#         self.N, self.D = 100, 1
+#         self.X = np.random.randn(self.N,self.D)
+#         self.X2 = np.random.randn(self.N+10,self.D)
+#
+#         continuous_kerns = ['RBF', 'Linear']
+#         self.kernclasses = [getattr(GPy.kern, s) for s in continuous_kerns]
+#
+#     def test_PeriodicExponential(self):
+#         k = GPy.kern.PeriodicExponential(self.D)
+#         k.randomize()
+#         self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
+#
+#     def test_PeriodicMatern32(self):
+#         k = GPy.kern.PeriodicMatern32(self.D)
+#         k.randomize()
+#         self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
+#
+#     def test_PeriodicMatern52(self):
+#         k = GPy.kern.PeriodicMatern52(self.D)
+#         k.randomize()
+#         self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))


+class KernelTestsMiscellaneous(unittest.TestCase):
+   def setUp(self):
+       N, D = 100, 10
+       self.X = np.linspace(-np.pi, +np.pi, N)[:,None] * np.ones(D)
+       self.rbf = GPy.kern.RBF(2, active_dims=slice(0,4,2))
+       self.linear = GPy.kern.Linear(2, active_dims=(3,9))
+       self.matern = GPy.kern.Matern32(3, active_dims=np.array([2,4,9]))
+       self.sumkern = self.rbf + self.linear
+       self.sumkern += self.matern
+       self.sumkern.randomize()

+   def test_active_dims(self):
+       self.assertEqual(self.sumkern.input_dim, 10)
+       self.assertEqual(self.sumkern.active_dims, slice(0, 10, 1))

+   def test_which_parts(self):
+       self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.matern]), self.linear.K(self.X)+self.matern.K(self.X)))
+       self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.rbf]), self.linear.K(self.X)+self.rbf.K(self.X)))
+       self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=self.sumkern.parts[0]), self.rbf.K(self.X)))

+class KernelTestsNonContinuous(unittest.TestCase):
+   def setUp(self):
+       N0 = 3
+       N1 = 9
+       N2 = 4
+       N = N0+N1+N2
+       self.D = 3
+       self.X = np.random.randn(N, self.D+1)
+       indices = np.random.random_integers(0, 2, size=N)
+       self.X[indices==0, -1] = 0
+       self.X[indices==1, -1] = 1
+       self.X[indices==2, -1] = 2
+       #self.X = self.X[self.X[:, -1].argsort(), :]
+       self.X2 = np.random.randn((N0+N1)*2, self.D+1)
+       self.X2[:(N0*2), -1] = 0
+       self.X2[(N0*2):, -1] = 1

+   def test_IndependentOutputs(self):
+       k = GPy.kern.RBF(self.D)
+       kern = GPy.kern.IndependentOutputs(k, -1, 'ind_single')
+       self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))
+       k = [GPy.kern.RBF(1, active_dims=[1], name='rbf1'), GPy.kern.RBF(self.D, name='rbf012'), GPy.kern.RBF(2, active_dims=[0,2], name='rbf02')]
+       kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
+       self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))

if __name__ == "__main__":
-   print "Running unit tests, please be (very) patient..."
-   unittest.main()
+   #unittest.main()
+   np.random.seed(0)
+   N0 = 3
+   N1 = 9
+   N2 = 4
+   N = N0+N1+N2
+   D = 3
+   X = np.random.randn(N, D+1)
+   indices = np.random.random_integers(0, 2, size=N)
+   X[indices==0, -1] = 0
+   X[indices==1, -1] = 1
+   X[indices==2, -1] = 2
+   #X = X[X[:, -1].argsort(), :]
+   X2 = np.random.randn((N0+N1)*2, D+1)
+   X2[:(N0*2), -1] = 0
+   X2[(N0*2):, -1] = 1
+   k = [GPy.kern.RBF(1, active_dims=[1], name='rbf1'), GPy.kern.RBF(D, name='rbf012'), GPy.kern.RBF(2, active_dims=[0,2], name='rbf02')]
+   kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
+   assert(check_kernel_gradient_functions(kern, X=X, X2=X2, verbose=verbose, fixed_X_dims=-1))
+   k = GPy.kern.RBF(D)
+   kern = GPy.kern.IndependentOutputs(k, -1, 'ind_single')
+   assert(check_kernel_gradient_functions(kern, X=X, X2=X2, verbose=verbose, fixed_X_dims=-1))
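The new tests combine sub-kernels through active_dims, so each summand only sees a subset of the input columns, and which_parts evaluates a chosen subset of the summands. A toy NumPy sketch of that mechanism (both classes are hypothetical stand-ins, not GPy's kernel classes):

import numpy as np

class RBFOnDims:
    """A toy RBF kernel restricted to a subset of input columns."""
    def __init__(self, active_dims, lengthscale=1.0):
        self.active_dims = active_dims
        self.lengthscale = lengthscale

    def K(self, X):
        Xs = X[:, self.active_dims] / self.lengthscale
        sqdist = ((Xs[:, None, :] - Xs[None, :, :]) ** 2).sum(-1)
        return np.exp(-0.5 * sqdist)

class SumKernel:
    def __init__(self, parts):
        self.parts = parts

    def K(self, X, which_parts=None):
        # which_parts mirrors the GPy test: evaluate only some of the summands
        parts = self.parts if which_parts is None else which_parts
        return sum(p.K(X) for p in parts)

X = np.random.randn(20, 10)
k1, k2 = RBFOnDims([0, 4]), RBFOnDims([2, 3])
ksum = SumKernel([k1, k2])
assert np.allclose(ksum.K(X, which_parts=[k1]), k1.K(X))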
@ -1,11 +1,11 @@
import numpy as np
import unittest
import GPy
-from ..models import GradientChecker
+from GPy.models import GradientChecker
import functools
import inspect
-from ..likelihoods import link_functions
-from ..core.parameterization import Param
+from GPy.likelihoods import link_functions
+from GPy.core.parameterization import Param
from functools import partial
#np.random.seed(300)
#np.random.seed(7)

@ -541,7 +541,8 @@ class TestNoiseModels(object):
        #import ipdb; ipdb.set_trace()
        #NOTE this test appears to be stochastic for some likelihoods (student t?)
+       # appears to all be working in test mode right now...

        #if isinstance(model, GPy.likelihoods.StudentT):
        #    import ipdb;ipdb.set_trace()
        assert m.checkgrad(step=step)

        ###########

@ -664,12 +665,11 @@ class LaplaceTests(unittest.TestCase):
        print m1
        print m2

-       m2.parameters_changed()
        #m2._set_params(m1._get_params())
        m2[:] = m1[:]

        #Predict for training points to get posterior mean and variance
-       post_mean, post_var, _, _ = m1.predict(X)
-       post_mean_approx, post_var_approx, _, _ = m2.predict(X)
+       post_mean, post_var = m1.predict(X)
+       post_mean_approx, post_var_approx, = m2.predict(X)

        if debug:
            import pylab as pb

@ -701,8 +701,8 @@ class LaplaceTests(unittest.TestCase):
        np.testing.assert_almost_equal(m1.log_likelihood(), m2.log_likelihood(), decimal=2)
        #Check marginals are the same with random
        m1.randomize()
-       #m2._set_params(m1._get_params())
-       m2.parameters_changed()
+       m2[:] = m1[:]

        np.testing.assert_almost_equal(m1.log_likelihood(), m2.log_likelihood(), decimal=2)

        #Check they are checkgradding
@ -1,32 +0,0 @@ (entire file deleted)
# Copyright (c) 2013, Max Zwiessele
# Licensed under the BSD 3-clause license (see LICENSE.txt)
'''
Created on 10 Apr 2013

@author: maxz
'''

import unittest
import numpy as np
import GPy

class MRDTests(unittest.TestCase):

    def test_gradients(self):
        num_m = 3
        N, num_inducing, input_dim, D = 20, 8, 6, 20
        X = np.random.rand(N, input_dim)

        k = GPy.kern.linear(input_dim) + GPy.kern.bias(input_dim) + GPy.kern.white(input_dim)
        K = k.K(X)

        Ylist = [np.random.multivariate_normal(np.zeros(N), K, input_dim).T for _ in range(num_m)]
        likelihood_list = [GPy.likelihoods.Gaussian(Y) for Y in Ylist]

        m = GPy.models.MRD(likelihood_list, input_dim=input_dim, kernels=k, num_inducing=num_inducing)

        self.assertTrue(m.checkgrad())

if __name__ == "__main__":
    print "Running unit tests, please be (very) patient..."
    unittest.main()
@ -8,7 +8,7 @@ from GPy.core.parameterization.parameterized import Parameterized
from GPy.core.parameterization.param import Param
import numpy

# One trigger in init
_trigger_start = -1

class ParamTestParent(Parameterized):

@ -21,11 +21,9 @@ class ParameterizedTest(Parameterized):
    params_changed_count = _trigger_start
    def parameters_changed(self):
        self.params_changed_count += 1
-   def _set_params(self, params, trigger_parent=True):
-       Parameterized._set_params(self, params, trigger_parent=trigger_parent)

class Test(unittest.TestCase):


    def setUp(self):
        self.parent = ParamTestParent('test parent')
        self.par = ParameterizedTest('test model')

@ -41,12 +39,12 @@ class Test(unittest.TestCase):

        self.parent.add_parameter(self.par)
        self.parent.add_parameter(self.par2)

        self._observer_triggered = None
        self._trigger_count = 0
        self._first = None
        self._second = None

    def _trigger(self, which):
        self._observer_triggered = float(which)
        self._trigger_count += 1

@ -54,18 +52,18 @@ class Test(unittest.TestCase):
            self._second = self._trigger
        else:
            self._first = self._trigger

    def _trigger_priority(self, which):
        if self._first is not None:
            self._second = self._trigger_priority
        else:
            self._first = self._trigger_priority

    def test_observable(self):
        self.par.add_observer(self, self._trigger, -1)
        self.assertEqual(self.par.params_changed_count, 0, 'no params changed yet')
        self.assertEqual(self.par.params_changed_count, self.parent.parent_changed_count, 'parent should be triggered as often as param')

        self.p[0,1] = 3 # trigger observers
        self.assertEqual(self._observer_triggered, 3, 'observer should have triggered')
        self.assertEqual(self._trigger_count, 1, 'observer should have triggered once')

@ -78,14 +76,14 @@ class Test(unittest.TestCase):
        self.assertEqual(self._trigger_count, 1, 'observer should have triggered once')
        self.assertEqual(self.par.params_changed_count, 2, 'params changed second')
        self.assertEqual(self.par.params_changed_count, self.parent.parent_changed_count, 'parent should be triggered as often as param')

        self.par.add_observer(self, self._trigger, -1)
        self.p[2,1] = 4
        self.assertEqual(self._observer_triggered, 4, 'observer should have triggered')
        self.assertEqual(self._trigger_count, 2, 'observer should have triggered once')
        self.assertEqual(self.par.params_changed_count, 3, 'params changed second')
        self.assertEqual(self.par.params_changed_count, self.parent.parent_changed_count, 'parent should be triggered as often as param')

        self.par.remove_observer(self, self._trigger)
        self.p[0,1] = 3
        self.assertEqual(self._observer_triggered, 4, 'observer should not have triggered')

@ -99,7 +97,7 @@ class Test(unittest.TestCase):
        self.par._trigger_params_changed()
        self.assertEqual(self.par.params_changed_count, 1, 'now params changed')
        self.assertEqual(self.parent.parent_changed_count, self.par.params_changed_count)

        self.par._param_array_[:] = 2
        self.par._trigger_params_changed()
        self.assertEqual(self.par.params_changed_count, 2, 'now params changed')

@ -125,13 +123,13 @@ class Test(unittest.TestCase):

        self.par.remove_observer(self)
        self._first = self._second = None

        self.par.add_observer(self, self._trigger, 1)
        self.par.add_observer(self, self._trigger_priority, 0)
        self.par.notify_observers(0)
        self.assertEqual(self._first, self._trigger, 'priority should be second')
        self.assertEqual(self._second, self._trigger_priority, 'priority should be second')


if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testName']
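These tests pin down an observer mechanism: callbacks are registered with a priority, and notify_observers fires them in priority order (the assertions above expect the observer added with the higher priority value to run first). A minimal sketch of such a priority-ordered observer list, under that ordering assumption (illustrative, not GPy's implementation):

class Observable:
    """A priority-ordered observer list: higher priority fires first."""
    def __init__(self):
        self._observers = []  # list of (priority, callback) pairs

    def add_observer(self, callback, priority=0):
        self._observers.append((priority, callback))
        # sort descending by priority so notify walks high -> low
        self._observers.sort(key=lambda pc: -pc[0])

    def remove_observer(self, callback):
        self._observers = [(p, c) for p, c in self._observers if c is not callback]

    def notify_observers(self, payload):
        for _, callback in list(self._observers):
            callback(payload)

obs = Observable()
order = []
obs.add_observer(lambda _: order.append('low'), priority=0)
obs.add_observer(lambda _: order.append('high'), priority=1)
obs.notify_observers(None)
assert order == ['high', 'low']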
@ -7,8 +7,24 @@ import unittest
import GPy
import numpy as np
+from GPy.core.parameterization.parameter_core import HierarchyError
+from GPy.core.parameterization.array_core import ObsAr

-class Test(unittest.TestCase):
+class ArrayCoreTest(unittest.TestCase):
+   def setUp(self):
+       self.X = np.random.normal(1,1, size=(100,10))
+       self.obsX = ObsAr(self.X)
+
+   def test_init(self):
+       X = ObsAr(self.X)
+       X2 = ObsAr(X)
+       self.assertIs(X, X2, "no new Observable array, when Observable is given")
+
+   def test_slice(self):
+       t1 = self.X[2:78]
+       t2 = self.obsX[2:78]
+       self.assertListEqual(t1.tolist(), t2.tolist(), "Slicing should be the exact same, as in ndarray")
+
+class ParameterizedTest(unittest.TestCase):

    def setUp(self):
        self.rbf = GPy.kern.RBF(1)
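test_init above asserts that wrapping an ObsAr in ObsAr hands back the very same object, and test_slice that slicing behaves exactly like a plain ndarray. That is the standard numpy-subclassing idiom; a sketch under the assumption that ObsAr works roughly this way (the real class adds observer plumbing on top):

import numpy as np

class ObsArSketch(np.ndarray):
    """Minimal ndarray subclass: idempotent construction, ndarray slicing."""
    def __new__(cls, arr):
        if isinstance(arr, ObsArSketch):
            return arr  # already wrapped: hand back the same object
        return np.asarray(arr).view(cls)  # otherwise view the data as our subclass

x = np.random.normal(1, 1, size=(100, 10))
ox = ObsArSketch(x)
assert ObsArSketch(ox) is ox                   # no new array when given one
assert ox[2:78].tolist() == x[2:78].tolist()   # slicing behaves like ndarray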
@ -16,94 +32,112 @@ class Test(unittest.TestCase):
        from GPy.core.parameterization import Param
        from GPy.core.parameterization.transformations import Logistic
        self.param = Param('param', np.random.rand(25,2), Logistic(0, 1))


        self.test1 = GPy.core.Parameterized("test model")
-       self.test1.add_parameter(self.white)
-       self.test1.add_parameter(self.rbf, 0)
-       self.test1.add_parameter(self.param)
+       self.test1.kern = self.rbf+self.white
+       self.test1.add_parameter(self.test1.kern)
+       self.test1.add_parameter(self.param, 0)

+       x = np.linspace(-2,6,4)[:,None]
+       y = np.sin(x)
+       self.testmodel = GPy.models.GPRegression(x,y)

    def test_add_parameter(self):
        self.assertEquals(self.rbf._parent_index_, 0)
        self.assertEquals(self.white._parent_index_, 1)
        self.assertEquals(self.param._parent_index_, 0)
        pass

    def test_fixes(self):
        self.white.fix(warning=False)
-       self.test1.remove_parameter(self.test1.param)
+       self.test1.remove_parameter(self.param)
        self.assertTrue(self.test1._has_fixes())

        from GPy.core.parameterization.transformations import FIXED, UNFIXED
        self.assertListEqual(self.test1._fixes_.tolist(),[UNFIXED,UNFIXED,FIXED])

-       self.test1.add_parameter(self.white, 0)
+       self.test1.kern.add_parameter(self.white, 0)
        self.assertListEqual(self.test1._fixes_.tolist(),[FIXED,UNFIXED,UNFIXED])

+       self.test1.kern.rbf.fix()
+       self.assertListEqual(self.test1._fixes_.tolist(),[FIXED]*3)

    def test_remove_parameter(self):
        from GPy.core.parameterization.transformations import FIXED, UNFIXED, __fixed__, Logexp
        self.white.fix()
-       self.test1.remove_parameter(self.white)
+       self.test1.kern.remove_parameter(self.white)
        self.assertIs(self.test1._fixes_,None)

        self.assertListEqual(self.white._fixes_.tolist(), [FIXED])
        self.assertEquals(self.white.constraints._offset, 0)
        self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
        self.assertIs(self.test1.constraints, self.param.constraints._param_index_ops)

        self.test1.add_parameter(self.white, 0)
        self.assertIs(self.test1.constraints, self.white.constraints._param_index_ops)
        self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
        self.assertIs(self.test1.constraints, self.param.constraints._param_index_ops)
        self.assertListEqual(self.test1.constraints[__fixed__].tolist(), [0])
        self.assertIs(self.white._fixes_,None)
        self.assertListEqual(self.test1._fixes_.tolist(),[FIXED] + [UNFIXED] * 52)

        self.test1.remove_parameter(self.white)
        self.assertIs(self.test1._fixes_,None)
        self.assertListEqual(self.white._fixes_.tolist(), [FIXED])
        self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
        self.assertIs(self.test1.constraints, self.param.constraints._param_index_ops)
-       self.assertListEqual(self.test1.constraints[Logexp()].tolist(), [0,1])
+       self.assertListEqual(self.test1.constraints[Logexp()].tolist(), range(self.param.size, self.param.size+self.rbf.size))

+   def test_remove_parameter_param_array_grad_array(self):
+       val = self.test1.kern._param_array_.copy()
+       self.test1.kern.remove_parameter(self.white)
+       self.assertListEqual(self.test1.kern._param_array_.tolist(), val[:2].tolist())

    def test_add_parameter_already_in_hirarchy(self):
        self.assertRaises(HierarchyError, self.test1.add_parameter, self.white._parameters_[0])

    def test_default_constraints(self):
        self.assertIs(self.rbf.variance.constraints._param_index_ops, self.rbf.constraints._param_index_ops)
        self.assertIs(self.test1.constraints, self.rbf.constraints._param_index_ops)
        self.assertListEqual(self.rbf.constraints.indices()[0].tolist(), range(2))
        from GPy.core.parameterization.transformations import Logexp
-       kern = self.rbf+self.white
+       kern = self.test1.kern
        self.test1.remove_parameter(kern)
        self.assertListEqual(kern.constraints[Logexp()].tolist(), range(3))

    def test_constraints(self):
        self.rbf.constrain(GPy.transformations.Square(), False)
-       self.assertListEqual(self.test1.constraints[GPy.transformations.Square()].tolist(), range(2))
-       self.assertListEqual(self.test1.constraints[GPy.transformations.Logexp()].tolist(), [2])
-
-       self.test1.remove_parameter(self.rbf)
+       self.assertListEqual(self.test1.constraints[GPy.transformations.Square()].tolist(), range(self.param.size, self.param.size+self.rbf.size))
+       self.assertListEqual(self.test1.constraints[GPy.transformations.Logexp()].tolist(), [self.param.size+self.rbf.size])
+
+       self.test1.kern.remove_parameter(self.rbf)
        self.assertListEqual(self.test1.constraints[GPy.transformations.Square()].tolist(), [])

    def test_constraints_views(self):
-       self.assertEqual(self.white.constraints._offset, 2)
-       self.assertEqual(self.rbf.constraints._offset, 0)
-       self.assertEqual(self.param.constraints._offset, 3)
+       self.assertEqual(self.white.constraints._offset, self.param.size+self.rbf.size)
+       self.assertEqual(self.rbf.constraints._offset, self.param.size)
+       self.assertEqual(self.param.constraints._offset, 0)

    def test_fixing_randomize(self):
-       self.white.fix(warning=False)
-       val = float(self.test1.white.variance)
+       self.white.fix(warning=True)
+       val = float(self.white.variance)
        self.test1.randomize()
        self.assertEqual(val, self.white.variance)

+   def test_fixing_randomize_parameter_handling(self):
+       self.rbf.fix(warning=True)
+       val = float(self.rbf.variance)
+       self.test1.kern.randomize()
+       self.assertEqual(val, self.rbf.variance)

+   def test_fixing_optimize(self):
+       self.testmodel.kern.lengthscale.fix()
+       val = float(self.testmodel.kern.lengthscale)
+       self.testmodel.randomize()
+       self.assertEqual(val, self.testmodel.kern.lengthscale)

+   def test_printing(self):
+       print self.test1
+       print self.param
+       print self.test1['']

if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.test_add_parameter']
    unittest.main()
@ -15,7 +15,7 @@ class PriorTests(unittest.TestCase):
        X, y = X[:, None], y[:, None]
        m = GPy.models.GPRegression(X, y)
        lognormal = GPy.priors.LogGaussian(1, 2)
-       m.set_prior('rbf', lognormal)
+       m.rbf.set_prior(lognormal)
        m.randomize()
        self.assertTrue(m.checkgrad())

@ -28,7 +28,7 @@ class PriorTests(unittest.TestCase):
        X, y = X[:, None], y[:, None]
        m = GPy.models.GPRegression(X, y)
        Gamma = GPy.priors.Gamma(1, 1)
-       m.set_prior('rbf', Gamma)
+       m.rbf.set_prior(Gamma)
        m.randomize()
        self.assertTrue(m.checkgrad())

@ -41,16 +41,9 @@ class PriorTests(unittest.TestCase):
        X, y = X[:, None], y[:, None]
        m = GPy.models.GPRegression(X, y)
        gaussian = GPy.priors.Gaussian(1, 1)
-       success = False

        # setting a Gaussian prior on non-negative parameters
        # should raise an assertionerror.
-       try:
-           m.set_prior('rbf', gaussian)
-       except AssertionError:
-           success = True
-
-       self.assertTrue(success)
+       self.assertRaises(AssertionError, m.rbf.set_prior, gaussian)


if __name__ == "__main__":
@ -12,6 +12,7 @@ import numpy
from GPy.kern import RBF
from GPy.kern import Linear
from copy import deepcopy
+from GPy.core.parameterization.variational import NormalPosterior

__test__ = lambda: 'deep' in sys.argv
# np.random.seed(0)

@ -28,53 +29,21 @@ def ard(p):
class Test(unittest.TestCase):
    input_dim = 9
    num_inducing = 13
-   N = 300
+   N = 1000
    Nsamples = 1e6

    def setUp(self):
-       i_s_dim_list = [2,4,3]
-       indices = numpy.cumsum(i_s_dim_list).tolist()
-       input_slices = [slice(a,b) for a,b in zip([None]+indices, indices)]
-       #input_slices[2] = deepcopy(input_slices[1])
-       input_slice_kern = GPy.kern.kern(9,
-           [
-               RBF(i_s_dim_list[0], np.random.rand(), np.random.rand(i_s_dim_list[0]), ARD=True),
-               RBF(i_s_dim_list[1], np.random.rand(), np.random.rand(i_s_dim_list[1]), ARD=True),
-               Linear(i_s_dim_list[2], np.random.rand(i_s_dim_list[2]), ARD=True)
-           ],
-           input_slices = input_slices
-           )
        self.kerns = (
-           # input_slice_kern,
-           # (GPy.kern.rbf(self.input_dim, ARD=True) +
-           #  GPy.kern.linear(self.input_dim, ARD=True) +
-           #  GPy.kern.bias(self.input_dim) +
-           #  GPy.kern.white(self.input_dim)),
            (#GPy.kern.rbf(self.input_dim, np.random.rand(), np.random.rand(self.input_dim), ARD=True)
            GPy.kern.Linear(self.input_dim, np.random.rand(self.input_dim), ARD=True)
            +GPy.kern.RBF(self.input_dim, np.random.rand(), np.random.rand(self.input_dim), ARD=True)
            # +GPy.kern.bias(self.input_dim)
            # +GPy.kern.white(self.input_dim)),
            ),
-           # (GPy.kern.rbf(self.input_dim, np.random.rand(), np.random.rand(self.input_dim), ARD=True) +
-           #  GPy.kern.bias(self.input_dim, np.random.rand())),
-           # (GPy.kern.rbf(self.input_dim, np.random.rand(), np.random.rand(self.input_dim), ARD=True)
-           #  +GPy.kern.rbf(self.input_dim, np.random.rand(), np.random.rand(self.input_dim), ARD=True)
-           #  #+GPy.kern.bias(self.input_dim, np.random.rand())
-           #  #+GPy.kern.white(self.input_dim, np.random.rand())),
-           # ),
-           # GPy.kern.white(self.input_dim, np.random.rand())),
-           # GPy.kern.rbf(self.input_dim), GPy.kern.rbf(self.input_dim, ARD=True),
-           # GPy.kern.linear(self.input_dim, ARD=False), GPy.kern.linear(self.input_dim, ARD=True),
-           # GPy.kern.linear(self.input_dim) + GPy.kern.bias(self.input_dim),
-           # GPy.kern.rbf(self.input_dim) + GPy.kern.bias(self.input_dim),
-           # GPy.kern.linear(self.input_dim) + GPy.kern.bias(self.input_dim) + GPy.kern.white(self.input_dim),
-           # GPy.kern.rbf(self.input_dim) + GPy.kern.bias(self.input_dim) + GPy.kern.white(self.input_dim),
-           # GPy.kern.bias(self.input_dim), GPy.kern.white(self.input_dim),
+           #GPy.kern.RBF([0,1,2], ARD=True)+GPy.kern.Bias(self.input_dim)+GPy.kern.White(self.input_dim),
+           #GPy.kern.RBF(self.input_dim)+GPy.kern.Bias(self.input_dim)+GPy.kern.White(self.input_dim),
+           #GPy.kern.Linear(self.input_dim) + GPy.kern.Bias(self.input_dim) + GPy.kern.White(self.input_dim),
+           #GPy.kern.Linear(self.input_dim, ARD=True) + GPy.kern.Bias(self.input_dim) + GPy.kern.White(self.input_dim),
+           GPy.kern.Linear([1,3,6,7], ARD=True) + GPy.kern.RBF([0,5,8], ARD=True) + GPy.kern.White(self.input_dim),
        )
-       self.q_x_mean = np.random.randn(self.input_dim)
-       self.q_x_variance = np.exp(np.random.randn(self.input_dim))
+       self.q_x_mean = np.random.randn(self.input_dim)[None]
+       self.q_x_variance = np.exp(.5*np.random.randn(self.input_dim))[None]
        self.q_x_samples = np.random.randn(self.Nsamples, self.input_dim) * np.sqrt(self.q_x_variance) + self.q_x_mean
+       self.q_x = NormalPosterior(self.q_x_mean, self.q_x_variance)
        self.Z = np.random.randn(self.num_inducing, self.input_dim)
-       self.q_x_mean.shape = (1, self.input_dim)
-       self.q_x_variance.shape = (1, self.input_dim)

@ -114,8 +83,9 @@ class Test(unittest.TestCase):

    def test_psi2(self):
        for kern in self.kerns:
+           kern.randomize()
            Nsamples = int(np.floor(self.Nsamples/self.N))
-           psi2 = kern.psi2(self.Z, self.q_x_mean, self.q_x_variance)
+           psi2 = kern.psi2(self.Z, self.q_x)
            K_ = np.zeros((self.num_inducing, self.num_inducing))
            diffs = []
            for i, q_x_sample_stripe in enumerate(np.array_split(self.q_x_samples, self.Nsamples / Nsamples)):

@ -130,8 +100,8 @@ class Test(unittest.TestCase):
                pylab.figure(msg)
                pylab.plot(diffs, marker='x', mew=.2)
            # print msg, np.allclose(psi2.squeeze(), K_, rtol=1e-1, atol=.1)
-           self.assertTrue(np.allclose(psi2.squeeze(), K_),
-                           #rtol=1e-1, atol=.1),
+           self.assertTrue(np.allclose(psi2.squeeze(), K_,
+                           atol=.1, rtol=1),
                            msg=msg + ": not matching")
            # sys.stdout.write(".")
        except:
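test_psi2 validates the analytic expectation psi2 = E_q(x)[k(Z, x) k(x, Z)] by Monte Carlo, averaging the outer product of kernel cross-covariances over samples drawn from q(x). A self-contained sketch of that check for a one-dimensional RBF kernel (all helper names and sizes here are illustrative):

import numpy as np

def rbf_K(A, B, lengthscale=1.0, variance=1.0):
    sqdist = ((A[:, None, :] - B[None, :, :]) ** 2).sum(-1)
    return variance * np.exp(-0.5 * sqdist / lengthscale ** 2)

# q(x) = N(mu, s2), a single latent point; Z are inducing inputs
mu, s2 = np.zeros((1, 1)), np.ones((1, 1))
Z = np.random.randn(5, 1)

# Monte Carlo estimate of psi2 = E_q[k(Z, x) k(x, Z)]
samples = np.random.randn(100000, 1) * np.sqrt(s2) + mu
Kzx = rbf_K(Z, samples)                    # shape (5, 100000)
psi2_mc = Kzx.dot(Kzx.T) / samples.shape[0]

# For an RBF kernel this expectation also has a closed form; the GPy test
# compares kern.psi2(Z, q_x) against exactly this kind of estimate.
print(np.round(psi2_mc, 3))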
@ -11,6 +11,7 @@ import itertools
from GPy.core import Model
from GPy.core.parameterization.param import Param
from GPy.core.parameterization.transformations import Logexp
+from GPy.core.parameterization.variational import NormalPosterior

class PsiStatModel(Model):
    def __init__(self, which, X, X_variance, Z, num_inducing, kernel):

@ -18,23 +19,24 @@ class PsiStatModel(Model):
        self.which = which
        self.X = Param("X", X)
        self.X_variance = Param('X_variance', X_variance, Logexp())
+       self.q = NormalPosterior(self.X, self.X_variance)
        self.Z = Param("Z", Z)
        self.N, self.input_dim = X.shape
        self.num_inducing, input_dim = Z.shape
        assert self.input_dim == input_dim, "shape missmatch: Z:{!s} X:{!s}".format(Z.shape, X.shape)
        self.kern = kernel
-       self.psi_ = self.kern.__getattribute__(self.which)(self.Z, self.X, self.X_variance)
-       self.add_parameters(self.X, self.X_variance, self.Z, self.kern)
+       self.psi_ = self.kern.__getattribute__(self.which)(self.Z, self.q)
+       self.add_parameters(self.q, self.Z, self.kern)

    def log_likelihood(self):
        return self.kern.__getattribute__(self.which)(self.Z, self.X, self.X_variance).sum()

    def parameters_changed(self):
-       psimu, psiS = self.kern.__getattribute__("d" + self.which + "_dmuS")(numpy.ones_like(self.psi_), self.Z, self.X, self.X_variance)
+       psimu, psiS = self.kern.__getattribute__("d" + self.which + "_dmuS")(numpy.ones_like(self.psi_), self.Z, self.q)
        self.X.gradient = psimu
        self.X_variance.gradient = psiS
        #psimu, psiS = numpy.ones(self.N * self.input_dim), numpy.ones(self.N * self.input_dim)
-       try: psiZ = self.kern.__getattribute__("d" + self.which + "_dZ")(numpy.ones_like(self.psi_), self.Z, self.X, self.X_variance)
+       try: psiZ = self.kern.__getattribute__("d" + self.which + "_dZ")(numpy.ones_like(self.psi_), self.Z, self.q)
        except AttributeError: psiZ = numpy.zeros_like(self.Z)
        self.Z.gradient = psiZ
        #psiZ = numpy.ones(self.num_inducing * self.input_dim)

@ -176,6 +178,6 @@ if __name__ == "__main__":
            +GPy.kern.White(input_dim)
            )
        )
-   m2.ensure_default_constraints()
+   #m2.ensure_default_constraints()
else:
    unittest.main()
@ -34,7 +34,7 @@ class GradientTests(unittest.TestCase):
        model_fit = getattr(GPy.models, model_type)

        # noise = GPy.kern.White(dimension)
        kern = kern # + noise
        if uncertain_inputs:
            m = model_fit(X, Y, kernel=kern, X_variance=np.random.rand(X.shape[0], X.shape[1]))
        else:

@ -60,13 +60,14 @@ class GradientTests(unittest.TestCase):

    def test_GPRegression_mlp_1d(self):
        ''' Testing the GP regression with mlp kernel with white kernel on 1d data '''
-       mlp = GPy.kern.mlp(1)
+       mlp = GPy.kern.MLP(1)
        self.check_model(mlp, model_type='GPRegression', dimension=1)

-   def test_GPRegression_poly_1d(self):
-       ''' Testing the GP regression with polynomial kernel with white kernel on 1d data '''
-       mlp = GPy.kern.Poly(1, degree=5)
-       self.check_model(mlp, model_type='GPRegression', dimension=1)
+   #TODO:
+   #def test_GPRegression_poly_1d(self):
+   #    ''' Testing the GP regression with polynomial kernel with white kernel on 1d data '''
+   #    mlp = GPy.kern.Poly(1, degree=5)
+   #    self.check_model(mlp, model_type='GPRegression', dimension=1)

    def test_GPRegression_matern52_1D(self):
        ''' Testing the GP regression with matern52 kernel on 1d data '''

@ -163,14 +164,14 @@ class GradientTests(unittest.TestCase):
        rbflin = GPy.kern.RBF(2) + GPy.kern.Linear(2)
        self.check_model(rbflin, model_type='SparseGPRegression', dimension=2)

-   #@unittest.expectedFailure
+   # @unittest.expectedFailure
    def test_SparseGPRegression_rbf_linear_white_kern_2D_uncertain_inputs(self):
        ''' Testing the sparse GP regression with rbf, linear kernel on 2d data with uncertain inputs'''
        rbflin = GPy.kern.RBF(2) + GPy.kern.Linear(2)
+       raise unittest.SkipTest("This is not implemented yet!")
        self.check_model(rbflin, model_type='SparseGPRegression', dimension=2, uncertain_inputs=1)

-   #@unittest.expectedFailure
+   # @unittest.expectedFailure
    def test_SparseGPRegression_rbf_linear_white_kern_1D_uncertain_inputs(self):
        ''' Testing the sparse GP regression with rbf, linear kernel on 1d data with uncertain inputs'''
        rbflin = GPy.kern.RBF(1) + GPy.kern.Linear(1)

@ -202,7 +203,7 @@ class GradientTests(unittest.TestCase):
        X = np.hstack([np.random.normal(5, 2, N / 2), np.random.normal(10, 2, N / 2)])[:, None]
        Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None]
        kernel = GPy.kern.RBF(1)
-       m = GPy.models.GPClassification(X,Y,kernel=kernel)
+       m = GPy.models.GPClassification(X, Y, kernel=kernel)
        m.update_likelihood_approximation()
        self.assertTrue(m.checkgrad())

@ -212,11 +213,11 @@ class GradientTests(unittest.TestCase):
        Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None]
        Z = np.linspace(0, 15, 4)[:, None]
        kernel = GPy.kern.RBF(1)
-       m = GPy.models.SparseGPClassification(X,Y,kernel=kernel,Z=Z)
-       #distribution = GPy.likelihoods.likelihood_functions.Bernoulli()
-       #likelihood = GPy.likelihoods.EP(Y, distribution)
-       #m = GPy.core.SparseGP(X, likelihood, kernel, Z)
-       #m.ensure_default_constraints()
+       m = GPy.models.SparseGPClassification(X, Y, kernel=kernel, Z=Z)
+       # distribution = GPy.likelihoods.likelihood_functions.Bernoulli()
+       # likelihood = GPy.likelihoods.EP(Y, distribution)
+       # m = GPy.core.SparseGP(X, likelihood, kernel, Z)
+       # m.ensure_default_constraints()
        m.update_likelihood_approximation()
        self.assertTrue(m.checkgrad())

@ -224,8 +225,8 @@ class GradientTests(unittest.TestCase):
        N = 20
        X = np.hstack([np.random.rand(N / 2) + 1, np.random.rand(N / 2) - 1])[:, None]
        k = GPy.kern.RBF(1) + GPy.kern.White(1)
-       Y = np.hstack([np.ones(N/2),np.zeros(N/2)])[:,None]
-       m = GPy.models.FITCClassification(X, Y, kernel = k)
+       Y = np.hstack([np.ones(N / 2), np.zeros(N / 2)])[:, None]
+       m = GPy.models.FITCClassification(X, Y, kernel=k)
        m.update_likelihood_approximation()
        self.assertTrue(m.checkgrad())

@ -238,7 +239,7 @@ class GradientTests(unittest.TestCase):
        Y = np.vstack((Y1, Y2))

        k1 = GPy.kern.RBF(1)
-       m = GPy.models.GPMultioutputRegression(X_list=[X1,X2],Y_list=[Y1,Y2],kernel_list=[k1])
+       m = GPy.models.GPMultioutputRegression(X_list=[X1, X2], Y_list=[Y1, Y2], kernel_list=[k1])
        m.constrain_fixed('.*rbf_var', 1.)
        self.assertTrue(m.checkgrad())

@ -251,7 +252,7 @@ class GradientTests(unittest.TestCase):
        Y = np.vstack((Y1, Y2))

        k1 = GPy.kern.RBF(1)
-       m = GPy.models.SparseGPMultioutputRegression(X_list=[X1,X2],Y_list=[Y1,Y2],kernel_list=[k1])
+       m = GPy.models.SparseGPMultioutputRegression(X_list=[X1, X2], Y_list=[Y1, Y2], kernel_list=[k1])
        m.constrain_fixed('.*rbf_var', 1.)
        self.assertTrue(m.checkgrad())