mirror of https://github.com/SheffieldML/GPy.git (synced 2026-05-08 11:32:39 +02:00)

commit 6637eb7ac8 (parent 9c553ba15c)
adding kernel flattening and parameters already in hierarchy

11 changed files with 624 additions and 9 deletions
@@ -43,7 +43,7 @@ class ExactGaussianInference(object):
         K = kern.K(X)

         Ky = K.copy()
-        diag.add(Ky, likelihood.gaussian_variance(Y, Y_metadata))
+        diag.add(Ky, likelihood.gaussian_variance(Y_metadata))
         Wi, LW, LWi, W_logdet = pdinv(Ky)

         alpha, _ = dpotrs(LW, YYT_factor, lower=1)
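For orientation, this hunk builds the noisy Gaussian covariance and solves for the posterior weights. A minimal sketch of the same computation in plain NumPy/SciPy (pdinv and dpotrs are GPy's LAPACK wrappers; the function name below is illustrative):

import numpy as np
from scipy.linalg import cho_factor, cho_solve

def posterior_weights(K, Y, noise_var):
    # Ky = K + noise * I, the role of diag.add above
    Ky = K + noise_var * np.eye(K.shape[0])
    # Cholesky factorisation of Ky (the LW factor that pdinv returns)
    LW = cho_factor(Ky, lower=True)
    # two triangular solves give alpha = Ky^{-1} Y (the role of dpotrs)
    return cho_solve(LW, Y)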
@@ -65,7 +65,7 @@ class VarDTC(object):
         _, output_dim = Y.shape

         #see whether we've got a different noise variance for each datum
-        beta = 1./np.fmax(likelihood.gaussian_variance(Y, Y_metadata), 1e-6)
+        beta = 1./np.fmax(likelihood.gaussian_variance(Y_metadata), 1e-6)
         # VVT_factor is a matrix such that tdot(VVT_factor) = VVT...this is for efficiency!
         #self.YYTfactor = self.get_YYTfactor(Y)
         #VVT_factor = self.get_VVTfactor(self.YYTfactor, beta)
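The tdot comment refers to GPy's helper for the symmetric product A A^T, and the beta line clamps the per-datum noise variance so the precision stays finite. A small illustrative sketch in plain NumPy (tdot here is a stand-in, not GPy's BLAS-backed version):

import numpy as np

def tdot(A):
    # stand-in for GPy's tdot: tdot(VVT_factor) = VVT_factor . VVT_factor^T
    return A.dot(A.T)

V = np.random.randn(5, 3)
VVT = tdot(V)                       # 5x5 symmetric product
noise = np.array([1e-9, 0.5])       # per-datum noise variances
beta = 1. / np.fmax(noise, 1e-6)    # precisions, capped at 1e6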
@@ -170,4 +170,11 @@ class Add(CombinationKernel):
     def _setstate(self, state):
         super(Add, self)._setstate(state)

+    def add(self, other, name='sum'):
+        if isinstance(other, Add):
+            other_params = other._parameters_.copy()
+            for p in other_params:
+                other.remove_parameter(p)
+            self.add_parameters(*other_params)
+        else: self.add_parameter(other)
+        return self
@@ -140,12 +140,7 @@ class Kern(Parameterized):
         """
         assert isinstance(other, Kern), "only kernels can be added to kernels..."
         from add import Add
-        kernels = []
-        if isinstance(self, Add): kernels.extend(self._parameters_)
-        else: kernels.append(self)
-        if isinstance(other, Add): kernels.extend(other._parameters_)
-        else: kernels.append(other)
-        return Add(kernels, name=name)
+        return Add([self, other], name=name)

     def __mul__(self, other):
         """ Here we overload the '*' operator. See self.prod for more information"""
GPy/old_tests/bcgplvm_tests.py (new file, 50 lines)
@@ -0,0 +1,50 @@
+# Copyright (c) 2013, GPy authors (see AUTHORS.txt)
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+import unittest
+import numpy as np
+import GPy
+
+class BCGPLVMTests(unittest.TestCase):
+    def test_kernel_backconstraint(self):
+        num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4
+        X = np.random.rand(num_data, input_dim)
+        k = GPy.kern.rbf(input_dim) + GPy.kern.white(input_dim, 0.00001)
+        K = k.K(X)
+        Y = np.random.multivariate_normal(np.zeros(num_data),K,output_dim).T
+        k = GPy.kern.mlp(input_dim) + GPy.kern.bias(input_dim)
+        bk = GPy.kern.rbf(output_dim)
+        mapping = GPy.mappings.Kernel(output_dim=input_dim, X=Y, kernel=bk)
+        m = GPy.models.BCGPLVM(Y, input_dim, kernel = k, mapping=mapping)
+        m.randomize()
+        self.assertTrue(m.checkgrad())
+
+    def test_linear_backconstraint(self):
+        num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4
+        X = np.random.rand(num_data, input_dim)
+        k = GPy.kern.rbf(input_dim) + GPy.kern.white(input_dim, 0.00001)
+        K = k.K(X)
+        Y = np.random.multivariate_normal(np.zeros(num_data),K,output_dim).T
+        k = GPy.kern.mlp(input_dim) + GPy.kern.bias(input_dim)
+        bk = GPy.kern.rbf(output_dim)
+        mapping = GPy.mappings.Linear(output_dim=input_dim, input_dim=output_dim)
+        m = GPy.models.BCGPLVM(Y, input_dim, kernel = k, mapping=mapping)
+        m.randomize()
+        self.assertTrue(m.checkgrad())
+
+    def test_mlp_backconstraint(self):
+        num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4
+        X = np.random.rand(num_data, input_dim)
+        k = GPy.kern.rbf(input_dim) + GPy.kern.white(input_dim, 0.00001)
+        K = k.K(X)
+        Y = np.random.multivariate_normal(np.zeros(num_data),K,output_dim).T
+        k = GPy.kern.mlp(input_dim) + GPy.kern.bias(input_dim)
+        bk = GPy.kern.rbf(output_dim)
+        mapping = GPy.mappings.MLP(output_dim=input_dim, input_dim=output_dim, hidden_dim=[5, 4, 7])
+        m = GPy.models.BCGPLVM(Y, input_dim, kernel = k, mapping=mapping)
+        m.randomize()
+        self.assertTrue(m.checkgrad())
+
+if __name__ == "__main__":
+    print "Running unit tests, please be (very) patient..."
+    unittest.main()
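All of these tests lean on m.checkgrad(), which compares the model's analytic gradients against finite differences. A minimal standalone sketch of that idea (illustrative, not GPy's implementation):

import numpy as np

def checkgrad(f, df, x, eps=1e-6, tol=1e-4):
    # compare the analytic gradient df against central finite differences of f
    g = df(x)
    g_fd = np.empty_like(x)
    for i in range(x.size):
        e = np.zeros_like(x)
        e[i] = eps
        g_fd[i] = (f(x + e) - f(x - e)) / (2 * eps)
    return np.allclose(g, g_fd, atol=tol)

# e.g. checkgrad(lambda x: (x**2).sum(), lambda x: 2*x, np.random.randn(5)) -> True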
GPy/old_tests/cgd_tests.py (new file, 110 lines)
@@ -0,0 +1,110 @@
+'''
+Created on 26 Apr 2013
+
+@author: maxz
+'''
+import unittest
+import numpy
+from GPy.inference.optimization.conjugate_gradient_descent import CGD, RUNNING
+import pylab
+from scipy.optimize.optimize import rosen, rosen_der
+from GPy.inference.optimization.gradient_descent_update_rules import PolakRibiere
+
+
+class Test(unittest.TestCase):
+
+    def testMinimizeSquare(self):
+        N = 100
+        A = numpy.random.rand(N) * numpy.eye(N)
+        b = numpy.random.rand(N) * 0
+        f = lambda x: numpy.dot(x.T.dot(A), x) - numpy.dot(x.T, b)
+        df = lambda x: numpy.dot(A, x) - b
+
+        opt = CGD()
+
+        restarts = 10
+        for _ in range(restarts):
+            try:
+                x0 = numpy.random.randn(N) * 10
+                res = opt.opt(f, df, x0, messages=0, maxiter=1000, gtol=1e-15)
+                assert numpy.allclose(res[0], 0, atol=1e-5)
+                break
+            except AssertionError:
+                import pdb;pdb.set_trace()
+                # RESTART
+                pass
+        else:
+            raise AssertionError("Test failed for {} restarts".format(restarts))
+
+    def testRosen(self):
+        N = 20
+        f = rosen
+        df = rosen_der
+
+        opt = CGD()
+
+        restarts = 10
+        for _ in range(restarts):
+            try:
+                x0 = (numpy.random.randn(N) * .5) + numpy.ones(N)
+                res = opt.opt(f, df, x0, messages=0,
+                              maxiter=1e3, gtol=1e-12)
+                assert numpy.allclose(res[0], 1, atol=.1)
+                break
+            except:
+                # RESTART
+                pass
+        else:
+            raise AssertionError("Test failed for {} restarts".format(restarts))
+
+if __name__ == "__main__":
+#    import sys;sys.argv = ['',
+#                           'Test.testMinimizeSquare',
+#                           'Test.testRosen',
+#                           ]
+#    unittest.main()
+
+    N = 2
+    A = numpy.random.rand(N) * numpy.eye(N)
+    b = numpy.random.rand(N) * 0
+    f = lambda x: numpy.dot(x.T.dot(A), x) - numpy.dot(x.T, b)
+    df = lambda x: numpy.dot(A, x) - b
+#    f = rosen
+#    df = rosen_der
+    x0 = (numpy.random.randn(N) * .5) + numpy.ones(N)
+    print x0
+
+    opt = CGD()
+
+    pylab.ion()
+    fig = pylab.figure("cgd optimize")
+    if fig.axes:
+        ax = fig.axes[0]
+        ax.cla()
+    else:
+        ax = fig.add_subplot(111, projection='3d')
+
+    interpolation = 40
+#    x, y = numpy.linspace(.5, 1.5, interpolation)[:, None], numpy.linspace(.5, 1.5, interpolation)[:, None]
+    x, y = numpy.linspace(-1, 1, interpolation)[:, None], numpy.linspace(-1, 1, interpolation)[:, None]
+    X, Y = numpy.meshgrid(x, y)
+    fXY = numpy.array([f(numpy.array([x, y])) for x, y in zip(X.flatten(), Y.flatten())]).reshape(interpolation, interpolation)
+
+    ax.plot_wireframe(X, Y, fXY)
+    xopts = [x0.copy()]
+    optplts, = ax.plot3D([x0[0]], [x0[1]], zs=f(x0), marker='', color='r')
+
+    raw_input("enter to start optimize")
+    res = [0]
+
+    def callback(*r):
+        xopts.append(r[0].copy())
+#        time.sleep(.3)
+        optplts._verts3d = [numpy.array(xopts)[:, 0], numpy.array(xopts)[:, 1], [f(xs) for xs in xopts]]
+        fig.canvas.draw()
+        if r[-1] != RUNNING:
+            res[0] = r
+
+    res[0] = opt.opt(f, df, x0.copy(), callback, messages=True, maxiter=1000,
+                     report_every=7, gtol=1e-12, update_rule=PolakRibiere)
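As an independent cross-check of GPy's CGD class, the same two problems can be run through SciPy's conjugate-gradient minimizer. A minimal sketch (standard scipy.optimize API; like the original test, an unlucky Rosenbrock start may need a restart):

import numpy as np
from scipy.optimize import minimize, rosen, rosen_der

# random positive-diagonal quadratic, as in testMinimizeSquare
N = 20
A = np.diag(np.random.rand(N) + .1)
f = lambda x: x.dot(A).dot(x)
df = lambda x: 2 * A.dot(x)
res = minimize(f, np.random.randn(N) * 10, jac=df, method='CG', tol=1e-12)
assert np.allclose(res.x, 0, atol=1e-5)

# Rosenbrock, as in testRosen; the minimum is at the all-ones vector
x0 = np.ones(N) + .5 * np.random.randn(N)
res = minimize(rosen, x0, jac=rosen_der, method='CG')
assert np.allclose(res.x, 1, atol=.1)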
GPy/old_tests/gp_transformation_tests.py (new file, 61 lines)
@@ -0,0 +1,61 @@
+from nose.tools import with_setup
+from GPy.models import GradientChecker
+from GPy.likelihoods.noise_models import gp_transformations
+import inspect
+import unittest
+import numpy as np
+
+class TestTransformations(object):
+    """
+    Generic transformations checker
+    """
+    def setUp(self):
+        N = 30
+        self.fs = [np.random.rand(N, 1), float(np.random.rand(1))]
+
+    def tearDown(self):
+        self.fs = None
+
+    def test_transformations(self):
+        self.setUp()
+        transformations = [gp_transformations.Identity(),
+                           gp_transformations.Log(),
+                           gp_transformations.Probit(),
+                           gp_transformations.Log_ex_1(),
+                           gp_transformations.Reciprocal(),
+                           ]
+
+        for transformation in transformations:
+            for f in self.fs:
+                yield self.t_dtransf_df, transformation, f
+                yield self.t_d2transf_df2, transformation, f
+                yield self.t_d3transf_df3, transformation, f
+
+    @with_setup(setUp, tearDown)
+    def t_dtransf_df(self, transformation, f):
+        print "\n{}".format(inspect.stack()[0][3])
+        grad = GradientChecker(transformation.transf, transformation.dtransf_df, f, 'f')
+        grad.randomize()
+        grad.checkgrad(verbose=1)
+        assert grad.checkgrad()
+
+    @with_setup(setUp, tearDown)
+    def t_d2transf_df2(self, transformation, f):
+        print "\n{}".format(inspect.stack()[0][3])
+        grad = GradientChecker(transformation.dtransf_df, transformation.d2transf_df2, f, 'f')
+        grad.randomize()
+        grad.checkgrad(verbose=1)
+        assert grad.checkgrad()
+
+    @with_setup(setUp, tearDown)
+    def t_d3transf_df3(self, transformation, f):
+        print "\n{}".format(inspect.stack()[0][3])
+        grad = GradientChecker(transformation.d2transf_df2, transformation.d3transf_df3, f, 'f')
+        grad.randomize()
+        grad.checkgrad(verbose=1)
+        assert grad.checkgrad()
+
+#if __name__ == "__main__":
+    #print "Running unit tests"
+    #unittest.main()
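The pattern here checks each derivative against the function one order below it: transf vs dtransf_df, dtransf_df vs d2transf_df2, and so on. A minimal sketch of the same idea for one concrete link function, a Log_ex_1-style softplus f -> log(1 + e^f), whose derivative is the sigmoid (illustrative, plain NumPy):

import numpy as np

transf = lambda f: np.log1p(np.exp(f))         # log(1 + e^f)
dtransf_df = lambda f: 1. / (1. + np.exp(-f))  # its derivative: the sigmoid

f = np.random.randn(30, 1)
eps = 1e-6
fd = (transf(f + eps) - transf(f - eps)) / (2 * eps)
assert np.allclose(fd, dtransf_df(f), atol=1e-6)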
GPy/old_tests/gplvm_tests.py (new file, 44 lines)
@@ -0,0 +1,44 @@
+# Copyright (c) 2012, Nicolo Fusi
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+import unittest
+import numpy as np
+import GPy
+
+class GPLVMTests(unittest.TestCase):
+    def test_bias_kern(self):
+        num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4
+        X = np.random.rand(num_data, input_dim)
+        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        K = k.K(X)
+        Y = np.random.multivariate_normal(np.zeros(num_data),K,output_dim).T
+        k = GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        m = GPy.models.GPLVM(Y, input_dim, kernel = k)
+        m.randomize()
+        self.assertTrue(m.checkgrad())
+
+    def test_linear_kern(self):
+        num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4
+        X = np.random.rand(num_data, input_dim)
+        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        K = k.K(X)
+        Y = np.random.multivariate_normal(np.zeros(num_data),K,output_dim).T
+        k = GPy.kern.Linear(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        m = GPy.models.GPLVM(Y, input_dim, kernel = k)
+        m.randomize()
+        self.assertTrue(m.checkgrad())
+
+    def test_rbf_kern(self):
+        num_data, num_inducing, input_dim, output_dim = 10, 3, 2, 4
+        X = np.random.rand(num_data, input_dim)
+        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        K = k.K(X)
+        Y = np.random.multivariate_normal(np.zeros(num_data),K,output_dim).T
+        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        m = GPy.models.GPLVM(Y, input_dim, kernel = k)
+        m.randomize()
+        self.assertTrue(m.checkgrad())
+
+if __name__ == "__main__":
+    print "Running unit tests, please be (very) patient..."
+    unittest.main()
GPy/old_tests/psi_stat_expectation_tests.py (new file, 120 lines)
@@ -0,0 +1,120 @@
+'''
+Created on 26 Apr 2013
+
+@author: maxz
+'''
+import unittest
+import GPy
+import numpy as np
+from GPy import testing
+import sys
+import numpy
+from GPy.kern import RBF
+from GPy.kern import Linear
+from copy import deepcopy
+from GPy.core.parameterization.variational import NormalPosterior
+
+__test__ = lambda: 'deep' in sys.argv
+# np.random.seed(0)
+
+def ard(p):
+    try:
+        if p.ARD:
+            return "ARD"
+    except:
+        pass
+    return ""
+
+@testing.deepTest(__test__())
+class Test(unittest.TestCase):
+    input_dim = 9
+    num_inducing = 13
+    N = 1000
+    Nsamples = 1e6
+
+    def setUp(self):
+        self.kerns = (
+                      #GPy.kern.RBF([0,1,2], ARD=True)+GPy.kern.Bias(self.input_dim)+GPy.kern.White(self.input_dim),
+                      #GPy.kern.RBF(self.input_dim)+GPy.kern.Bias(self.input_dim)+GPy.kern.White(self.input_dim),
+                      #GPy.kern.Linear(self.input_dim) + GPy.kern.Bias(self.input_dim) + GPy.kern.White(self.input_dim),
+                      #GPy.kern.Linear(self.input_dim, ARD=True) + GPy.kern.Bias(self.input_dim) + GPy.kern.White(self.input_dim),
+                      GPy.kern.Linear([1,3,6,7], ARD=True) + GPy.kern.RBF([0,5,8], ARD=True) + GPy.kern.White(self.input_dim),
+                      )
+        self.q_x_mean = np.random.randn(self.input_dim)[None]
+        self.q_x_variance = np.exp(.5*np.random.randn(self.input_dim))[None]
+        self.q_x_samples = np.random.randn(self.Nsamples, self.input_dim) * np.sqrt(self.q_x_variance) + self.q_x_mean
+        self.q_x = NormalPosterior(self.q_x_mean, self.q_x_variance)
+        self.Z = np.random.randn(self.num_inducing, self.input_dim)
+        self.q_x_mean.shape = (1, self.input_dim)
+        self.q_x_variance.shape = (1, self.input_dim)
+
+    def test_psi0(self):
+        for kern in self.kerns:
+            psi0 = kern.psi0(self.Z, self.q_x_mean, self.q_x_variance)
+            Kdiag = kern.Kdiag(self.q_x_samples)
+            self.assertAlmostEqual(psi0, np.mean(Kdiag), 1)
+            # print kern.parts[0].name, np.allclose(psi0, np.mean(Kdiag))
+
+    def test_psi1(self):
+        for kern in self.kerns:
+            Nsamples = np.floor(self.Nsamples/self.N)
+            psi1 = kern.psi1(self.Z, self.q_x_mean, self.q_x_variance)
+            K_ = np.zeros((Nsamples, self.num_inducing))
+            diffs = []
+            for i, q_x_sample_stripe in enumerate(np.array_split(self.q_x_samples, self.Nsamples / Nsamples)):
+                K = kern.K(q_x_sample_stripe[:Nsamples], self.Z)
+                K_ += K
+                diffs.append((np.abs(psi1 - (K_ / (i + 1)))**2).mean())
+            K_ /= self.Nsamples / Nsamples
+            msg = "psi1: " + "+".join([p.name + ard(p) for p in kern.parts])
+            try:
+                import pylab
+                pylab.figure(msg)
+                pylab.plot(diffs)
+                # print msg, ((psi1.squeeze() - K_)**2).mean() < .01
+                self.assertTrue(((psi1.squeeze() - K_)**2).mean() < .01,
+                                msg=msg + ": not matching")
+                # sys.stdout.write(".")
+            except:
+                # import ipdb;ipdb.set_trace()
+                # kern.psi2(self.Z, self.q_x_mean, self.q_x_variance)
+                # sys.stdout.write("E") # msg + ": not matching"
+                pass
+
+    def test_psi2(self):
+        for kern in self.kerns:
+            kern.randomize()
+            Nsamples = int(np.floor(self.Nsamples/self.N))
+            psi2 = kern.psi2(self.Z, self.q_x)
+            K_ = np.zeros((self.num_inducing, self.num_inducing))
+            diffs = []
+            for i, q_x_sample_stripe in enumerate(np.array_split(self.q_x_samples, self.Nsamples / Nsamples)):
+                K = kern.K(q_x_sample_stripe, self.Z)
+                K = (K[:, :, None] * K[:, None, :])
+                K_ += K.sum(0) / self.Nsamples
+                diffs.append(((psi2 - (K_*self.Nsamples/((i+1)*Nsamples)))**2).mean())
+            #K_ /= self.Nsamples / Nsamples
+            msg = "psi2: {}".format("+".join([p.name + ard(p) for p in kern.parts]))
+            try:
+                import pylab
+                pylab.figure(msg)
+                pylab.plot(diffs, marker='x', mew=.2)
+                # print msg, np.allclose(psi2.squeeze(), K_, rtol=1e-1, atol=.1)
+                self.assertTrue(np.allclose(psi2.squeeze(), K_,
+                                            atol=.1, rtol=1),
+                                msg=msg + ": not matching")
+                # sys.stdout.write(".")
+            except:
+                # kern.psi2(self.Z, self.q_x_mean, self.q_x_variance)
+                # sys.stdout.write("E")
+                print msg + ": not matching"
+                import ipdb;ipdb.set_trace()
+                pass
+
+if __name__ == "__main__":
+    sys.argv = ['',
+                #'Test.test_psi0',
+                #'Test.test_psi1',
+                'Test.test_psi2',
+                ]
+    unittest.main()
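These tests exercise the psi statistics, i.e. the kernel expectations under a Gaussian q(x): psi0 = E[k(x,x)], psi1 = E[k(x,Z)], psi2 = E[k(x,Z)^T k(x,Z)], each compared against a Monte-Carlo average over samples from q(x). A minimal sketch of the psi1 check for a 1-D RBF kernel, where the closed form is available (illustrative, plain NumPy):

import numpy as np

v, l = 1.5, 0.8    # kernel variance and lengthscale
mu, s2 = 0.3, 0.5  # q(x) = N(mu, s2)
z = -0.7           # one inducing input

# closed form of E_{x~q}[v * exp(-(x - z)^2 / (2 l^2))]
psi1 = v * np.sqrt(l**2 / (l**2 + s2)) * np.exp(-0.5 * (mu - z)**2 / (l**2 + s2))

# Monte-Carlo estimate, as test_psi1 does with kern.K over sample stripes
x = np.random.randn(1000000) * np.sqrt(s2) + mu
psi1_mc = (v * np.exp(-0.5 * (x - z)**2 / l**2)).mean()
assert abs(psi1 - psi1_mc) < 1e-2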
GPy/old_tests/psi_stat_gradient_tests.py (new file, 183 lines)
@@ -0,0 +1,183 @@
+'''
+Created on 22 Apr 2013
+
+@author: maxz
+'''
+import unittest
+import numpy
+
+import GPy
+import itertools
+from GPy.core import Model
+from GPy.core.parameterization.param import Param
+from GPy.core.parameterization.transformations import Logexp
+from GPy.core.parameterization.variational import NormalPosterior
+
+class PsiStatModel(Model):
+    def __init__(self, which, X, X_variance, Z, num_inducing, kernel):
+        super(PsiStatModel, self).__init__(name='psi stat test')
+        self.which = which
+        self.X = Param("X", X)
+        self.X_variance = Param('X_variance', X_variance, Logexp())
+        self.q = NormalPosterior(self.X, self.X_variance)
+        self.Z = Param("Z", Z)
+        self.N, self.input_dim = X.shape
+        self.num_inducing, input_dim = Z.shape
+        assert self.input_dim == input_dim, "shape missmatch: Z:{!s} X:{!s}".format(Z.shape, X.shape)
+        self.kern = kernel
+        self.psi_ = self.kern.__getattribute__(self.which)(self.Z, self.q)
+        self.add_parameters(self.q, self.Z, self.kern)
+
+    def log_likelihood(self):
+        return self.kern.__getattribute__(self.which)(self.Z, self.X, self.X_variance).sum()
+
+    def parameters_changed(self):
+        psimu, psiS = self.kern.__getattribute__("d" + self.which + "_dmuS")(numpy.ones_like(self.psi_), self.Z, self.q)
+        self.X.gradient = psimu
+        self.X_variance.gradient = psiS
+        #psimu, psiS = numpy.ones(self.N * self.input_dim), numpy.ones(self.N * self.input_dim)
+        try: psiZ = self.kern.__getattribute__("d" + self.which + "_dZ")(numpy.ones_like(self.psi_), self.Z, self.q)
+        except AttributeError: psiZ = numpy.zeros_like(self.Z)
+        self.Z.gradient = psiZ
+        #psiZ = numpy.ones(self.num_inducing * self.input_dim)
+        N,M = self.X.shape[0], self.Z.shape[0]
+        dL_dpsi0, dL_dpsi1, dL_dpsi2 = numpy.zeros([N]), numpy.zeros([N,M]), numpy.zeros([N,M,M])
+        if self.which == 'psi0': dL_dpsi0 += 1
+        if self.which == 'psi1': dL_dpsi1 += 1
+        if self.which == 'psi2': dL_dpsi2 += 1
+        self.kern.update_gradients_variational(numpy.zeros([1,1]),
+                                               dL_dpsi0,
+                                               dL_dpsi1,
+                                               dL_dpsi2, self.X, self.X_variance, self.Z)
+
+class DPsiStatTest(unittest.TestCase):
+    input_dim = 5
+    N = 50
+    num_inducing = 10
+    input_dim = 20
+    X = numpy.random.randn(N, input_dim)
+    X_var = .5 * numpy.ones_like(X) + .4 * numpy.clip(numpy.random.randn(*X.shape), 0, 1)
+    Z = numpy.random.permutation(X)[:num_inducing]
+    Y = X.dot(numpy.random.randn(input_dim, input_dim))
+#    kernels = [GPy.kern.Linear(input_dim, ARD=True, variances=numpy.random.rand(input_dim)), GPy.kern.RBF(input_dim, ARD=True), GPy.kern.Bias(input_dim)]
+
+    kernels = [
+               GPy.kern.Linear(input_dim),
+               GPy.kern.RBF(input_dim),
+               #GPy.kern.Bias(input_dim),
+               #GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim),
+               #GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim)
+               ]
+
+    def testPsi0(self):
+        for k in self.kernels:
+            m = PsiStatModel('psi0', X=self.X, X_variance=self.X_var, Z=self.Z,\
+                             num_inducing=self.num_inducing, kernel=k)
+            m.randomize()
+            assert m.checkgrad(), "{} x psi0".format("+".join(map(lambda x: x.name, k._parameters_)))
+
+    def testPsi1(self):
+        for k in self.kernels:
+            m = PsiStatModel('psi1', X=self.X, X_variance=self.X_var, Z=self.Z,
+                             num_inducing=self.num_inducing, kernel=k)
+            m.randomize()
+            assert m.checkgrad(), "{} x psi1".format("+".join(map(lambda x: x.name, k._parameters_)))
+
+    def testPsi2_lin(self):
+        k = self.kernels[0]
+        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
+                         num_inducing=self.num_inducing, kernel=k)
+        m.randomize()
+        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k._parameters_)))
+
+    def testPsi2_lin_bia(self):
+        k = self.kernels[3]
+        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
+                         num_inducing=self.num_inducing, kernel=k)
+        m.randomize()
+        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k._parameters_)))
+
+    def testPsi2_rbf(self):
+        k = self.kernels[1]
+        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
+                         num_inducing=self.num_inducing, kernel=k)
+        m.randomize()
+        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k._parameters_)))
+
+    def testPsi2_rbf_bia(self):
+        k = self.kernels[-1]
+        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
+                         num_inducing=self.num_inducing, kernel=k)
+        m.randomize()
+        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k._parameters_)))
+
+    def testPsi2_bia(self):
+        k = self.kernels[2]
+        m = PsiStatModel('psi2', X=self.X, X_variance=self.X_var, Z=self.Z,
+                         num_inducing=self.num_inducing, kernel=k)
+        m.randomize()
+        assert m.checkgrad(), "{} x psi2".format("+".join(map(lambda x: x.name, k._parameters_)))
+
+
+if __name__ == "__main__":
+    import sys
+    interactive = 'i' in sys.argv
+    if interactive:
+#        N, num_inducing, input_dim, input_dim = 30, 5, 4, 30
+#        X = numpy.random.rand(N, input_dim)
+#        k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
+#        K = k.K(X)
+#        Y = numpy.random.multivariate_normal(numpy.zeros(N), K, input_dim).T
+#        Y -= Y.mean(axis=0)
+#        k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
+#        m = GPy.models.Bayesian_GPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
+#        m.randomize()
+#        # self.assertTrue(m.checkgrad())
+        numpy.random.seed(0)
+        input_dim = 3
+        N = 3
+        num_inducing = 2
+        D = 15
+        X = numpy.random.randn(N, input_dim)
+        X_var = .5 * numpy.ones_like(X) + .1 * numpy.clip(numpy.random.randn(*X.shape), 0, 1)
+        Z = numpy.random.permutation(X)[:num_inducing]
+        Y = X.dot(numpy.random.randn(input_dim, D))
+#        kernel = GPy.kern.Bias(input_dim)
+#
+#        kernels = [GPy.kern.Linear(input_dim), GPy.kern.RBF(input_dim), GPy.kern.Bias(input_dim),
+#                   GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim),
+#                   GPy.kern.RBF(input_dim) + GPy.kern.Bias(input_dim)]
+
+#        for k in kernels:
+#            m = PsiStatModel('psi1', X=X, X_variance=X_var, Z=Z,
+#                             num_inducing=num_inducing, kernel=k)
+#            assert m.checkgrad(), "{} x psi1".format("+".join(map(lambda x: x.name, k.parts)))
+#
+        m0 = PsiStatModel('psi0', X=X, X_variance=X_var, Z=Z,
+                          num_inducing=num_inducing, kernel=GPy.kern.RBF(input_dim)+GPy.kern.Bias(input_dim))
+#        m1 = PsiStatModel('psi1', X=X, X_variance=X_var, Z=Z,
+#                          num_inducing=num_inducing, kernel=kernel)
+#        m1 = PsiStatModel('psi1', X=X, X_variance=X_var, Z=Z,
+#                          num_inducing=num_inducing, kernel=kernel)
+#        m2 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
+#                          num_inducing=num_inducing, kernel=GPy.kern.RBF(input_dim))
+#        m3 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
+#                          num_inducing=num_inducing, kernel=GPy.kern.Linear(input_dim, ARD=True, variances=numpy.random.rand(input_dim)))
+#                          + GPy.kern.Bias(input_dim))
+#        m = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
+#                         num_inducing=num_inducing,
+#                         kernel=(
+#                                 GPy.kern.RBF(input_dim, ARD=1)
+#                                 +GPy.kern.Linear(input_dim, ARD=1)
+#                                 +GPy.kern.Bias(input_dim))
+#                         )
+#        m.ensure_default_constraints()
+        m2 = PsiStatModel('psi2', X=X, X_variance=X_var, Z=Z,
+                          num_inducing=num_inducing, kernel=(
+                              GPy.kern.RBF(input_dim, numpy.random.rand(), numpy.random.rand(input_dim), ARD=1)
+                              #+GPy.kern.Linear(input_dim, numpy.random.rand(input_dim), ARD=1)
+                              #+GPy.kern.RBF(input_dim, numpy.random.rand(), numpy.random.rand(input_dim), ARD=1)
+                              #+GPy.kern.RBF(input_dim, numpy.random.rand(), numpy.random.rand(), ARD=0)
+                              +GPy.kern.Bias(input_dim)
+                              +GPy.kern.White(input_dim)
+                              )
+                          )
+        #m2.ensure_default_constraints()
+    else:
+        unittest.main()
GPy/old_tests/sparse_gplvm_tests.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+# Copyright (c) 2012, Nicolo Fusi, James Hensman
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+
+import unittest
+import numpy as np
+import GPy
+from ..models import SparseGPLVM
+
+class sparse_GPLVMTests(unittest.TestCase):
+    def test_bias_kern(self):
+        N, num_inducing, input_dim, D = 10, 3, 2, 4
+        X = np.random.rand(N, input_dim)
+        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        K = k.K(X)
+        Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T
+        k = GPy.kern.Bias(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        m = SparseGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
+        m.randomize()
+        self.assertTrue(m.checkgrad())
+
+    def test_linear_kern(self):
+        N, num_inducing, input_dim, D = 10, 3, 2, 4
+        X = np.random.rand(N, input_dim)
+        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        K = k.K(X)
+        Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T
+        k = GPy.kern.Linear(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        m = SparseGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
+        m.randomize()
+        self.assertTrue(m.checkgrad())
+
+    def test_rbf_kern(self):
+        N, num_inducing, input_dim, D = 10, 3, 2, 4
+        X = np.random.rand(N, input_dim)
+        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        K = k.K(X)
+        Y = np.random.multivariate_normal(np.zeros(N),K,input_dim).T
+        k = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.00001)
+        m = SparseGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
+        m.randomize()
+        self.assertTrue(m.checkgrad())
+
+if __name__ == "__main__":
+    print "Running unit tests, please be (very) patient..."
+    unittest.main()