2012-11-29 16:39:20 +00:00
|
|
|
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
|
|
|
|
|
# Licensed under the BSD 3-clause license (see LICENSE.txt)
|
|
|
|
|
|
|
|
|
|
|
2012-11-29 16:28:11 +00:00
|
|
|
import unittest
|
|
|
|
|
import numpy as np
|
|
|
|
|
import GPy
|
|
|
|
|
|
|
|
|
|
class GradientTests(unittest.TestCase):
    """Gradient-correctness tests for GPy models.

    Each test builds a model with a particular kernel on small random 1D/2D
    data and asserts ``m.checkgrad()``, i.e. that the analytic gradients of
    the log-likelihood agree with finite-difference estimates.
    """

    def setUp(self):
        ######################################
        # 1 dimensional example

        # sample inputs and outputs
        self.X1D = np.random.uniform(-3., 3., (20, 1))
        self.Y1D = np.sin(self.X1D) + np.random.randn(20, 1) * 0.05

        ######################################
        # 2 dimensional example

        # sample inputs and outputs
        self.X2D = np.random.uniform(-3., 3., (40, 2))
        self.Y2D = np.sin(self.X2D[:, 0:1]) * np.sin(self.X2D[:, 1:2]) + np.random.randn(40, 1) * 0.05

    def check_model(self, kern, model_type='GPRegression', dimension=1, uncertain_inputs=False):
        """Build the requested model on the stored data and assert checkgrad passes.

        :param kern: kernel instance to use in the model
        :param model_type: name of a class in ``GPy.models`` (e.g. 'GPRegression',
            'SparseGPRegression')
        :param dimension: 1 or 2 -- selects the 1D or 2D sample data from setUp
        :param uncertain_inputs: if True, also pass a random ``X_variance`` array
            (input-uncertainty models)
        """
        # Pick the data matching the requested input dimensionality
        if dimension == 1:
            X = self.X1D
            Y = self.Y1D
        else:
            X = self.X2D
            Y = self.Y2D

        # Resolve the model class by name (GPRegression, SparseGPRegression, etc.)
        model_fit = getattr(GPy.models, model_type)

        if uncertain_inputs:
            m = model_fit(X, Y, kernel=kern, X_variance=np.random.rand(X.shape[0], X.shape[1]))
        else:
            m = model_fit(X, Y, kernel=kern)
        # Randomize parameters so the gradient check does not run at a special point
        m.randomize()
        # constrain all parameters to be positive
        self.assertTrue(m.checkgrad())

    def test_GPRegression_rbf_1d(self):
        ''' Testing GP regression with an rbf kernel on 1d data '''
        rbf = GPy.kern.RBF(1)
        self.check_model(rbf, model_type='GPRegression', dimension=1)

    def test_GPRegression_rbf_2D(self):
        ''' Testing GP regression with an rbf kernel on 2d data '''
        rbf = GPy.kern.RBF(2)
        self.check_model(rbf, model_type='GPRegression', dimension=2)

    def test_GPRegression_rbf_ARD_2D(self):
        ''' Testing GP regression with an ARD rbf kernel on 2d data '''
        k = GPy.kern.RBF(2, ARD=True)
        self.check_model(k, model_type='GPRegression', dimension=2)

    def test_GPRegression_mlp_1d(self):
        ''' Testing GP regression with an mlp kernel on 1d data '''
        mlp = GPy.kern.MLP(1)
        self.check_model(mlp, model_type='GPRegression', dimension=1)

    #TODO:
    #def test_GPRegression_poly_1d(self):
    #    ''' Testing the GP regression with polynomial kernel with white kernel on 1d data '''
    #    mlp = GPy.kern.Poly(1, degree=5)
    #    self.check_model(mlp, model_type='GPRegression', dimension=1)

    def test_GPRegression_matern52_1D(self):
        ''' Testing GP regression with a matern52 kernel on 1d data '''
        matern52 = GPy.kern.Matern52(1)
        self.check_model(matern52, model_type='GPRegression', dimension=1)

    def test_GPRegression_matern52_2D(self):
        ''' Testing GP regression with a matern52 kernel on 2d data '''
        matern52 = GPy.kern.Matern52(2)
        self.check_model(matern52, model_type='GPRegression', dimension=2)

    def test_GPRegression_matern52_ARD_2D(self):
        ''' Testing GP regression with an ARD matern52 kernel on 2d data '''
        matern52 = GPy.kern.Matern52(2, ARD=True)
        self.check_model(matern52, model_type='GPRegression', dimension=2)

    def test_GPRegression_matern32_1D(self):
        ''' Testing GP regression with a matern32 kernel on 1d data '''
        matern32 = GPy.kern.Matern32(1)
        self.check_model(matern32, model_type='GPRegression', dimension=1)

    def test_GPRegression_matern32_2D(self):
        ''' Testing GP regression with a matern32 kernel on 2d data '''
        matern32 = GPy.kern.Matern32(2)
        self.check_model(matern32, model_type='GPRegression', dimension=2)

    def test_GPRegression_matern32_ARD_2D(self):
        ''' Testing GP regression with an ARD matern32 kernel on 2d data '''
        matern32 = GPy.kern.Matern32(2, ARD=True)
        self.check_model(matern32, model_type='GPRegression', dimension=2)

    def test_GPRegression_exponential_1D(self):
        ''' Testing GP regression with an exponential kernel on 1d data '''
        exponential = GPy.kern.Exponential(1)
        self.check_model(exponential, model_type='GPRegression', dimension=1)

    def test_GPRegression_exponential_2D(self):
        ''' Testing GP regression with an exponential kernel on 2d data '''
        exponential = GPy.kern.Exponential(2)
        self.check_model(exponential, model_type='GPRegression', dimension=2)

    def test_GPRegression_exponential_ARD_2D(self):
        ''' Testing GP regression with an ARD exponential kernel on 2d data '''
        exponential = GPy.kern.Exponential(2, ARD=True)
        self.check_model(exponential, model_type='GPRegression', dimension=2)

    def test_GPRegression_bias_kern_1D(self):
        ''' Testing GP regression with a bias kernel on 1d data '''
        bias = GPy.kern.Bias(1)
        self.check_model(bias, model_type='GPRegression', dimension=1)

    def test_GPRegression_bias_kern_2D(self):
        ''' Testing GP regression with a bias kernel on 2d data '''
        bias = GPy.kern.Bias(2)
        self.check_model(bias, model_type='GPRegression', dimension=2)

    def test_GPRegression_linear_kern_1D_ARD(self):
        ''' Testing GP regression with an ARD linear kernel on 1d data '''
        linear = GPy.kern.Linear(1, ARD=True)
        self.check_model(linear, model_type='GPRegression', dimension=1)

    def test_GPRegression_linear_kern_2D_ARD(self):
        ''' Testing GP regression with an ARD linear kernel on 2d data '''
        linear = GPy.kern.Linear(2, ARD=True)
        self.check_model(linear, model_type='GPRegression', dimension=2)

    def test_GPRegression_linear_kern_1D(self):
        ''' Testing GP regression with a linear kernel on 1d data '''
        linear = GPy.kern.Linear(1)
        self.check_model(linear, model_type='GPRegression', dimension=1)

    def test_GPRegression_linear_kern_2D(self):
        ''' Testing GP regression with a linear kernel on 2d data '''
        linear = GPy.kern.Linear(2)
        self.check_model(linear, model_type='GPRegression', dimension=2)

    def test_SparseGPRegression_rbf_white_kern_1d(self):
        ''' Testing sparse GP regression with an rbf kernel on 1d data '''
        rbf = GPy.kern.RBF(1)
        self.check_model(rbf, model_type='SparseGPRegression', dimension=1)

    def test_SparseGPRegression_rbf_white_kern_2D(self):
        ''' Testing sparse GP regression with an rbf kernel on 2d data '''
        rbf = GPy.kern.RBF(2)
        self.check_model(rbf, model_type='SparseGPRegression', dimension=2)

    def test_SparseGPRegression_rbf_linear_white_kern_1D(self):
        ''' Testing sparse GP regression with an rbf + linear kernel on 1d data '''
        rbflin = GPy.kern.RBF(1) + GPy.kern.Linear(1)
        self.check_model(rbflin, model_type='SparseGPRegression', dimension=1)

    def test_SparseGPRegression_rbf_linear_white_kern_2D(self):
        ''' Testing sparse GP regression with an rbf + linear kernel on 2d data '''
        rbflin = GPy.kern.RBF(2) + GPy.kern.Linear(2)
        self.check_model(rbflin, model_type='SparseGPRegression', dimension=2)

    def test_SparseGPRegression_rbf_linear_white_kern_2D_uncertain_inputs(self):
        ''' Testing sparse GP regression with rbf + linear kernel on 2d data with uncertain inputs '''
        rbflin = GPy.kern.RBF(2) + GPy.kern.Linear(2)
        # Uncertain-input gradients are not implemented yet; the check_model
        # call below is kept to document the intended test once they are.
        raise unittest.SkipTest("This is not implemented yet!")
        self.check_model(rbflin, model_type='SparseGPRegression', dimension=2, uncertain_inputs=True)

    def test_SparseGPRegression_rbf_linear_white_kern_1D_uncertain_inputs(self):
        ''' Testing sparse GP regression with rbf + linear kernel on 1d data with uncertain inputs '''
        rbflin = GPy.kern.RBF(1) + GPy.kern.Linear(1)
        # Uncertain-input gradients are not implemented yet; the check_model
        # call below is kept to document the intended test once they are.
        raise unittest.SkipTest("This is not implemented yet!")
        self.check_model(rbflin, model_type='SparseGPRegression', dimension=1, uncertain_inputs=True)

    def test_GPLVM_rbf_bias_white_kern_2D(self):
        """ Testing GPLVM with rbf + bias + white kernel """
        N, input_dim, D = 50, 1, 2
        X = np.random.rand(N, input_dim)
        k = GPy.kern.RBF(input_dim, 0.5, 0.9 * np.ones((1,))) + GPy.kern.Bias(input_dim, 0.1) + GPy.kern.White(input_dim, 0.05)
        # Draw D latent functions from the GP prior defined by k
        K = k.K(X)
        Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
        m = GPy.models.GPLVM(Y, input_dim, kernel=k)
        self.assertTrue(m.checkgrad())

    def test_GPLVM_rbf_linear_white_kern_2D(self):
        """ Testing GPLVM with linear + bias + white kernel """
        N, input_dim, D = 50, 1, 2
        X = np.random.rand(N, input_dim)
        k = GPy.kern.Linear(input_dim) + GPy.kern.Bias(input_dim, 0.1) + GPy.kern.White(input_dim, 0.05)
        # Draw D latent functions from the GP prior defined by k
        K = k.K(X)
        Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
        m = GPy.models.GPLVM(Y, input_dim, init='PCA', kernel=k)
        self.assertTrue(m.checkgrad())

    @unittest.expectedFailure
    def test_GP_EP_probit(self):
        ''' Testing GP classification (EP, probit likelihood) gradients '''
        N = 20
        # Two well-separated clusters; N // 2 keeps the size argument an int
        # (N / 2 is a float under Python 3 and numpy rejects it).
        X = np.hstack([np.random.normal(5, 2, N // 2), np.random.normal(10, 2, N // 2)])[:, None]
        Y = np.hstack([np.ones(N // 2), np.zeros(N // 2)])[:, None]
        kernel = GPy.kern.RBF(1)
        m = GPy.models.GPClassification(X, Y, kernel=kernel)
        m.update_likelihood_approximation()
        self.assertTrue(m.checkgrad())

    @unittest.expectedFailure
    def test_sparse_EP_DTC_probit(self):
        ''' Testing sparse GP classification (EP/DTC, probit likelihood) gradients '''
        N = 20
        X = np.hstack([np.random.normal(5, 2, N // 2), np.random.normal(10, 2, N // 2)])[:, None]
        Y = np.hstack([np.ones(N // 2), np.zeros(N // 2)])[:, None]
        Z = np.linspace(0, 15, 4)[:, None]
        kernel = GPy.kern.RBF(1)
        m = GPy.models.SparseGPClassification(X, Y, kernel=kernel, Z=Z)
        m.update_likelihood_approximation()
        self.assertTrue(m.checkgrad())

    @unittest.expectedFailure
    def test_generalized_FITC(self):
        ''' Testing FITC classification gradients '''
        N = 20
        X = np.hstack([np.random.rand(N // 2) + 1, np.random.rand(N // 2) - 1])[:, None]
        k = GPy.kern.RBF(1) + GPy.kern.White(1)
        Y = np.hstack([np.ones(N // 2), np.zeros(N // 2)])[:, None]
        m = GPy.models.FITCClassification(X, Y, kernel=k)
        m.update_likelihood_approximation()
        self.assertTrue(m.checkgrad())

    # NOTE(review): the two methods below lack the test_ prefix so unittest
    # does not discover them -- presumably disabled on purpose; confirm
    # before renaming.
    def multioutput_regression_1D(self):
        ''' Multioutput GP regression gradients on two 1d outputs '''
        X1 = np.random.rand(50, 1) * 8
        X2 = np.random.rand(30, 1) * 5
        Y1 = np.sin(X1) + np.random.randn(*X1.shape) * 0.05
        Y2 = -np.sin(X2) + np.random.randn(*X2.shape) * 0.05
        k1 = GPy.kern.RBF(1)
        m = GPy.models.GPMultioutputRegression(X_list=[X1, X2], Y_list=[Y1, Y2], kernel_list=[k1])
        # Fix the kernel variance so only the remaining parameters are checked
        m.constrain_fixed('.*rbf_var', 1.)
        self.assertTrue(m.checkgrad())

    def multioutput_sparse_regression_1D(self):
        ''' Sparse multioutput GP regression gradients on two 1d outputs '''
        X1 = np.random.rand(500, 1) * 8
        X2 = np.random.rand(300, 1) * 5
        Y1 = np.sin(X1) + np.random.randn(*X1.shape) * 0.05
        Y2 = -np.sin(X2) + np.random.randn(*X2.shape) * 0.05
        k1 = GPy.kern.RBF(1)
        m = GPy.models.SparseGPMultioutputRegression(X_list=[X1, X2], Y_list=[Y1, Y2], kernel_list=[k1])
        # Fix the kernel variance so only the remaining parameters are checked
        m.constrain_fixed('.*rbf_var', 1.)
        self.assertTrue(m.checkgrad())
2012-11-29 16:28:11 +00:00
|
|
|
if __name__ == "__main__":
    # Entry point: checkgrad-based tests are numerically heavy, hence the warning.
    # print() call (not the Python 2 print statement, which is a SyntaxError in Python 3).
    print("Running unit tests, please be (very) patient...")
    unittest.main()