[plotting] adjusting tests for quicker plotting

This commit is contained in:
Max Zwiessele 2016-06-21 10:45:06 +01:00
parent 67444e22a5
commit e702305df2

View file

@@ -74,12 +74,13 @@ except ImportError:
extensions = ['npz']
basedir = os.path.dirname(os.path.relpath(os.path.abspath(__file__)))
def _image_directories():
"""
Compute the baseline and result image directories for testing *func*.
Create the result directory if it doesn't exist.
"""
basedir = os.path.dirname(os.path.relpath(os.path.abspath(__file__)))
#module_name = __init__.__module__
#mods = module_name.split('.')
#basedir = os.path.join(*mods)
@@ -397,31 +398,37 @@ def test_sparse_classification():
yield (do_test, )
def test_gplvm():
from ..examples.dimensionality_reduction import _simulate_matern
from ..kern import RBF
from ..models import GPLVM
from GPy.models import GPLVM
np.random.seed(12345)
matplotlib.rcParams.update(matplotlib.rcParamsDefault)
#matplotlib.rcParams[u'figure.figsize'] = (4,3)
matplotlib.rcParams[u'text.usetex'] = False
Q = 3
#Q = 3
# Define dataset
N = 10
k1 = GPy.kern.RBF(5, variance=1, lengthscale=1./np.random.dirichlet(np.r_[10,10,10,0.1,0.1]), ARD=True)
k2 = GPy.kern.RBF(5, variance=1, lengthscale=1./np.random.dirichlet(np.r_[10,0.1,10,0.1,10]), ARD=True)
k3 = GPy.kern.RBF(5, variance=1, lengthscale=1./np.random.dirichlet(np.r_[0.1,0.1,10,10,10]), ARD=True)
X = np.random.normal(0, 1, (N, 5))
A = np.random.multivariate_normal(np.zeros(N), k1.K(X), Q).T
B = np.random.multivariate_normal(np.zeros(N), k2.K(X), Q).T
C = np.random.multivariate_normal(np.zeros(N), k3.K(X), Q).T
#N = 60
#k1 = GPy.kern.RBF(5, variance=1, lengthscale=1./np.random.dirichlet(np.r_[10,10,10,0.1,0.1]), ARD=True)
#k2 = GPy.kern.RBF(5, variance=1, lengthscale=1./np.random.dirichlet(np.r_[10,0.1,10,0.1,10]), ARD=True)
#k3 = GPy.kern.RBF(5, variance=1, lengthscale=1./np.random.dirichlet(np.r_[0.1,0.1,10,10,10]), ARD=True)
#X = np.random.normal(0, 1, (N, 5))
#A = np.random.multivariate_normal(np.zeros(N), k1.K(X), Q).T
#B = np.random.multivariate_normal(np.zeros(N), k2.K(X), Q).T
#C = np.random.multivariate_normal(np.zeros(N), k3.K(X), Q).T
#Y = np.vstack((A,B,C))
#labels = np.hstack((np.zeros(A.shape[0]), np.ones(B.shape[0]), np.ones(C.shape[0])*2))
Y = np.vstack((A,B,C))
labels = np.hstack((np.zeros(A.shape[0]), np.ones(B.shape[0]), np.ones(C.shape[0])*2))
#k = RBF(Q, ARD=True, lengthscale=2) # + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)
pars = np.load(os.path.join(basedir, 'b-gplvm-save.npz'))
Y = pars['Y']
Q = pars['Q']
labels = pars['labels']
import warnings
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always') # always print
m = GPLVM(Y, Q, initialize=False)
m.param_array[:] = pars['gplvm_p']
m.initialize_parameter()
k = RBF(Q, ARD=True, lengthscale=2) # + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)
m = GPLVM(Y, Q, init="PCA", kernel=k)
m.kern.lengthscale[:] = [1./.3, 1./.1, 1./.7]
m.likelihood.variance = .001
#m.optimize(messages=0)
np.random.seed(111)
m.plot_latent(labels=labels)
@@ -436,31 +443,38 @@ def test_gplvm():
yield (do_test, )
def test_bayesian_gplvm():
from ..examples.dimensionality_reduction import _simulate_matern
from ..kern import RBF
from ..models import BayesianGPLVM
np.random.seed(12345)
matplotlib.rcParams.update(matplotlib.rcParamsDefault)
#matplotlib.rcParams[u'figure.figsize'] = (4,3)
matplotlib.rcParams[u'text.usetex'] = False
Q = 3
#Q = 3
# Define dataset
N = 10
k1 = GPy.kern.RBF(5, variance=1, lengthscale=1./np.random.dirichlet(np.r_[10,10,10,0.1,0.1]), ARD=True)
k2 = GPy.kern.RBF(5, variance=1, lengthscale=1./np.random.dirichlet(np.r_[10,0.1,10,0.1,10]), ARD=True)
k3 = GPy.kern.RBF(5, variance=1, lengthscale=1./np.random.dirichlet(np.r_[0.1,0.1,10,10,10]), ARD=True)
X = np.random.normal(0, 1, (N, 5))
A = np.random.multivariate_normal(np.zeros(N), k1.K(X), Q).T
B = np.random.multivariate_normal(np.zeros(N), k2.K(X), Q).T
C = np.random.multivariate_normal(np.zeros(N), k3.K(X), Q).T
#N = 10
#k1 = GPy.kern.RBF(5, variance=1, lengthscale=1./np.random.dirichlet(np.r_[10,10,10,0.1,0.1]), ARD=True)
#k2 = GPy.kern.RBF(5, variance=1, lengthscale=1./np.random.dirichlet(np.r_[10,0.1,10,0.1,10]), ARD=True)
#k3 = GPy.kern.RBF(5, variance=1, lengthscale=1./np.random.dirichlet(np.r_[0.1,0.1,10,10,10]), ARD=True)
#X = np.random.normal(0, 1, (N, 5))
#A = np.random.multivariate_normal(np.zeros(N), k1.K(X), Q).T
#B = np.random.multivariate_normal(np.zeros(N), k2.K(X), Q).T
#C = np.random.multivariate_normal(np.zeros(N), k3.K(X), Q).T
Y = np.vstack((A,B,C))
labels = np.hstack((np.zeros(A.shape[0]), np.ones(B.shape[0]), np.ones(C.shape[0])*2))
#Y = np.vstack((A,B,C))
#labels = np.hstack((np.zeros(A.shape[0]), np.ones(B.shape[0]), np.ones(C.shape[0])*2))
#k = RBF(Q, ARD=True, lengthscale=2) # + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)
pars = np.load(os.path.join(basedir, 'b-gplvm-save.npz'))
Y = pars['Y']
Q = pars['Q']
labels = pars['labels']
import warnings
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always') # always print
m = BayesianGPLVM(Y, Q, initialize=False)
m.param_array[:] = pars['bgplvm_p']
m.initialize_parameter()
k = RBF(Q, ARD=True, lengthscale=2) # + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)
m = BayesianGPLVM(Y, Q, init="PCA", kernel=k)
m.kern.lengthscale[:] = [1./.3, 1./.1, 1./.7]
m.likelihood.variance = .001
#m.optimize(messages=0)
np.random.seed(111)
m.plot_inducing(projection='2d')