very weird merge conflict, including in files that I did not change

This commit is contained in:
James Hensman 2014-03-18 16:46:37 +00:00
commit 601175de2d
73 changed files with 2234 additions and 1567 deletions

View file

@@ -89,7 +89,7 @@ def toy_linear_1d_classification_laplace(seed=default_seed, optimize=True, plot=
likelihood = GPy.likelihoods.Bernoulli()
laplace_inf = GPy.inference.latent_function_inference.Laplace()
kernel = GPy.kern.rbf(1)
kernel = GPy.kern.RBF(1)
# Model definition
m = GPy.core.GP(data['X'], Y, kernel=kernel, likelihood=likelihood, inference_method=laplace_inf)

View file

@@ -0,0 +1,80 @@
import numpy as np
import pylab as pb
import GPy
pb.ion()
pb.close('all')
X1 = np.arange(3)[:,None]
X2 = np.arange(4)[:,None]
I1 = np.zeros_like(X1)
I2 = np.ones_like(X2)
_X = np.vstack([ X1, X2 ])
_I = np.vstack([ I1, I2 ])
X = np.hstack([ _X, _I ])
Y1 = np.sin(X1/8.)
Y2 = np.cos(X2/8.)
Bias = GPy.kern.Bias(1,active_dims=[0])
Coreg = GPy.kern.Coregionalize(1,2,active_dims=[1])
K = Bias.prod(Coreg,name='X')
#K.coregion.W = 0
#print K.coregion.W
#print Bias.K(_X,_X)
#print K.K(X,X)
#pb.matshow(K.K(X,X))
Mlist = [GPy.kern.Matern32(1,lengthscale=20.,name="Mat")]
kern = GPy.util.multioutput.LCM(input_dim=1,num_outputs=2,kernels_list=Mlist,name='H')
kern.B.W = 0
kern.B.kappa = 1.
#kern.B.W.fix()
#kern.B.kappa.fix()
#m = GPy.models.GPCoregionalizedRegression(X_list=[X1,X2], Y_list=[Y1,Y2], kernel=kern)
m = GPy.models.SparseGPCoregionalizedRegression(X_list=[X1], Y_list=[Y1], kernel=kern)
#m.optimize()
m.checkgrad(verbose=1)
fig = pb.figure()
ax0 = fig.add_subplot(211)
ax1 = fig.add_subplot(212)
slices = GPy.util.multioutput.get_slices([Y1,Y2])
m.plot(fixed_inputs=[(1,0)],which_data_rows=slices[0],ax=ax0)
#m.plot(fixed_inputs=[(1,1)],which_data_rows=slices[1],ax=ax1)
"""
X1 = 100 * np.random.rand(100)[:,None]
X2 = 100 * np.random.rand(100)[:,None]
#X1.sort()
#X2.sort()
Y1 = np.sin(X1/10.) + np.random.rand(100)[:,None]
Y2 = np.cos(X2/10.) + np.random.rand(100)[:,None]
Mlist = [GPy.kern.Matern32(1,lengthscale=20.,name="Mat")]
kern = GPy.util.multioutput.LCM(input_dim=1,num_outputs=12,kernels_list=Mlist,name='H')
m = GPy.models.GPCoregionalizedRegression(X_list=[X1,X2], Y_list=[Y1,Y2], kernel=kern)
m.optimize()
fig = pb.figure()
ax0 = fig.add_subplot(211)
ax1 = fig.add_subplot(212)
slices = GPy.util.multioutput.get_slices([Y1,Y2])
m.plot(fixed_inputs=[(1,0)],which_data_rows=slices[0],ax=ax0)
m.plot(fixed_inputs=[(1,1)],which_data_rows=slices[1],ax=ax1)
"""

View file

@@ -324,14 +324,14 @@ def mrd_simulation(optimize=True, verbose=True, plot=True, plot_sim=True, **kw):
D1, D2, D3, N, num_inducing, Q = 60, 20, 36, 60, 6, 5
_, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim)
likelihood_list = [Gaussian(x, normalize=True) for x in Ylist]
k = kern.Linear(Q, ARD=True) + kern.Bias(Q, _np.exp(-2)) + kern.White(Q, _np.exp(-2))
m = MRD(likelihood_list, input_dim=Q, num_inducing=num_inducing, kernels=k, initx="", initz='permute', **kw)
m.ensure_default_constraints()
for i, bgplvm in enumerate(m.bgplvms):
m['{}_noise'.format(i)] = bgplvm.likelihood.Y.var() / 500.
#Ylist = [Ylist[0]]
k = [kern.Linear(Q, ARD=True) + kern.White(Q, 1e-4) for _ in range(len(Ylist))]
m = MRD(Ylist, input_dim=Q, num_inducing=num_inducing, kernel=k, initx="", initz='permute', **kw)
m['.*noise'] = [Y.var()/500. for Y in Ylist]
#for i, Y in enumerate(Ylist):
# m['.*Y_{}.*Gaussian.*noise'.format(i)] = Y.var(1) / 500.
if optimize:
print "Optimizing Model:"

View file

@@ -318,7 +318,7 @@ def toy_ARD(max_iters=1000, kernel_type='linear', num_samples=300, D=4, optimize
Y /= Y.std()
if kernel_type == 'linear':
kernel = GPy.kern.linear(X.shape[1], ARD=1)
kernel = GPy.kern.Linear(X.shape[1], ARD=1)
elif kernel_type == 'rbf_inv':
kernel = GPy.kern.RBF_inv(X.shape[1], ARD=1)
else:
@@ -357,7 +357,7 @@ def toy_ARD_sparse(max_iters=1000, kernel_type='linear', num_samples=300, D=4, o
Y /= Y.std()
if kernel_type == 'linear':
kernel = GPy.kern.linear(X.shape[1], ARD=1)
kernel = GPy.kern.Linear(X.shape[1], ARD=1)
elif kernel_type == 'rbf_inv':
kernel = GPy.kern.RBF_inv(X.shape[1], ARD=1)
else:
@@ -468,7 +468,7 @@ def sparse_GP_regression_2D(num_samples=400, num_inducing=50, max_iters=100, opt
def uncertain_inputs_sparse_regression(max_iters=200, optimize=True, plot=True):
"""Run a 1D example of a sparse GP regression with uncertain inputs."""
fig, axes = pb.subplots(1, 2, figsize=(12, 5))
fig, axes = pb.subplots(1, 2, figsize=(12, 5), sharex=True, sharey=True)
# sample inputs and outputs
S = np.ones((20, 1))