merging by hand...

James Hensman 2013-06-17 16:00:17 +01:00
commit 850f2fb470
31 changed files with 597 additions and 96 deletions

GPy/examples/__init__.py

@@ -5,3 +5,4 @@ import classification
import regression
import dimensionality_reduction
import tutorials
import stochastic

GPy/examples/classification.py

@@ -25,7 +25,6 @@ def crescent_data(seed=default_seed): # FIXME
Y[Y.flatten()==-1] = 0
m = GPy.models.GPClassification(data['X'], Y)
m.ensure_default_constraints()
#m.update_likelihood_approximation()
#m.optimize()
m.pseudo_EM()
@@ -75,7 +74,6 @@ def toy_linear_1d_classification(seed=default_seed):
# Model definition
m = GPy.models.GPClassification(data['X'], Y)
m.ensure_default_constraints()
# Optimize
#m.update_likelihood_approximation()
@@ -106,7 +104,6 @@ def sparse_toy_linear_1d_classification(num_inducing=10,seed=default_seed):
m = GPy.models.SparseGPClassification(data['X'], Y,num_inducing=num_inducing)
m['.*len']= 4.
m.ensure_default_constraints()
# Optimize
#m.update_likelihood_approximation()
# Parameters optimization:
@@ -137,7 +134,6 @@ def sparse_crescent_data(num_inducing=10, seed=default_seed):
Y[Y.flatten()==-1]=0
m = GPy.models.SparseGPClassification(data['X'], Y,num_inducing=num_inducing)
m.ensure_default_constraints()
m['.*len'] = 10.
#m.update_likelihood_approximation()
#m.optimize()
@@ -163,7 +159,6 @@ def FITC_crescent_data(num_inducing=10, seed=default_seed):
m = GPy.models.FITCClassification(data['X'], Y,num_inducing=num_inducing)
m.constrain_bounded('.*len',1.,1e3)
m.ensure_default_constraints()
m['.*len'] = 3.
#m.update_likelihood_approximation()
#m.optimize()
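
The classification hunks above all converge on one flow. A minimal sketch of that flow, assuming the GPClassification API and the GPy.util.datasets.crescent_data loader used in these examples (the seed value here is illustrative):

    import GPy

    # Load the two-class crescent data and map labels {-1, 1} to {0, 1},
    # as the examples above do before fitting.
    data = GPy.util.datasets.crescent_data(seed=10000)
    Y = data['Y'].copy()
    Y[Y.flatten() == -1] = 0

    m = GPy.models.GPClassification(data['X'], Y)
    m.ensure_default_constraints()  # keep variances and lengthscales positive
    m.pseudo_EM()  # alternates the EP likelihood approximation with hyperparameter optimisation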

GPy/examples/dimensionality_reduction.py

@@ -37,7 +37,6 @@ def BGPLVM(seed=default_seed):
# m.optimize(messages = 1)
# m.plot()
# pb.title('After optimisation')
m.ensure_default_constraints()
m.randomize()
m.checkgrad(verbose=1)
@@ -53,7 +52,6 @@ def GPLVM_oil_100(optimize=True):
m.data_labels = data['Y'].argmax(axis=1)
# optimize
m.ensure_default_constraints()
if optimize:
m.optimize('scg', messages=1)
@@ -108,7 +106,6 @@ def swiss_roll(optimize=True, N=1000, num_inducing=15, Q=4, sigma=.2, plot=False
m.data_colors = c
m.data_t = t
m.ensure_default_constraints()
m['rbf_lengthscale'] = 1. # X.var(0).max() / X.var(0)
m['noise_variance'] = Y.var() / 100.
m['bias_variance'] = 0.05
@@ -134,7 +131,6 @@ def BGPLVM_oil(optimize=True, N=200, Q=10, num_inducing=15, max_f_eval=50, plot=
m['.*lengt'] = 1. # m.X.var(0).max() / m.X.var(0)
m['noise'] = Yn.var() / 100.
m.ensure_default_constraints()
# optimize
if optimize:
@@ -159,7 +155,6 @@ def oil_100():
m = GPy.models.GPLVM(data['X'], 2)
# optimize
m.ensure_default_constraints()
m.optimize(messages=1, max_iters=2)
# plot
@@ -239,7 +234,6 @@ def bgplvm_simulation_matlab_compare():
# X=mu,
# X_variance=S,
_debug=False)
m.ensure_default_constraints()
m.auto_scale_factor = True
m['noise'] = Y.var() / 100.
m['linear_variance'] = .01
@@ -263,7 +257,6 @@ def bgplvm_simulation(optimize='scg',
m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing, kernel=k, _debug=True)
# m.constrain('variance|noise', logexp_clipped())
m.ensure_default_constraints()
m['noise'] = Y.var() / 100.
m['linear_variance'] = .01
@@ -292,7 +285,6 @@ def mrd_simulation(optimize=True, plot=True, plot_sim=True, **kw):
for i, Y in enumerate(Ylist):
m['{}_noise'.format(i + 1)] = Y.var() / 100.
m.ensure_default_constraints()
# DEBUG
# np.seterr("raise")
@@ -320,7 +312,6 @@ def brendan_faces():
# optimize
m.constrain('rbf|noise|white', GPy.core.transformations.logexp_clipped())
m.ensure_default_constraints()
m.optimize('scg', messages=1, max_f_eval=10000)
ax = m.plot_latent(which_indices=(0, 1))
@@ -346,7 +337,6 @@ def stick():
data = GPy.util.datasets.stick()
# optimize
m = GPy.models.GPLVM(data['Y'], 2)
m.ensure_default_constraints()
m.optimize(messages=1, max_f_eval=10000)
m._set_params(m._get_params())
plt.clf()
@@ -388,7 +378,6 @@ def cmu_mocap(subject='35', motion=['01'], in_place=True):
m = GPy.models.GPLVM(data['Y'], 2, normalize_Y=True)
# optimize
m.ensure_default_constraints()
m.optimize(messages=1, max_f_eval=10000)
ax = m.plot_latent()
@@ -420,7 +409,6 @@ def cmu_mocap(subject='35', motion=['01'], in_place=True):
# m.set('iip', Z)
# m.set('bias', 1e-4)
# # optimize
# # m.ensure_default_constraints()
#
# import pdb; pdb.set_trace()
# m.optimize('tnc', messages=1)
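
The GPLVM examples in this file share a constrain-then-optimise-then-plot shape. A minimal sketch of it, built from the stick() hunk above and assuming GPy.util.datasets.stick() returns a dict with a 'Y' array as used there:

    import GPy

    data = GPy.util.datasets.stick()
    m = GPy.models.GPLVM(data['Y'], 2)  # two-dimensional latent space
    m.ensure_default_constraints()
    m.optimize(messages=1, max_f_eval=10000)
    ax = m.plot_latent()  # scatter the learned latent coordinates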

GPy/examples/regression.py

@@ -18,7 +18,6 @@ def toy_rbf_1d(optimizer='tnc', max_nb_eval_optim=100):
m = GPy.models.GPRegression(data['X'],data['Y'])
# optimize
m.ensure_default_constraints()
m.optimize(optimizer, max_f_eval=max_nb_eval_optim)
# plot
m.plot()
@@ -36,7 +35,6 @@ def rogers_girolami_olympics(optim_iters=100):
m['rbf_lengthscale'] = 10
# optimize
m.ensure_default_constraints()
m.optimize(max_f_eval=optim_iters)
# plot
@@ -52,7 +50,6 @@ def toy_rbf_1d_50(optim_iters=100):
m = GPy.models.GPRegression(data['X'],data['Y'])
# optimize
m.ensure_default_constraints()
m.optimize(max_f_eval=optim_iters)
# plot
@@ -68,7 +65,6 @@ def silhouette(optim_iters=100):
m = GPy.models.GPRegression(data['X'],data['Y'])
# optimize
m.ensure_default_constraints()
m.optimize(messages=True,max_f_eval=optim_iters)
print(m)
@@ -92,7 +88,6 @@ def coregionalisation_toy2(optim_iters=100):
m = GPy.models.GPRegression(X,Y,kernel=k)
m.constrain_fixed('.*rbf_var',1.)
#m.constrain_positive('.*kappa')
m.ensure_default_constraints()
m.optimize('sim',messages=1,max_f_eval=optim_iters)
pb.figure()
@@ -124,7 +119,6 @@ def coregionalisation_toy(optim_iters=100):
m = GPy.models.GPRegression(X,Y,kernel=k)
m.constrain_fixed('.*rbf_var',1.)
#m.constrain_positive('kappa')
m.ensure_default_constraints()
m.optimize(max_f_eval=optim_iters)
pb.figure()
@@ -162,7 +156,6 @@ def coregionalisation_sparse(optim_iters=100):
m.constrain_fixed('.*rbf_var',1.)
m.constrain_fixed('iip')
m.constrain_bounded('noise_variance',1e-3,1e-1)
m.ensure_default_constraints()
m.optimize_restarts(5, robust=True, messages=1, max_f_eval=optim_iters)
#plotting:
@@ -189,11 +182,9 @@ def multiple_optima(gene_number=937,resolution=80, model_restarts=10, seed=10000
log_SNRs = np.linspace(-3., 4., resolution)
data = GPy.util.datasets.della_gatta_TRP63_gene_expression(gene_number)
# Sub sample the data to ensure multiple optima
#data['Y'] = data['Y'][0::2, :]
#data['X'] = data['X'][0::2, :]
# Remove the mean (no bias kernel to ensure signal/noise is in RBF/white)
data['Y'] = data['Y'] - np.mean(data['Y'])
lls = GPy.examples.regression._contour_data(data, length_scales, log_SNRs, GPy.kern.rbf)
@@ -220,7 +211,6 @@ def multiple_optima(gene_number=937,resolution=80, model_restarts=10, seed=10000
optim_point_y[0] = np.log10(m['rbf_variance']) - np.log10(m['noise_variance']);
# optimize
m.ensure_default_constraints()
m.optimize('scg', xtol=1e-6, ftol=1e-6, max_f_eval=optim_iters)
optim_point_x[1] = m['rbf_lengthscale']
@@ -273,7 +263,6 @@ def sparse_GP_regression_1D(N = 400, num_inducing = 5, optim_iters=100):
# create simple GP Model
m = GPy.models.SparseGPRegression(X, Y, kernel, num_inducing=num_inducing)
m.ensure_default_constraints()
m.checkgrad(verbose=1)
m.optimize('tnc', messages = 1, max_f_eval=optim_iters)
@@ -294,7 +283,6 @@ def sparse_GP_regression_2D(N = 400, num_inducing = 50, optim_iters=100):
m = GPy.models.SparseGPRegression(X,Y,kernel, num_inducing = num_inducing)
# contrain all parameters to be positive (but not inducing inputs)
m.ensure_default_constraints()
m.set('.*len',2.)
m.checkgrad()
@@ -320,7 +308,6 @@ def uncertain_inputs_sparse_regression(optim_iters=100):
# create simple GP Model - no input uncertainty on this one
m = GPy.models.SparseGPRegression(X, Y, kernel=k, Z=Z)
m.ensure_default_constraints()
m.optimize('scg', messages=1, max_f_eval=optim_iters)
m.plot(ax=axes[0])
axes[0].set_title('no input uncertainty')
@@ -328,7 +315,6 @@ def uncertain_inputs_sparse_regression(optim_iters=100):
#the same Model with uncertainty
m = GPy.models.SparseGPRegression(X, Y, kernel=k, Z=Z, X_variance=S)
m.ensure_default_constraints()
m.optimize('scg', messages=1, max_f_eval=optim_iters)
m.plot(ax=axes[1])
axes[1].set_title('with input uncertainty')
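
The last two hunks fit the same sparse model with and without input uncertainty. A minimal sketch of the uncertain-input variant, assuming the SparseGPRegression signature shown above (data shapes and values are illustrative):

    import numpy as np
    import GPy

    N, M = 400, 10
    X = np.random.uniform(-3., 3., (N, 1))
    Y = np.sin(X) + np.random.randn(N, 1) * 0.05
    Z = np.random.uniform(-3., 3., (M, 1))  # inducing inputs
    S = np.ones_like(X) * 0.01  # per-point input variance

    m = GPy.models.SparseGPRegression(X, Y, kernel=GPy.kern.rbf(1), Z=Z, X_variance=S)
    m.ensure_default_constraints()
    m.optimize('scg', messages=1, max_f_eval=100)
    m.plot()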

GPy/examples/stochastic.py

@@ -0,0 +1,40 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)

import pylab as pb
import numpy as np
import GPy

def toy_1d():
    N = 2000
    M = 20
    #create data
    X = np.linspace(0,32,N)[:,None]
    Z = np.linspace(0,32,M)[:,None]
    Y = np.sin(X) + np.cos(0.3*X) + np.random.randn(*X.shape)/np.sqrt(50.)
    m = GPy.models.SVIGPRegression(X,Y, batchsize=10, Z=Z)
    m.constrain_bounded('noise_variance',1e-3,1e-1)
    m.param_steplength = 1e-4
    fig = pb.figure()
    ax = fig.add_subplot(111)
    def cb():
        ax.cla()
        m.plot(ax=ax,Z_height=-3)
        ax.set_ylim(-3,3)
        fig.canvas.draw()
    m.optimize(500, callback=cb, callback_interval=1)
    m.plot_traces()
    return m
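
Since this file is new in the commit, a quick way to exercise it once merged. A minimal sketch, assuming the module lands as GPy/examples/stochastic.py (the path is inferred from the import added to __init__.py above):

    import GPy

    # Runs SVI regression on the toy 1-d data with a live plotting callback,
    # then draws parameter traces and returns the fitted model.
    m = GPy.examples.stochastic.toy_1d()
    print m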

GPy/examples/tutorials.py

@@ -24,7 +24,6 @@ def tuto_GP_regression():
print m
m.plot()
m.ensure_default_constraints()
m.constrain_positive('')
m.unconstrain('') # may be used to remove the previous constrains
@@ -135,7 +134,6 @@ def tuto_kernel_overview():
pb.ylabel("+ ",rotation='horizontal',fontsize='30')
m.plot(ax=axs, which_parts=[False,False,False,True])
m.ensure_default_constraints()
return(m)
@@ -144,6 +142,5 @@ def model_interaction():
Y = np.sin(X) + np.random.randn(*X.shape)*0.01 + 5.
k = GPy.kern.rbf(1) + GPy.kern.bias(1)
m = GPy.models.GPRegression(X, Y, kernel=k)
m.ensure_default_constraints()
return m
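
The model_interaction() hunk composes kernels by addition. A minimal sketch of the same composition, assuming the kern API used above (inputs are illustrative):

    import numpy as np
    import GPy

    X = np.linspace(0., 10., 50)[:, None]
    Y = np.sin(X) + np.random.randn(*X.shape) * 0.01 + 5.

    k = GPy.kern.rbf(1) + GPy.kern.bias(1)  # rbf models the sinusoid, bias the +5 offset
    m = GPy.models.GPRegression(X, Y, kernel=k)
    m.ensure_default_constraints()
    m.optimize()
    m.plot()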