Example update to run the oil dataset

This commit is contained in:
Max Zwiessele 2013-05-10 11:21:53 +01:00
parent 214eab5f2e
commit 8a843378a0

View file

@@ -63,7 +63,7 @@ def GPLVM_oil_100(optimize=True):
m.plot_latent(labels=m.data_labels) m.plot_latent(labels=m.data_labels)
return m return m
def BGPLVM_oil(optimize=True, N=100, Q=10, M=15, max_f_eval=300, plot=False): def BGPLVM_oil(optimize=True, N=100, Q=10, M=20, max_f_eval=300, plot=False):
data = GPy.util.datasets.oil() data = GPy.util.datasets.oil()
# create simple GP model # create simple GP model
@@ -72,19 +72,19 @@ def BGPLVM_oil(optimize=True, N=100, Q=10, M=15, max_f_eval=300, plot=False):
m = GPy.models.Bayesian_GPLVM(Y, Q, kernel=kernel, M=M) m = GPy.models.Bayesian_GPLVM(Y, Q, kernel=kernel, M=M)
m.data_labels = data['Y'][:N].argmax(axis=1) m.data_labels = data['Y'][:N].argmax(axis=1)
m.constrain('variance', logexp_clipped())
m.constrain('length', logexp_clipped())
m['lengt'] = 100.
m.ensure_default_constraints()
# optimize # optimize
if optimize: if optimize:
m.constrain_fixed('noise', 1. / Y.var()) m.unconstrain('noise'); m.constrain_fixed('noise', Y.var() / 100.)
m.constrain('variance', logexp_clipped()) m.optimize('scg', messages=1, max_f_eval=150)
m['lengt'] = 1000
m.ensure_default_constraints()
m.optimize('scg', messages=1, max_f_eval=max(80, max_f_eval))
m.unconstrain('noise') m.unconstrain('noise')
m.constrain_positive('noise') m.constrain('noise', logexp_clipped())
m.optimize('scg', messages=1, max_f_eval=max(0, max_f_eval - 80)) m.optimize('scg', messages=1, max_f_eval=max_f_eval)
else:
m.ensure_default_constraints()
if plot: if plot:
y = m.likelihood.Y[0, :] y = m.likelihood.Y[0, :]
@@ -92,7 +92,7 @@ def BGPLVM_oil(optimize=True, N=100, Q=10, M=15, max_f_eval=300, plot=False):
plt.sca(latent_axes) plt.sca(latent_axes)
m.plot_latent() m.plot_latent()
data_show = GPy.util.visualize.vector_show(y) data_show = GPy.util.visualize.vector_show(y)
lvm_visualizer = GPy.util.visualize.lvm_dimselect(m.X[0, :], m, data_show, latent_axes=latent_axes) # , sense_axes=sense_axes) lvm_visualizer = GPy.util.visualize.lvm_dimselect(m.X[0, :], m, data_show, latent_axes=latent_axes) # , sense_axes=sense_axes)
raw_input('Press enter to finish') raw_input('Press enter to finish')
plt.close('all') plt.close('all')
# # plot # # plot
@@ -182,7 +182,7 @@ def bgplvm_simulation_matlab_compare():
Y = sim_data['Y'] Y = sim_data['Y']
S = sim_data['S'] S = sim_data['S']
mu = sim_data['mu'] mu = sim_data['mu']
M, [_, Q] = 20, mu.shape M, [_, Q] = 3, mu.shape
from GPy.models import mrd from GPy.models import mrd
from GPy import kern from GPy import kern
@@ -192,7 +192,7 @@ def bgplvm_simulation_matlab_compare():
m = Bayesian_GPLVM(Y, Q, init="PCA", M=M, kernel=k, m = Bayesian_GPLVM(Y, Q, init="PCA", M=M, kernel=k,
# X=mu, # X=mu,
# X_variance=S, # X_variance=S,
_debug=True) _debug=False)
m.ensure_default_constraints() m.ensure_default_constraints()
m.auto_scale_factor = True m.auto_scale_factor = True
m['noise'] = Y.var() / 100. m['noise'] = Y.var() / 100.
@@ -223,7 +223,7 @@ def bgplvm_simulation(burnin='scg', plot_sim=False,
Y = Ylist[0] Y = Ylist[0]
k = kern.linear(Q, ARD=True) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2)) # + kern.bias(Q) k = kern.linear(Q, ARD=True) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2)) # + kern.bias(Q)
# k = kern.white(Q, .00001) + kern.bias(Q) # k = kern.white(Q, .00001) + kern.bias(Q)
m = Bayesian_GPLVM(Y, Q, init="PCA", M=M, kernel=k, _debug=True) m = Bayesian_GPLVM(Y, Q, init="PCA", M=M, kernel=k, _debug=True)
# m.set('noise',) # m.set('noise',)
@@ -358,7 +358,7 @@ def mrd_simulation(plot_sim=False):
# import ipdb; ipdb.set_trace() # import ipdb; ipdb.set_trace()
# np.seterrcall(ipdbonerr) # np.seterrcall(ipdbonerr)
return m # , mtest return m # , mtest
def mrd_silhouette(): def mrd_silhouette():
@@ -371,7 +371,7 @@ def brendan_faces():
# optimize # optimize
m.ensure_default_constraints() m.ensure_default_constraints()
# m.optimize(messages=1, max_f_eval=10000) m.optimize(messages=1, max_f_eval=10000)
ax = m.plot_latent() ax = m.plot_latent()
y = m.likelihood.Y[0, :] y = m.likelihood.Y[0, :]