dim reduction adaptation

Max Zwiessele 2013-06-05 17:33:37 +01:00
parent a6ed003194
commit 71462a1347
3 changed files with 12 additions and 13 deletions

(file 1 of 3; filename not shown in this view)

@@ -27,7 +27,7 @@ def BGPLVM(seed=default_seed):
     # k = GPy.kern.rbf(Q, ARD = False) + GPy.kern.white(Q, 0.00001)
     m = GPy.models.BayesianGPLVM(Y, Q, kernel=k, num_inducing=num_inducing)
-    m.constrain_positive('(rbf|bias|noise|white|S)')
+    # m.constrain_positive('(rbf|bias|noise|white|S)')
     # m.constrain_fixed('S', 1)
     # pb.figure()
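
Note on this hunk: the explicit positivity constraint is commented out, presumably because the ensure_default_constraints() call used elsewhere in these demos now covers those parameters. The string '(rbf|bias|noise|white|S)' is a regular expression matched against parameter names. A minimal sketch of that matching, using re from the standard library and a hypothetical parameter list (not GPy's actual parameter table):

    import re

    # hypothetical parameter names, for illustration only
    names = ['rbf_variance', 'rbf_lengthscale', 'bias_variance',
             'white_variance', 'noise_variance', 'X_variance_S']
    pattern = re.compile('(rbf|bias|noise|white|S)')

    # a regex constraint applies to every parameter whose name matches
    print([n for n in names if pattern.search(n)])  # all six match here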
@@ -117,10 +117,9 @@ def swiss_roll(optimize=True, N=1000, num_inducing=15, Q=4, sigma=.2, plot=False
         m.optimize('scg', messages=1)
     return m
 
 
-def BGPLVM_oil(optimize=True, N=100, Q=5, num_inducing=25, max_f_eval=4e3, plot=False, **k):
+def BGPLVM_oil(optimize=True, N=200, Q=10, num_inducing=15, max_f_eval=4e3, plot=False, **k):
     np.random.seed(0)
     data = GPy.util.datasets.oil()
-    from GPy.core.transformations import logexp_clipped
     # create simple GP model
     kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q, np.exp(-2)) + GPy.kern.white(Q, np.exp(-2))
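
The import of logexp_clipped is dropped here; its only use below stays commented out on both sides of the diff, so the import was dead. For context, a clipped log-exp (softplus) transform maps an unconstrained value to a positive one while avoiding overflow; a rough numpy sketch of the idea (the clipping bound is illustrative, not GPy's implementation):

    import numpy as np

    def logexp(x, lim=36.0):
        # softplus f(x) = log(1 + exp(x)) maps the reals to (0, inf);
        # clipping x keeps exp() from overflowing for large inputs
        x = np.clip(x, -lim, lim)
        return np.log1p(np.exp(x))

    print(logexp(np.array([-40.0, 0.0, 40.0])))  # ~[0, log 2, ~36]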
@@ -132,14 +131,14 @@ def BGPLVM_oil(optimize=True, N=100, Q=5, num_inducing=25, max_f_eval=4e3, plot=
     m.data_labels = data['Y'][:N].argmax(axis=1)
 
     # m.constrain('variance|leng', logexp_clipped())
-    m['lengt'] = m.X.var(0).max() / m.X.var(0)
+    m['.*lengt'] = 1. # m.X.var(0).max() / m.X.var(0)
     m['noise'] = Yn.var() / 100.
 
     m.ensure_default_constraints()
 
     # optimize
     if optimize:
-        m.optimize('scg', messages=1, max_f_eval=max_f_eval)
+        m.optimize('scg', messages=1, max_f_eval=max_f_eval, gtol=.05)
 
     if plot:
         y = m.likelihood.Y[0, :]
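
Two changes in this hunk: the lengthscale initialization moves from the data-dependent heuristic m.X.var(0).max() / m.X.var(0) to a constant 1. (the broadened regex '.*lengt' still targets the same lengthscale parameters), and the SCG call gains the relaxed gtol=.05 discussed below. The old heuristic gave low-variance latent dimensions long lengthscales, effectively switching them off in the ARD kernel; a small numpy sketch of what it computes (synthetic data, not the oil dataset):

    import numpy as np

    np.random.seed(0)
    X = np.random.randn(1000, 5) * np.array([3.0, 2.0, 1.0, 0.5, 0.1])

    old_init = X.var(0).max() / X.var(0)   # inverse-variance scaling
    new_init = np.ones(X.shape[1])         # the replacement: neutral start at 1

    print(np.round(old_init, 1))  # roughly [1, 2.2, 9, 36, 900]
    print(new_init)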
@@ -266,9 +265,9 @@ def bgplvm_simulation(optimize='scg',
     if optimize:
         print "Optimizing model:"
-        m.optimize('scg', max_iters=max_f_eval,
+        m.optimize(optimize, max_iters=max_f_eval,
                    max_f_eval=max_f_eval,
-                   messages=True, gtol=1e-6)
+                   messages=True, gtol=.05)
 
     if plot:
         m.plot_X_1d("BGPLVM Latent Space 1D")
         m.kern.plot_ARD('BGPLVM Simulation ARD Parameters')
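
This hunk also fixes a small bug: the optimizer was hard-coded to 'scg' even though bgplvm_simulation takes optimize='scg' as a parameter; the argument is now passed through. Both optimize calls in this file relax the gradient tolerance from 1e-6 to .05, so the run stops once the gradient norm drops below the threshold, trading precision for speed. A hedged illustration of that effect using SciPy's CG as a stand-in, assuming gtol plays the same role in GPy's SCG as it does here:

    import numpy as np
    from scipy import optimize

    x0 = np.zeros(4)
    loose = optimize.minimize(optimize.rosen, x0, jac=optimize.rosen_der,
                              method='CG', options={'gtol': 0.05})
    tight = optimize.minimize(optimize.rosen, x0, jac=optimize.rosen_der,
                              method='CG', options={'gtol': 1e-6})
    print(loose.nit, tight.nit)  # the looser tolerance typically stops earlier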

(file 2 of 3; filename not shown in this view)

@@ -18,10 +18,10 @@ class opt_SGD(Optimizer):
     """
-    def __init__(self, start, iterations = 10, learning_rate = 1e-4, momentum = 0.9, Model = None, messages = False, batch_size = 1, self_paced = False, center = True, iteration_file = None, learning_rate_adaptation=None, actual_iter=None, schedule=None, **kwargs):
+    def __init__(self, start, iterations = 10, learning_rate = 1e-4, momentum = 0.9, model = None, messages = False, batch_size = 1, self_paced = False, center = True, iteration_file = None, learning_rate_adaptation=None, actual_iter=None, schedule=None, **kwargs):
         self.opt_name = "Stochastic Gradient Descent"
-        self.Model = Model
+        self.Model = model
         self.iterations = iterations
         self.momentum = momentum
         self.learning_rate = learning_rate
@@ -42,11 +42,11 @@ class opt_SGD(Optimizer):
         self.learning_rate_0 = self.learning_rate.mean()
         self.schedule = schedule
 
-        # if len([p for p in self.Model.kern.parts if p.name == 'bias']) == 1:
+        # if len([p for p in self.model.kern.parts if p.name == 'bias']) == 1:
         # self.param_traces.append(('bias',[]))
-        # if len([p for p in self.Model.kern.parts if p.name == 'linear']) == 1:
+        # if len([p for p in self.model.kern.parts if p.name == 'linear']) == 1:
         # self.param_traces.append(('linear',[]))
-        # if len([p for p in self.Model.kern.parts if p.name == 'rbf']) == 1:
+        # if len([p for p in self.model.kern.parts if p.name == 'rbf']) == 1:
         # self.param_traces.append(('rbf_var',[]))
         self.param_traces = dict(self.param_traces)
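
This file only lowercases the keyword argument (Model to model) and the commented-out references; the attribute keeps its old name self.Model, and any external caller still passing the capitalized keyword now gets a TypeError. A sketch of a backward-compatible variant of such a rename (a general pattern with a hypothetical class name, not what this commit does):

    import warnings

    class opt_SGD_compat(object):
        def __init__(self, model=None, **kwargs):
            # accept the old capitalized keyword for a transition period
            if 'Model' in kwargs:
                warnings.warn("'Model' is deprecated, use 'model'",
                              DeprecationWarning)
                model = kwargs.pop('Model')
            self.Model = model  # attribute name kept, as in the commit

    opt = opt_SGD_compat(Model='legacy caller')  # still works, but warns
    print(opt.Model)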

(file 3 of 3; filename not shown in this view)

@@ -78,7 +78,7 @@ class MRD(Model):
         self.NQ = self.num_data * self.input_dim
         self.MQ = self.num_inducing * self.input_dim
 
-        Model.__init__(self) # @UndefinedVariable
+        model.__init__(self) # @UndefinedVariable
         self._set_params(self._get_params())
 
     @property
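
The MRD change swaps Model.__init__(self) for model.__init__(self), presumably tracking a lowercased base-class name elsewhere in the codebase (the # @UndefinedVariable marker, which suppresses an IDE warning, is kept). A minimal sketch of this explicit base-class initialization pattern, with hypothetical names:

    class model(object):
        def __init__(self):
            self.ready = True

    class MRD_like(model):
        def __init__(self, data):
            self.data = data
            # explicit base-class call, as in the diff; super() is the
            # more common idiom: super(MRD_like, self).__init__()
            model.__init__(self)

    m = MRD_like([1, 2, 3])
    print(m.ready, m.data)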