Merge branch 'devel' into plot_density

mzwiessele 2015-10-11 14:05:09 +01:00
commit 3833ac5a49
4 changed files with 87 additions and 25 deletions

View file

@@ -32,8 +32,8 @@ class Laplace(LatentFunctionInference):
         """
-        self._mode_finding_tolerance = 1e-7
-        self._mode_finding_max_iter = 60
+        self._mode_finding_tolerance = 1e-4
+        self._mode_finding_max_iter = 30
         self.bad_fhat = False
         #Store whether it is the first run of the inference so that we can choose whether we need
         #to calculate things or reuse old variables
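
This hunk relaxes the stopping tolerance (1e-7 to 1e-4) and halves the iteration budget (60 to 30) of the Laplace mode search. A minimal sketch of how two such knobs typically interact in a mode-finding loop (an illustrative 1-D Newton iteration, not GPy's actual routine):

    import numpy as np

    def find_mode(grad, hess, f0, tolerance=1e-4, max_iter=30):
        # tolerance plays the role of _mode_finding_tolerance,
        # max_iter that of _mode_finding_max_iter
        f = f0
        for iteration in range(max_iter):
            f_new = f - grad(f) / hess(f)        # Newton step
            if np.abs(f_new - f) < tolerance:    # change fell below tolerance
                return f_new, iteration + 1
            f = f_new
        return f, max_iter                       # iteration budget exhausted

    # mode of log N(f | 2, 1) up to constants: grad = -(f - 2), hess = -1
    mode, iters = find_mode(lambda f: -(f - 2.0), lambda f: -1.0, f0=0.0)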
@@ -209,9 +209,12 @@ class Laplace(LatentFunctionInference):
             Ki_f_new = Ki_f + step*dKi_f
             f_new = np.dot(K, Ki_f_new)
             #print "new {} vs old {}".format(obj(Ki_f_new, f_new), obj(Ki_f, f))
-            if obj(Ki_f_new, f_new) < obj(Ki_f, f):
+            old_obj = obj(Ki_f, f)
+            new_obj = obj(Ki_f_new, f_new)
+            if new_obj < old_obj:
                 raise ValueError("Shouldn't happen, brent optimization failing")
-            difference = np.abs(np.sum(f_new - f)) + np.abs(np.sum(Ki_f_new - Ki_f))
+            difference = np.abs(new_obj - old_obj)
+            # difference = np.abs(np.sum(f_new - f)) + np.abs(np.sum(Ki_f_new - Ki_f))
             Ki_f = Ki_f_new
             f = f_new
             iteration += 1
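
The convergence measure switches here from the summed movement of f and Ki_f to the change in the objective value. One plausible reason, assumed rather than stated in the commit: a sum of signed differences can cancel to zero while the iterates are still moving, falsely signalling convergence. A sketch with made-up values:

    import numpy as np

    f, f_new = np.array([1.0, 2.0]), np.array([2.0, 1.0])  # both entries moved by 1.0
    print(np.abs(np.sum(f_new - f)))   # 0.0: the old criterion cancels to zero
    old_obj, new_obj = -3.20, -3.15    # hypothetical objective values
    print(np.abs(new_obj - old_obj))   # 0.05: the objective change cannot cancel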
@@ -316,6 +319,9 @@ class Laplace(LatentFunctionInference):
         if not log_concave:
             #print "Under 1e-10: {}".format(np.sum(W < 1e-6))
             W = np.clip(W, 1e-6, 1e+30)
+            # For student-T we can clip this more intelligently. If the
+            # objective has hardly changed, we can increase the clipping limit
+            # by ((v+1)/v)/sigma2
             # NOTE: when setting a parameter inside parameters_changed it will allways come to closed update circles!!!
             #W.__setitem__(W < 1e-6, 1e-6, update=False) # FIXME-HACK: This is a hack since GPy can't handle negative variances which can occur
             # If the likelihood is non-log-concave. We wan't to say that there is a negative variance
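
The clipping exists because a non-log-concave likelihood such as Student-t can produce negative entries in W = -d^2 log p(y|f) / df^2, which would break the Cholesky factorisation downstream. A hedged sketch of the effect, using the standard Student-t Hessian formula rather than GPy's likelihood classes:

    import numpy as np

    v, sigma2 = 3.0, 1.0               # degrees of freedom and scale^2 (assumed values)
    r = np.array([0.1, 1.0, 5.0])      # residuals y - f
    # Student-t: W = (v + 1) * (v*sigma2 - r^2) / (v*sigma2 + r^2)^2
    W = (v + 1) * (v * sigma2 - r**2) / (v * sigma2 + r**2)**2
    print(W)                           # last entry is negative for the outlier
    W = np.clip(W, 1e-6, 1e+30)        # the clip applied in the hunk above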
@@ -339,7 +345,7 @@ class Laplace(LatentFunctionInference):
         #compute vital matrices
         C = np.dot(LiW12, K)
-        Ki_W_i = K - C.T.dot(C)
+        Ki_W_i = K - C.T.dot(C)
         I_KW_i = np.eye(K.shape[0]) - np.dot(K, K_Wi_i)
         logdet_I_KW = 2*np.sum(np.log(np.diag(L)))
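
For reference, these "vital matrices" implement standard Laplace-approximation algebra (GPML, ch. 3): with B = I + W^0.5 K W^0.5, L = chol(B) and LiW12 = L^-1 W^0.5, the changed line computes Ki_W_i = (K^-1 + W)^-1. A self-contained numerical check (illustrative, not GPy code):

    import numpy as np
    from scipy import linalg

    rng = np.random.default_rng(0)
    A = rng.standard_normal((4, 4))
    K = A @ A.T + 4 * np.eye(4)              # positive-definite stand-in for the kernel
    W = np.diag(rng.uniform(0.1, 2.0, 4))    # (clipped) positive Hessian diagonal
    W12 = np.sqrt(W)

    B = np.eye(4) + W12 @ K @ W12
    L = linalg.cholesky(B, lower=True)
    LiW12 = linalg.solve_triangular(L, W12, lower=True)  # L^-1 W^0.5
    C = LiW12 @ K
    Ki_W_i = K - C.T @ C                     # the quantity computed in the hunk
    assert np.allclose(Ki_W_i, np.linalg.inv(np.linalg.inv(K) + W))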

View file

@@ -133,6 +133,33 @@ class opt_lbfgsb(Optimizer):
         #a more helpful error message is available in opt_result in the Error case
         if opt_result[2]['warnflag']==2:
             self.status = 'Error' + str(opt_result[2]['task'])
+
+class opt_bfgs(Optimizer):
+    def __init__(self, *args, **kwargs):
+        Optimizer.__init__(self, *args, **kwargs)
+        self.opt_name = "BFGS (Scipy implementation)"
+
+    def opt(self, f_fp=None, f=None, fp=None):
+        """
+        Run the optimizer
+        """
+        rcstrings = ['', 'Maximum number of iterations exceeded', 'Gradient and/or function calls not changing']
+
+        opt_dict = {}
+        if self.xtol is not None:
+            print("WARNING: bfgs doesn't have an xtol arg, so I'm going to ignore it")
+        if self.ftol is not None:
+            print("WARNING: bfgs doesn't have an ftol arg, so I'm going to ignore it")
+        if self.gtol is not None:
+            opt_dict['gtol'] = self.gtol
+
+        opt_result = optimize.fmin_bfgs(f, self.x_init, fp, disp=self.messages,
+                                        maxiter=self.max_iters, full_output=True, **opt_dict)
+        self.x_opt = opt_result[0]
+        self.f_opt = f_fp(self.x_opt)[0]
+        self.funct_eval = opt_result[4]
+        self.status = rcstrings[opt_result[6]]
+
 class opt_simplex(Optimizer):
     def __init__(self, *args, **kwargs):
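
The new class relies on the tuple layout of scipy's fmin_bfgs when full_output=True: (xopt, fopt, gopt, Bopt, func_calls, grad_calls, warnflag), which is why opt_result[0], [4] and [6] are read above. A standalone sketch of that call with a toy objective (outside GPy):

    import numpy as np
    from scipy import optimize

    f = lambda x: np.sum((x - 3.0) ** 2)   # toy objective
    fp = lambda x: 2.0 * (x - 3.0)         # its gradient

    opt_result = optimize.fmin_bfgs(f, np.zeros(2), fp, disp=False,
                                    maxiter=100, full_output=True)
    x_opt = opt_result[0]                  # xopt
    funct_eval = opt_result[4]             # func_calls
    warnflag = opt_result[6]               # 0, 1 or 2, indexing rcstrings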
@@ -245,6 +272,7 @@ def get_optimizer(f_min):
     optimizers = {'fmin_tnc': opt_tnc,
                   'simplex': opt_simplex,
                   'lbfgsb': opt_lbfgsb,
+                  'org-bfgs': opt_bfgs,
                   'scg': opt_SCG,
                   'adadelta':Opt_Adadelta}
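
With the new entry, the optimizer is reachable by name. A hedged usage sketch (the constructor keywords are assumptions about the Optimizer base class, not verified against this revision):

    import numpy as np

    f = lambda x: np.sum((x - 3.0) ** 2)
    fp = lambda x: 2.0 * (x - 3.0)
    f_fp = lambda x: (f(x), fp(x))

    # hypothetical kwargs; check Optimizer.__init__ for the real signature
    optimizer = get_optimizer('org-bfgs')(x_init=np.zeros(2), gtol=1e-6, max_iters=100)
    optimizer.opt(f_fp=f_fp, f=f, fp=fp)
    print(optimizer.x_opt, optimizer.status)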