Merge branch 'devel' of github.com:SheffieldML/GPy into devel

This commit is contained in:
Nicolò Fusi 2013-05-10 16:21:45 +02:00
commit cbfbcfb411
5 changed files with 30 additions and 26 deletions

View file

@@ -49,6 +49,7 @@ def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=500, display=True, xto
function_eval = 1
fnow = fold
gradnew = gradf(x, *optargs) # Initial gradient.
current_grad = np.dot(gradnew, gradnew)
gradold = gradnew.copy()
d = -gradnew # Initial search direction.
success = True # Force calculation of directional derivs.
@@ -110,7 +111,7 @@ def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=500, display=True, xto
iteration += 1
if display:
print '\r',
print 'i: {0:>5g} f:{1:> 12e} b:{2:> 12e} |g|:{3:> 12e}'.format(iteration, fnow, beta, np.dot(gradnew, gradnew)),
print 'i: {0:>5g} f:{1:> 12e} b:{2:> 12e} |g|:{3:> 12e}'.format(iteration, fnow, beta, current_grad),
# print 'Iteration:', iteration, ' Objective:', fnow, ' Scale:', beta, '\r',
sys.stdout.flush()
@@ -125,8 +126,9 @@ def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=500, display=True, xto
fold = fnew
gradold = gradnew
gradnew = gradf(x, *optargs)
current_grad = np.dot(gradnew, gradnew)
# If the gradient is zero then we are done.
if (np.dot(gradnew, gradnew)) <= gtol:
if current_grad <= gtol:
status = 'converged'
return x, flog, function_eval, status