checkgrad: also pass when both the numerical and analytic differences are zero

This commit is contained in:
Max Zwiessele 2014-03-14 11:32:08 +00:00
parent 5f229aae2e
commit 3e5e3a099e

View file

@ -301,8 +301,7 @@ class Model(Parameterized):
denominator = (2 * np.dot(dx, gradient))
global_ratio = (f1 - f2) / np.where(denominator==0., 1e-32, denominator)
return np.abs(1. - global_ratio) < tolerance
return np.abs(1. - global_ratio) < tolerance or np.abs(f1-f2).sum() + np.abs((2 * np.dot(dx, gradient))).sum() < tolerance
else:
# check the gradient of each parameter individually, and do some pretty printing
try: