Added a decorator that converts numpy invalid-operation warnings into exceptions

This commit is contained in:
Daniel Beck 2015-02-09 09:28:53 +11:00
parent 1d2cbfe44a
commit 8b4274339a
2 changed files with 21 additions and 6 deletions

View file

@ -7,6 +7,19 @@ from ...util.caching import Cache_this
import itertools import itertools
import operator import operator
def numpy_invalid_op_as_exception(func):
    """
    A decorator that makes numpy raise ``FloatingPointError`` for invalid
    floating-point operations (e.g. 0.0/0.0) while *func* runs, instead of
    the default behaviour of emitting a warning.

    The wrapped function's return value is passed through unchanged, and
    the caller's previous ``invalid`` error setting is restored afterwards
    even if *func* raises.
    """
    def func_wrapper(*args, **kwargs):
        # seterr returns the previous settings so we can restore them,
        # rather than clobbering them with a hard-coded 'warn'.
        old_settings = np.seterr(invalid='raise')
        try:
            # Return the result — the original wrapper dropped it.
            return func(*args, **kwargs)
        finally:
            # Restore even when func raises; otherwise the global numpy
            # error state would stay at 'raise' for all subsequent code.
            np.seterr(invalid=old_settings['invalid'])
    return func_wrapper
class Prod(CombinationKernel): class Prod(CombinationKernel):
""" """
Computes the product of 2 kernels Computes the product of 2 kernels
@ -42,15 +55,14 @@ class Prod(CombinationKernel):
which_parts = self.parts which_parts = self.parts
return reduce(np.multiply, (p.Kdiag(X) for p in which_parts)) return reduce(np.multiply, (p.Kdiag(X) for p in which_parts))
@numpy_invalid_op_as_exception
def update_gradients_full(self, dL_dK, X, X2=None): def update_gradients_full(self, dL_dK, X, X2=None):
np.seterr(invalid='raise')
k = self.K(X,X2)*dL_dK k = self.K(X,X2)*dL_dK
try: try:
for p in self.parts: for p in self.parts:
p.update_gradients_full(k/p.K(X,X2),X,X2) p.update_gradients_full(k/p.K(X,X2),X,X2)
except FloatingPointError: except FloatingPointError:
np.seterr(invalid='warn') #print "WARNING: gradient calculation falling back to slow version due to zero-valued kernel"
print "Gradient warning: falling back to slow version due to zero-valued kernel"
for combination in itertools.combinations(self.parts, len(self.parts) - 1): for combination in itertools.combinations(self.parts, len(self.parts) - 1):
prod = reduce(operator.mul, [p.K(X, X2) for p in combination]) prod = reduce(operator.mul, [p.K(X, X2) for p in combination])
to_update = list(set(self.parts) - set(combination))[0] to_update = list(set(self.parts) - set(combination))[0]
@ -75,3 +87,5 @@ class Prod(CombinationKernel):
for p in self.parts: for p in self.parts:
target += p.gradients_X_diag(k/p.Kdiag(X),X) target += p.gradients_X_diag(k/p.Kdiag(X),X)
return target return target

View file

@ -408,10 +408,11 @@ class KernelTestsProductWithZeroValues(unittest.TestCase):
lin = GPy.kern.Linear(2) lin = GPy.kern.Linear(2)
bias = GPy.kern.Bias(2) bias = GPy.kern.Bias(2)
k = lin * bias k = lin * bias
#k = lin
m = GPy.models.GPRegression(X, Y, kernel=k) m = GPy.models.GPRegression(X, Y, kernel=k)
#m['mul.bias.variance'].constrain_fixed(0) try:
m.optimize(messages=False) m.optimize()
except np.linalg.LinAlgError:
self.fail("Zero-valued kernel raised exception!")