mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-09 03:52:39 +02:00
Added a hack fix as suggested by max, zeroing any negative values (should really be numerically negative values on diagonal)
This commit is contained in:
parent
a56928e11a
commit
4757265b24
3 changed files with 30 additions and 20 deletions
|
|
@ -40,6 +40,7 @@ class IndependentOutputs(CombinationKernel):
|
|||
The index of the functions is given by the last column in the input X
|
||||
the rest of the columns of X are passed to the underlying kernel for computation (in blocks).
|
||||
|
||||
Kern is wrapped with a slicer metaclass
|
||||
"""
|
||||
def __init__(self, kern, index_dim=-1, name='independ'):
|
||||
assert isinstance(index_dim, int), "IndependentOutputs kernel is only defined with one input dimension being the indeces"
|
||||
|
|
|
|||
|
|
@ -95,7 +95,9 @@ class Stationary(Kern):
|
|||
#X2, = self._slice_X(X2)
|
||||
X1sq = np.sum(np.square(X),1)
|
||||
X2sq = np.sum(np.square(X2),1)
|
||||
return np.sqrt(-2.*np.dot(X, X2.T) + (X1sq[:,None] + X2sq[None,:]))
|
||||
r2 = -2.*np.dot(X, X2.T) + X1sq[:,None] + X2sq[None,:]
|
||||
r2[r2<0] = 0. # A bit hacky
|
||||
return np.sqrt(r2)
|
||||
|
||||
@Cache_this(limit=5, ignore_args=())
|
||||
def _scaled_dist(self, X, X2=None):
|
||||
|
|
|
|||
|
|
@ -120,6 +120,8 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
|
|||
|
||||
if verbose:
|
||||
print("Checking covariance function is positive definite.")
|
||||
#if isinstance(kern, GPy.kern.IndependentOutputs):
|
||||
#import ipdb; ipdb.set_trace() # XXX BREAKPOINT
|
||||
result = Kern_check_model(kern, X=X).is_positive_semi_definite()
|
||||
if result and verbose:
|
||||
print("Check passed.")
|
||||
|
|
@ -306,17 +308,22 @@ class KernelTestsNonContinuous(unittest.TestCase):
|
|||
D = self.D
|
||||
self.X = np.random.randn(N,D)
|
||||
self.X2 = np.random.randn(N1,D)
|
||||
self.X_block = np.zeros((N+N1, D+D+1))
|
||||
#self.X_block = np.zeros((N+N1, D+D+1))
|
||||
#self.X_block[0:N, 0:D] = self.X
|
||||
#self.X_block[N:N+N1, D:D+D] = self.X2
|
||||
#self.X_block[0:N, -1] = 0
|
||||
#self.X_block[N:N+N1, -1] = 1
|
||||
self.X_block = np.zeros((N+N1, D+1))
|
||||
self.X_block[0:N, 0:D] = self.X
|
||||
self.X_block[N:N+N1, D:D+D] = self.X2
|
||||
self.X_block[0:N, -1] = 1
|
||||
self.X_block[N:N+1, -1] = 2
|
||||
self.X_block[N:N+N1, 0:D] = self.X2
|
||||
self.X_block[0:N, -1] = 0
|
||||
self.X_block[N:N+N1, -1] = 1
|
||||
self.X_block = self.X_block[self.X_block.argsort(0)[:, -1], :]
|
||||
|
||||
def test_IndependentOutputs(self):
    """Gradient-check an IndependentOutputs wrapper around an RBF kernel.

    Uses the block-structured input ``self.X_block`` (last column holds the
    output index) both with and without a second input matrix.
    """
    base_kernel = GPy.kern.RBF(self.D)
    # Wrap the base kernel; -1 selects the last input column as the index.
    kern = GPy.kern.IndependentOutputs(base_kernel, -1)
    # Check with an explicit X2 as well as the X-only (symmetric) case.
    ok_cross = check_kernel_gradient_functions(kern, X=self.X_block, X2=self.X_block, verbose=verbose)
    self.assertTrue(ok_cross)
    ok_symm = check_kernel_gradient_functions(kern, X=self.X_block, verbose=verbose)
    self.assertTrue(ok_symm)
|
||||
|
||||
if __name__ == "__main__":
    # Bug fix: the original used the Python 2 print statement
    # (`print "..."`), which is a SyntaxError under Python 3.
    # Calling print() as a function is valid on both 2 and 3.
    print("Running unit tests, please be (very) patient...")
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue