Merge branch 'params' of github.com:SheffieldML/GPy into params

Alan Saul 2014-03-21 14:22:46 +00:00
commit 1c7ed4ab66
3 changed files with 32 additions and 20 deletions


@@ -139,11 +139,11 @@ class ODE_UY(Kern):
         dVdly = 0.5/np.sqrt(ly)*np.sqrt(2*Vy)
         dVdVy = 0.5/np.sqrt(Vy)*np.sqrt(2*ly)
-        rd=rdist.shape[0]
-        dktheta1 = np.zeros([rd,rd])
-        dktheta2 = np.zeros([rd,rd])
-        dkUdvar = np.zeros([rd,rd])
-        dkYdvar = np.zeros([rd,rd])
+        rd=rdist.shape
+        dktheta1 = np.zeros(rd)
+        dktheta2 = np.zeros(rd)
+        dkUdvar = np.zeros(rd)
+        dkYdvar = np.zeros(rd)
         # dk dtheta for UU
         UUdtheta1 = lambda dist: np.exp(-lu* dist)*dist + (-dist)*np.exp(-lu* dist)*(1+lu*dist)
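
A side note on the shape change above: rdist.shape is a tuple, and np.zeros accepts a shape tuple directly, so the new gradient buffers match the shape of rdist even when it is not square, whereas the old code always built a square rd x rd array from shape[0]. A standalone NumPy illustration (not GPy code):

    import numpy as np

    rdist = np.empty((3, 5))                  # a non-square distance matrix
    rd = rdist.shape                          # (3, 5), a tuple
    new_buf = np.zeros(rd)                    # shape (3, 5), matches rdist
    old_buf = np.zeros([rdist.shape[0]] * 2)  # old behaviour: always (3, 3)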


@@ -23,7 +23,7 @@ class Add(CombinationKernel):
         If a list of parts (of this kernel!) `which_parts` is given, only
         the parts of the list are taken to compute the covariance.
         """
-        assert X.shape[1] == self.input_dim
+        assert X.shape[1] > max(np.r_[self.active_dims])
         if which_parts is None:
             which_parts = self.parts
         elif not isinstance(which_parts, (list, tuple)):
@@ -33,7 +33,7 @@ class Add(CombinationKernel):
     @Cache_this(limit=2, force_kwargs=['which_parts'])
     def Kdiag(self, X, which_parts=None):
-        assert X.shape[1] == self.input_dim
+        assert X.shape[1] > max(np.r_[self.active_dims])
         if which_parts is None:
             which_parts = self.parts
         elif not isinstance(which_parts, (list, tuple)):
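
The relaxed assertion in K and Kdiag only requires that every active dimension indexes into X, rather than forcing X to carry exactly input_dim columns; for an additive kernel whose parts act on different slices of a wider X, the old check could reject valid inputs. A standalone sketch of the difference (not the GPy implementation itself):

    import numpy as np

    X = np.random.randn(10, 5)    # 5 input columns (indices 0..4)
    ok_dims = np.array([0, 4])    # every active dimension indexes into X
    bad_dims = np.array([2, 5])   # index 5 is out of range for 5 columns

    assert X.shape[1] > max(np.r_[ok_dims])    # passes
    assert X.shape[1] > max(np.r_[bad_dims])   # raises AssertionError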
@@ -172,7 +172,7 @@ class Add(CombinationKernel):
     def add(self, other, name='sum'):
         if isinstance(other, Add):
-            other_params = other._parameters_.copy()
+            other_params = other._parameters_[:]
             for p in other_params:
                 other.remove_parameter(p)
             self.add_parameters(*other_params)
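
On the copy change in add(): assuming _parameters_ behaves like a plain Python list, the slice [:] takes the same shallow snapshot as .copy() but also works on Python 2, where list.copy() does not exist; the snapshot matters because the loop removes parameters from other while iterating. A minimal list-only illustration:

    params = ['a', 'b', 'c']
    snapshot = params[:]      # shallow copy; list.copy() only exists on Python 3.3+
    for p in snapshot:        # safe to iterate while emptying the original
        params.remove(p)
    # params is now [], snapshot is still ['a', 'b', 'c']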


@@ -152,7 +152,12 @@ def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verb
     if verbose:
         print("Checking gradients of Kdiag(X) wrt theta.")
-    result = Kern_check_dKdiag_dtheta(kern, X=X).checkgrad(verbose=verbose)
+    try:
+        result = Kern_check_dKdiag_dtheta(kern, X=X).checkgrad(verbose=verbose)
+    except NotImplementedError:
+        result=True
+        if verbose:
+            print("update_gradients_diag not implemented for " + kern.name)
     if result and verbose:
         print("Check passed.")
     if not result:
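
The try/except added above treats a missing diagonal gradient implementation as "not applicable" rather than as a failure, so kernels without update_gradients_diag no longer abort the whole check. A generic sketch of that pattern (illustrative names, not the GPy API):

    def run_optional_check(check):
        """Run a gradient check, counting NotImplementedError as a pass."""
        try:
            return check()
        except NotImplementedError:
            return True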
@@ -240,9 +245,22 @@ class KernelGradientTestsContinuous(unittest.TestCase):
     def test_Add(self):
         k = GPy.kern.Matern32(2, active_dims=[2,3]) + GPy.kern.RBF(2, active_dims=[0,4]) + GPy.kern.Linear(self.D)
+        k += GPy.kern.Matern32(2, active_dims=[2,3]) + GPy.kern.RBF(2, active_dims=[0,4]) + GPy.kern.Linear(self.D)
         k.randomize()
         self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
+
+    def test_Add_dims(self):
+        k = GPy.kern.Matern32(2, active_dims=[2,self.D]) + GPy.kern.RBF(2, active_dims=[0,4]) + GPy.kern.Linear(self.D)
+        k.randomize()
+        self.assertRaises(AssertionError, k.K, self.X)
+        k = GPy.kern.Matern32(2, active_dims=[2,self.D-1]) + GPy.kern.RBF(2, active_dims=[0,4]) + GPy.kern.Linear(self.D)
+        k.randomize()
+        # assert it runs:
+        try:
+            k.K(self.X)
+        except AssertionError:
+            raise AssertionError, "k.K(X) should run on self.D-1 dimension"

     def test_Matern52(self):
         k = GPy.kern.Matern52(self.D)
         k.randomize()
@@ -329,17 +347,11 @@ class KernelTestsNonContinuous(unittest.TestCase):
         kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
         self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))

-class test_ODE_UY(unittest.TestCase):
-    def setUp(self):
-        self.k = GPy.kern.ODE_UY(2)
-        self.X = np.random.randn(50,2)
-        self.X[:,1] = np.random.randint(0,2,50)
-        i = np.argsort(X[:,1])
-        self.X = self.X[i]
-        self.Y = np.random.randn(50, 1)
-    def checkgrad(self):
-        m = GPy.models.GPRegression(X,Y,kernel=k)
-        self.assertTrue(m.checkgrad())
+    def test_ODE_UY(self):
+        kern = GPy.kern.ODE_UY(2, active_dims=[0, self.D])
+        X = self.X[self.X[:,-1]!=2]
+        X2 = self.X2[self.X2[:,-1]!=2]
+        self.assertTrue(check_kernel_gradient_functions(kern, X=X, X2=X2, verbose=verbose, fixed_X_dims=-1))

 if __name__ == "__main__":