added support for partial derivatives to ARD kern

This commit is contained in:
Nicolo Fusi 2012-11-30 16:11:25 +00:00
parent 31b7a0520e
commit cb5a2c2353

View file

@@ -21,7 +21,7 @@ class linear_ARD(kernpart):
assert variances.shape==(self.D,) assert variances.shape==(self.D,)
else: else:
variances = np.ones(self.D) variances = np.ones(self.D)
self.Nparam = self.D self.Nparam = int(self.D)
self.name = 'linear' self.name = 'linear'
self.set_param(variances) self.set_param(variances)
@@ -46,22 +46,12 @@ class linear_ARD(kernpart):
def Kdiag(self,X,target): def Kdiag(self,X,target):
np.add(target,np.sum(self.variances*np.square(X),-1),target) np.add(target,np.sum(self.variances*np.square(X),-1),target)
def dK_dtheta(self,X,X2,target): def dK_dtheta(self,partial,X,X2,target):
"""
Computes the derivatives wrt theta
Return shape is NxMx(Ntheta)
"""
if X2 is None: X2 = X
product = X[:,None,:]*X2[None,:,:] product = X[:,None,:]*X2[None,:,:]
target += product target += (partial[:,:,None]*product).sum(0).sum(0)
def dK_dX(self,X,X2,target): def dK_dX(self,partial,X,X2,target):
if X2 is None: X2 = X target += (((X[:, None, :] * self.variances) + target) * partial[:,:, None]).sum(0)
#product = X[:,None,:]*X2[None,:,:]
#scaled_product = product/self.variances2
np.add(target,X2[:,None,:]*self.variances,target)
def psi0(self,Z,mu,S,target): def psi0(self,Z,mu,S,target):
expected = np.square(mu) + S expected = np.square(mu) + S