GPLVM demo working

This commit is contained in:
James Hensman 2012-11-30 10:31:02 +00:00
parent 4b4f1da128
commit 5f92ff6785
4 changed files with 13 additions and 14 deletions

View file

@@ -170,12 +170,12 @@ class kern(parameterised):
[p.dK_dtheta(partial,X[s1,i_s],X2[s2,i_s],target[ps]) for p,i_s,ps,s1,s2 in zip(self.parts, self.input_slices, self.param_slices, slices1, slices2)]
return target
def dK_dX(self,X,X2=None,slices1=None,slices2=None):
def dK_dX(self,partial,X,X2=None,slices1=None,slices2=None):
if X2 is None:
X2 = X
slices1, slices2 = self._process_slices(slices1,slices2)
target = np.zeros((X2.shape[0],X.shape[0],X.shape[1]))
[p.dK_dX(X[s1],X2[s2],target[s2,s1,:]) for p,ps,s1,s2 in zip(self.parts, self.param_slices,slices1,slices2)]
target = np.zeros_like(X)
[p.dK_dX(partial,X[s1],X2[s2],target[s1,:]) for p,ps,s1,s2 in zip(self.parts, self.param_slices,slices1,slices2)]
return target
def Kdiag(self,X,slices=None):

View file

@@ -6,7 +6,7 @@ from kernpart import kernpart
import numpy as np
import hashlib
class rbf(kernpart):
class rbf(kernpart):
"""
Radial Basis Function kernel, aka squared-exponential or Gaussian kernel.
@@ -55,13 +55,15 @@ class rbf(kernpart):
target[0] += np.sum(self._K_dvar*partial)
target[1] += np.sum(self._K_dvar*self.variance*self._K_dist2/self.lengthscale*partial)
def dKdiag_dtheta(self,X,target):
target[0] += partial
def dKdiag_dtheta(self,partial,X,target):
#NB: derivative of diagonal elements wrt lengthscale is 0
target[0] += np.sum(partial)
def dK_dX(self,X,X2,target):
def dK_dX(self,partial,X,X2,target):
self._K_computations(X,X2)
_K_dist = X[:,None,:]-X2[None,:,:]
target += np.transpose(-self.variance*self._K_dvar[:,:,np.newaxis]*_K_dist/self.lengthscale2,(1,0,2))
dK_dX = np.transpose(-self.variance*self._K_dvar[:,:,np.newaxis]*_K_dist/self.lengthscale2,(1,0,2))
target += np.sum(dK_dX*partial[:,:,None],1)
def dKdiag_dX(self,X,target):
pass

View file

@@ -45,7 +45,7 @@ class white(kernpart):
def dKdiag_dtheta(self,X,target):
np.add(target[:,0],1.,target[:,0])
def dK_dX(self,X,X2,target):
def dK_dX(self,partial,X,X2,target):
pass
def dKdiag_dX(self,X,target):

View file

@@ -46,11 +46,8 @@ class GPLVM(GP_regression):
def log_likelihood_gradients(self):
dL_dK = self.dL_dK()
dK_dtheta = self.kern.dK_dtheta(self.X)
dL_dtheta = (dK_dtheta*dL_dK[:,:,None]).sum(0).sum(0)
dK_dX = self.kern.dK_dX(self.X)
dL_dX = 2.*np.sum(dL_dK[:,:,None]*dK_dX,0)
dL_dtheta = self.kern.dK_dtheta(dL_dK,self.X)
dL_dX = 2*self.kern.dK_dX(dL_dK,self.X)
return np.hstack((dL_dX.flatten(),dL_dtheta))