[grads x]

Max Zwiessele 2016-06-08 14:28:25 +01:00
parent 0c6e3bc88f
commit b1fd7c9aaf
3 changed files with 12 additions and 12 deletions


@@ -377,7 +377,7 @@ class GP(Model):
        if full_cov:
            dK2_dXdX = kern.gradients_XX(one, Xnew)
        else:
-           dK2_dXdX = kern.gradients_XX(one, Xnew).sum(0)
+           dK2_dXdX = kern.gradients_XX_diag(one, Xnew)
        #dK2_dXdX = np.zeros((Xnew.shape[0], Xnew.shape[1], Xnew.shape[1]))
        #for i in range(Xnew.shape[0]):
        #    dK2_dXdX[i:i+1,:,:] = kern.gradients_XX(one, Xnew[i:i+1,:])
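Note (not part of the diff): the `else` branch now asks the kernel for the per-point second-derivative blocks directly via `gradients_XX_diag`, rather than building the full `gradients_XX` tensor and summing over its first axis. A minimal sketch of the two calls, assuming an RBF kernel and the `[NxQxQ]` diagonal shape documented in the stationary-kernel hunk below; the all-ones weight arrays and shapes are illustrative assumptions, not taken from this commit:

```python
# Hedged sketch only: kernel choice, weights and shapes are assumptions.
import numpy as np
import GPy

Xnew = np.random.randn(4, 2)                       # N=4 points, Q=2 input dimensions
kern = GPy.kern.RBF(2)

full = kern.gradients_XX(np.ones((4, 4)), Xnew)    # full second-derivative tensor
diag = kern.gradients_XX_diag(np.ones(4), Xnew)    # per-point diagonal blocks, [NxQxQ]
print(full.shape, diag.shape)
```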


@@ -10,10 +10,10 @@ class Integral(Kern): #todo do I need to inherit from Stationary
    """
    Integral kernel between...
    """
    def __init__(self, input_dim, variances=None, lengthscale=None, ARD=False, active_dims=None, name='integral'):
        super(Integral, self).__init__(input_dim, active_dims, name)
        if lengthscale is None:
            lengthscale = np.ones(1)
        else:
@@ -22,7 +22,7 @@ class Integral(Kern): #todo do I need to inherit from Stationary
        self.lengthscale = Param('lengthscale', lengthscale, Logexp()) #Logexp - transforms to allow positive only values...
        self.variances = Param('variances', variances, Logexp()) #and here.
        self.link_parameters(self.variances, self.lengthscale) #this just takes a list of parameters we need to optimise.

    def h(self, z):
        return 0.5 * z * np.sqrt(math.pi) * math.erf(z) + np.exp(-(z**2))
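For orientation (not part of the diff): the helper `h` above is self-contained, so it can be sanity-checked on its own. A minimal sketch copying the definition verbatim:

```python
# Standalone evaluation of the h(z) helper shown above (illustrative only).
import math
import numpy as np

def h(z):
    return 0.5 * z * np.sqrt(math.pi) * math.erf(z) + np.exp(-(z**2))

print(h(0.0))   # the erf term vanishes at z = 0, leaving exp(0) = 1
print(h(2.0))
```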
@@ -36,13 +36,13 @@ class Integral(Kern): #todo do I need to inherit from Stationary
            for i,x in enumerate(X):
                for j,x2 in enumerate(X):
                    dK_dl[i,j] = self.variances[0]*self.dk_dl(x[0],x2[0],self.lengthscale[0]) #TODO Multiple length scales
-                   dK_dv[i,j] = self.k_xx(x[0],x2[0],self.lengthscale[0]) #the gradient wrt the variance is k_xx.
+                   dK_dv[i,j] = self.k_xx(x[0],x2[0],self.lengthscale[0]) #the gradient wrt the variance is k_xx.
            self.lengthscale.gradient = np.sum(dK_dl * dL_dK)
            self.variances.gradient = np.sum(dK_dv * dL_dK)
            #print "V%0.5f" % self.variances.gradient
            #print "L%0.5f" % self.lengthscale.gradient
-       else: #we're finding dK_xf/Dtheta
-           print "NEED TO HANDLE TODO!"
+       else: #we're finding dK_xf/Dtheta
+           print("NEED TO HANDLE TODO!")

    #useful little function to help calculate the covariances.
    def g(self,z):
@@ -52,15 +52,15 @@ class Integral(Kern): #todo do I need to inherit from Stationary
    def k_xx(self,t,tprime,l):
        return 0.5 * (l**2) * ( self.g(t/l) - self.g((t - tprime)/l) + self.g(tprime/l) - 1)

-   def k_ff(self,t,tprime,l):
+   def k_ff(self,t,tprime,l):
        return np.exp(-((t-tprime)**2)/(l**2)) #rbf

    #covariance between the gradient and the actual value
    def k_xf(self,t,tprime,l):
        return 0.5 * np.sqrt(math.pi) * l * (math.erf((t-tprime)/l) + math.erf(tprime/l))

    def K(self, X, X2=None):
-       if X2 is None:
+       if X2 is None:
            K_xx = np.zeros([X.shape[0],X.shape[0]])
            for i,x in enumerate(X):
                for j,x2 in enumerate(X):
@@ -73,7 +73,7 @@ class Integral(Kern): #todo do I need to inherit from Stationary
                    K_xf[i,j] = self.k_xf(x[0],x2[0],self.lengthscale[0])
            #print self.variances[0]
            return K_xf * self.variances[0]

    def Kdiag(self, X):
        """I've used the fact that we call this method for K_ff when finding the covariance as a hack so
        I know if I should return K_ff or K_xx. In this case we're returning K_ff!!
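For reference (not part of the diff), a hedged usage sketch of the `Integral` kernel above. The constructor arguments follow the `__init__` signature in the hunk; the import path is an assumption (the class may not be exposed as `GPy.kern.Integral` at this point), and the inputs are one-dimensional because the code only ever indexes `x[0]`:

```python
# Hedged usage sketch; import path and input layout are assumptions.
import numpy as np
from GPy.kern import Integral      # assumed export; adjust to wherever the class lives

X = np.linspace(0.0, 10.0, 20)[:, None]   # 1-D inputs, as assumed by the x[0] indexing
kern = Integral(input_dim=1, variances=np.ones(1), lengthscale=np.ones(1))

K_xx = kern.K(X)        # X2 is None: covariance between the integral observations
K_ff = kern.Kdiag(X)    # per the docstring hack above, this path returns K_ff values
print(K_xx.shape, K_ff.shape)
```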


@@ -273,7 +273,7 @@ class Stationary(Kern):
            dL2_dXdX: [NxQxQ]
        """
        dL_dK_diag = dL_dK_diag.copy().reshape(-1, 1, 1)
-       assert dL_dK_diag.size == X.shape[0], "dL_dK_diag has to be given as row [N] or column vector [Nx1]"
+       assert (dL_dK_diag.size == X.shape[0]) or (dL_dK_diag.size == 1), "dL_dK_diag has to be given as row [N] or column vector [Nx1]"
        l4 = np.ones(X.shape[1])*self.lengthscale**2
        return dL_dK_diag * (np.eye(X.shape[1]) * -self.dK2_drdr_diag()/(l4))[None, :,:]  # np.zeros(X.shape+(X.shape[1],))
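The functional change here is the relaxed assertion: `dL_dK_diag` may now have size 1 as well as size N, which is presumably what the `gradients_XX_diag(one, Xnew)` call in the first hunk needs (the assertion message itself was not updated to mention the size-1 case). A hedged sketch of what the relaxed check permits, assuming an RBF kernel; any `Stationary` subclass should behave the same:

```python
# Hedged sketch of the relaxed assertion; kernel choice and shapes are assumptions.
import numpy as np
import GPy

X = np.random.randn(6, 3)
kern = GPy.kern.RBF(3)

per_point = kern.gradients_XX_diag(np.ones(X.shape[0]), X)   # length-N weights: accepted before and after
single = kern.gradients_XX_diag(np.ones(1), X)               # size-1 weight: only accepted after this change
print(per_point.shape, single.shape)
```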