minor edits

This commit is contained in:
James Hensman 2014-02-19 10:51:12 +00:00
parent d0c563ff0a
commit 0082acad63

View file

@@ -32,7 +32,7 @@ class DTC(object):
#make sure the noise is not hetero
beta = 1./np.squeeze(likelihood.variance)
if beta.size <1:
raise NotImplementedError, "no hetero noise with this implementatino of DTC"
raise NotImplementedError, "no hetero noise with this implementation of DTC"
Kmm = kern.K(Z)
Knn = kern.Kdiag(X)
@@ -89,4 +89,85 @@ class DTC(object):
return post, log_marginal, grad_dict
class vDTC(object):
    """DTC (deterministic training conditional) inference for sparse GP
    regression with a single homoscedastic Gaussian noise variance.

    Computes the approximate posterior, the log marginal likelihood and
    the gradients of the objective w.r.t. the covariance terms and the
    noise precision.
    """

    def __init__(self):
        # Jitter intended for stabilising ill-conditioned factorizations.
        # NOTE(review): not referenced in inference() below — jitchol
        # presumably applies its own jitter internally; confirm.
        self.const_jitter = 1e-6

    def inference(self, kern, X, X_variance, Z, likelihood, Y):
        """Run DTC inference and return (posterior, log_marginal, grad_dict).

        Parameters
        ----------
        kern : covariance function exposing K, Kdiag and
            update_gradients_sparse (gradients are pushed into it in place).
        X : training inputs, shape (num_data, input_dim).
        X_variance : must be None — input uncertainty is not supported by
            this implementation (use varDTC instead).
        Z : inducing inputs, shape (num_inducing, input_dim).
        likelihood : Gaussian likelihood with a single (homoscedastic)
            variance; its gradients are updated in place.
        Y : training targets, shape (num_data, output_dim).

        Raises
        ------
        NotImplementedError
            If the likelihood variance is heteroscedastic (more than one
            noise variance).
        """
        assert X_variance is None, "cannot use X_variance with DTC. Try varDTC."
        #TODO: MAX! fix this!
        from ...util.misc import param_to_array
        Y = param_to_array(Y)
        num_inducing, _ = Z.shape
        num_data, output_dim = Y.shape
        # Noise precision. Only a single homoscedastic variance is
        # supported, so reject a heteroscedastic likelihood here.
        # (The original test was `beta.size < 1`, which can never be
        # true — np.squeeze always leaves size >= 1 — so hetero noise
        # silently slipped through; the intended check is `> 1`.)
        beta = 1./np.squeeze(likelihood.variance)
        if beta.size > 1:
            raise NotImplementedError("no hetero noise with this implementation of DTC")
        Kmm = kern.K(Z)
        Knn = kern.Kdiag(X)
        Knm = kern.K(X, Z)
        U = Knm
        Uy = np.dot(U.T, Y)
        # factor Kmm
        Kmmi, L, Li, _ = pdinv(Kmm)
        # Compute A = I + beta * Li U^T U Li^T, plus the DTC trace
        # correction term (difference between Knn and its Nystrom
        # approximation, scaled by the precision).
        LiUTbeta = np.dot(Li, U.T)*np.sqrt(beta)
        A_ = tdot(LiUTbeta)
        trace_term = -0.5*(np.sum(Knn)*beta - np.trace(A_))
        A = A_ + np.eye(num_inducing)
        # factor A
        LA = jitchol(A)
        # back substitute through the two Cholesky factors to get b, P, v
        tmp, _ = dtrtrs(L, Uy, lower=1)
        b, _ = dtrtrs(LA, tmp*beta, lower=1)
        tmp, _ = dtrtrs(LA, b, lower=1, trans=1)
        v, _ = dtrtrs(L, tmp, lower=1, trans=1)
        tmp, _ = dtrtrs(LA, Li, lower=1, trans=0)
        P = tdot(tmp.T)
        # compute log marginal likelihood (collapsed DTC bound)
        log_marginal = -0.5*num_data*output_dim*np.log(2*np.pi) + \
                       -np.sum(np.log(np.diag(LA)))*output_dim + \
                       0.5*num_data*output_dim*np.log(beta) + \
                       -0.5*beta*np.sum(np.square(Y)) + \
                       0.5*np.sum(np.square(b)) + \
                       trace_term
        # Compute dL_dKmm
        vvT_P = tdot(v.reshape(-1, 1)) + P
        LAL = Li.T.dot(A).dot(Li)
        dL_dK = Kmmi - 0.5*(vvT_P + LAL)
        # Compute dL_dU (gradient w.r.t. the cross-covariance Knm)
        vY = np.dot(v.reshape(-1, 1), Y.T)
        #dL_dU = vY - np.dot(vvT_P, U.T)
        dL_dU = vY - np.dot(vvT_P - Kmmi, U.T)
        dL_dU *= beta
        # compute dL_dR (gradient w.r.t. the noise variance)
        Uv = np.dot(U, v)
        dL_dR = 0.5*(np.sum(U*np.dot(U, P), 1) - 1./beta + np.sum(np.square(Y), 1) - 2.*np.sum(Uv*Y, 1) + np.sum(np.square(Uv), 1))*beta**2
        dL_dR -= beta*trace_term/num_data
        grad_dict = {'dL_dKmm': dL_dK, 'dL_dKdiag': np.zeros_like(Knn) + -0.5*beta, 'dL_dKnm': dL_dU.T}
        # update gradients in place on the kernel and likelihood
        kern.update_gradients_sparse(X=X, Z=Z, **grad_dict)
        likelihood.update_gradients(dL_dR)
        # construct a posterior object
        post = Posterior(woodbury_inv=Kmmi - P, woodbury_vector=v, K=Kmm, mean=None, cov=None, K_chol=L)
        return post, log_marginal, grad_dict