eigenvalue decomposition of psi2

This commit is contained in:
James Hensman 2013-05-01 15:08:55 +01:00
parent db3646c3f3
commit 9df555b51b
4 changed files with 19 additions and 7 deletions

View file

@@ -176,12 +176,13 @@ def bgplvm_simulation_matlab_compare():
Y = sim_data['Y'] Y = sim_data['Y']
S = sim_data['S'] S = sim_data['S']
mu = sim_data['mu'] mu = sim_data['mu']
M, [_, Q] = 20, mu.shape M, (_, Q) = 30, mu.shape
from GPy.models import mrd from GPy.models import mrd
from GPy import kern from GPy import kern
reload(mrd); reload(kern) reload(mrd); reload(kern)
k = kern.rbf(Q, ARD=True) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2)) #k = kern.rbf(Q, ARD=True) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2))
k = kern.linear(Q, ARD=True) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2))
m = Bayesian_GPLVM(Y, Q, init="PCA", M=M, kernel=k, m = Bayesian_GPLVM(Y, Q, init="PCA", M=M, kernel=k,
# X=mu, # X=mu,
# X_variance=S, # X_variance=S,

View file

@@ -76,8 +76,13 @@ class sparse_GP(GP):
assert self.likelihood.D == 1 #TODO: what if the likelihood is heteroscedastic and there are multiple independent outputs? assert self.likelihood.D == 1 #TODO: what if the likelihood is heteroscedastic and there are multiple independent outputs?
if self.has_uncertain_inputs: if self.has_uncertain_inputs:
self.psi2_beta_scaled = (self.psi2*(self.likelihood.precision.flatten().reshape(self.N,1,1)/sf2)).sum(0) self.psi2_beta_scaled = (self.psi2*(self.likelihood.precision.flatten().reshape(self.N,1,1)/sf2)).sum(0)
tmp, _ = linalg.lapack.flapack.dtrtrs(self.Lm,self.psi2_beta_scaled.T,lower=1) evals, evecs = linalg.eigh(self.psi2_beta_scaled)
self.A, _ = linalg.lapack.flapack.dtrtrs(self.Lm,np.asfortranarray(tmp.T),lower=1) clipped_evals = np.clip(evals,0.,1e6) # TODO: make clipping configurable
if not np.allclose(evals, clipped_evals):
print "Warning: clipping posterior eigenvalues"
tmp = evecs*np.sqrt(clipped_evals)
tmp, _ = linalg.lapack.flapack.dtrtrs(self.Lm,np.asfortranarray(tmp),lower=1)
self.A = tdot(tmp)
else: else:
tmp = self.psi1*(np.sqrt(self.likelihood.precision.flatten().reshape(1,self.N))/sf) tmp = self.psi1*(np.sqrt(self.likelihood.precision.flatten().reshape(1,self.N))/sf)
self.psi2_beta_scaled = tdot(tmp) self.psi2_beta_scaled = tdot(tmp)
@@ -86,8 +91,14 @@ class sparse_GP(GP):
else: else:
if self.has_uncertain_inputs: if self.has_uncertain_inputs:
self.psi2_beta_scaled = (self.psi2*(self.likelihood.precision/sf2)).sum(0) self.psi2_beta_scaled = (self.psi2*(self.likelihood.precision/sf2)).sum(0)
tmp, _ = linalg.lapack.flapack.dtrtrs(self.Lm,self.psi2_beta_scaled.T,lower=1) evals, evecs = linalg.eigh(self.psi2_beta_scaled)
self.A, _ = linalg.lapack.flapack.dtrtrs(self.Lm,np.asfortranarray(tmp.T),lower=1) clipped_evals = np.clip(evals,0.,1e6) # TODO: make clipping configurable
if not np.allclose(evals, clipped_evals):
print "Warning: clipping posterior eigenvalues"
tmp = evecs*np.sqrt(clipped_evals)
self.psi2_beta_scaled = tdot(tmp)
tmp, _ = linalg.lapack.flapack.dtrtrs(self.Lm,np.asfortranarray(tmp),lower=1)
self.A = tdot(tmp)
else: else:
tmp = self.psi1*(np.sqrt(self.likelihood.precision)/sf) tmp = self.psi1*(np.sqrt(self.likelihood.precision)/sf)
self.psi2_beta_scaled = tdot(tmp) self.psi2_beta_scaled = tdot(tmp)

Binary file not shown.

View file

@@ -72,7 +72,7 @@ def jitchol(A,maxtries=5):
raise linalg.LinAlgError, "not pd: negative diagonal elements" raise linalg.LinAlgError, "not pd: negative diagonal elements"
jitter= diagA.mean()*1e-6 jitter= diagA.mean()*1e-6
for i in range(1,maxtries+1): for i in range(1,maxtries+1):
print '\rWarning: adding jitter of {:.10e} '.format(jitter), print 'Warning: adding jitter of {:.10e}'.format(jitter)
try: try:
return linalg.cholesky(A+np.eye(A.shape[0]).T*jitter, lower = True) return linalg.cholesky(A+np.eye(A.shape[0]).T*jitter, lower = True)
except: except: