Got rid of some overdoing of the approximation

This commit is contained in:
Alan Saul 2013-07-29 17:21:52 +01:00
parent aa98608590
commit fdb7b99e0b
2 changed files with 6 additions and 6 deletions

View file

@@ -165,7 +165,7 @@ class Laplace(likelihood):
self.aA = 0.5*self.ln_det_K_Wi__Bi self.aA = 0.5*self.ln_det_K_Wi__Bi
self.bB = - 0.5*self.f_Ki_f self.bB = - 0.5*self.f_Ki_f
self.cC = 0.5*self.y_Wi_Ki_i_y self.cC = 0.5*self.y_Wi_Ki_i_y
Z_tilde = (+ 100*self.NORMAL_CONST Z_tilde = (#+ 100*self.NORMAL_CONST
+ self.lik + self.lik
+ 0.5*self.ln_det_K_Wi__Bi + 0.5*self.ln_det_K_Wi__Bi
- 0.5*self.f_Ki_f - 0.5*self.f_Ki_f

View file

@@ -132,9 +132,9 @@ class GP(model):
model for a new variable Y* = v_tilde/tau_tilde, with a covariance model for a new variable Y* = v_tilde/tau_tilde, with a covariance
matrix K* = K + diag(1./tau_tilde) plus a normalization term. matrix K* = K + diag(1./tau_tilde) plus a normalization term.
""" """
if isinstance(self.likelihood, Laplace): #if isinstance(self.likelihood, Laplace):
self.likelihood.fit_full(self.kern.K(self.X)) #self.likelihood.fit_full(self.kern.K(self.X))
self.likelihood._set_params(self.likelihood._get_params()) #self.likelihood._set_params(self.likelihood._get_params())
l = -0.5 * self.D * self.K_logdet + self._model_fit_term() + self.likelihood.Z l = -0.5 * self.D * self.K_logdet + self._model_fit_term() + self.likelihood.Z
print "K_ldet: {} mft: {} Z: {}".format(self.K_logdet, self._model_fit_term(), self.likelihood.Z) print "K_ldet: {} mft: {} Z: {}".format(self.K_logdet, self._model_fit_term(), self.likelihood.Z)
return l return l
@@ -148,8 +148,8 @@ class GP(model):
dL_dthetaK = self.kern.dK_dtheta(dL_dK=self.dL_dK, X=self.X) dL_dthetaK = self.kern.dK_dtheta(dL_dK=self.dL_dK, X=self.X)
print "dL_dthetaK should be: ", dL_dthetaK print "dL_dthetaK should be: ", dL_dthetaK
if isinstance(self.likelihood, Laplace): if isinstance(self.likelihood, Laplace):
self.likelihood.fit_full(self.kern.K(self.X)) #self.likelihood.fit_full(self.kern.K(self.X))
self.likelihood._set_params(self.likelihood._get_params()) #self.likelihood._set_params(self.likelihood._get_params())
dK_dthetaK = self.kern.dK_dtheta dK_dthetaK = self.kern.dK_dtheta
dL_dthetaK = self.likelihood._Kgradients(dK_dthetaK, self.X.copy()) dL_dthetaK = self.likelihood._Kgradients(dK_dthetaK, self.X.copy())
dL_dthetaL = self.likelihood._gradients(partial=np.diag(self.dL_dK)) dL_dthetaL = self.likelihood._gradients(partial=np.diag(self.dL_dK))