mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-09 12:02:38 +02:00
fix: predictive_gradients for new posterior class
This commit is contained in:
parent
14046705cf
commit
6fc0cc630c
1 changed file with 11 additions and 3 deletions
|
|
@ -339,9 +339,17 @@ class GP(Model):
|
||||||
# gradients wrt the diagonal part k_{xx}
|
# gradients wrt the diagonal part k_{xx}
|
||||||
dv_dX = kern.gradients_X(np.eye(Xnew.shape[0]), Xnew)
|
dv_dX = kern.gradients_X(np.eye(Xnew.shape[0]), Xnew)
|
||||||
#grads wrt 'Schur' part K_{xf}K_{ff}^{-1}K_{fx}
|
#grads wrt 'Schur' part K_{xf}K_{ff}^{-1}K_{fx}
|
||||||
alpha = -2.*np.dot(kern.K(Xnew, self._predictive_variable), self.posterior.woodbury_inv)
|
if self.posterior.woodbury_inv.ndim == 3:
|
||||||
dv_dX += kern.gradients_X(alpha, Xnew, self._predictive_variable)
|
tmp = np.empty(dv_dX.shape + (self.posterior.woodbury_inv.shape[2],))
|
||||||
return mean_jac, dv_dX
|
tmp[:] = dv_dX[:,:,None]
|
||||||
|
for i in range(self.posterior.woodbury_inv.shape[2]):
|
||||||
|
alpha = -2.*np.dot(kern.K(Xnew, self._predictive_variable), self.posterior.woodbury_inv[:, :, i])
|
||||||
|
tmp[:, :, i] += kern.gradients_X(alpha, Xnew, self._predictive_variable)
|
||||||
|
else:
|
||||||
|
tmp = dv_dX
|
||||||
|
alpha = -2.*np.dot(kern.K(Xnew, self._predictive_variable), self.posterior.woodbury_inv)
|
||||||
|
tmp += kern.gradients_X(alpha, Xnew, self._predictive_variable)
|
||||||
|
return mean_jac, tmp
|
||||||
|
|
||||||
def predict_jacobian(self, Xnew, kern=None, full_cov=False):
|
def predict_jacobian(self, Xnew, kern=None, full_cov=False):
|
||||||
"""
|
"""
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue