From dc637e30cb60dd38380198baaf73b85ecdb7f62c Mon Sep 17 00:00:00 2001
From: Alessandra Tosi
Date: Tue, 30 Jul 2013 15:58:18 +0100
Subject: [PATCH] added jacobian and magnification factor

---
 GPy/models/gplvm.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/GPy/models/gplvm.py b/GPy/models/gplvm.py
index d106cd4f..60e78461 100644
--- a/GPy/models/gplvm.py
+++ b/GPy/models/gplvm.py
@@ -63,5 +63,18 @@ class GPLVM(GP):
         return np.hstack((dL_dX.flatten(), GP._log_likelihood_gradients(self)))
 
+    def jacobian(self,X):
+        target = np.zeros((X.shape[0],X.shape[1],self.output_dim))
+        for i in range(self.output_dim):
+            target[:,:,i]=self.kern.dK_dX(np.dot(self.Ki,self.likelihood.Y[:,i])[None, :],X,self.X)
+        return target
+
+    def magnification(self,X):
+        target=np.zeros(X.shape[0])
+        J=self.jacobian(X)
+        for i in range(X.shape[0]):
+            target[i]=np.sqrt(np.linalg.det(np.dot(J[i,:,:],np.transpose(J[i,:,:]))))
+        return target
+
     def plot(self):
         assert self.likelihood.Y.shape[1] == 2
         pb.scatter(self.likelihood.Y[:, 0], self.likelihood.Y[:, 1], 40, self.X[:, 0].copy(), linewidth=0, cmap=pb.cm.jet)
@@ -72,3 +85,6 @@ class GPLVM(GP):
 
     def plot_latent(self, *args, **kwargs):
         return util.plot_latent.plot_latent(self, *args, **kwargs)
+
+    def plot_magnification(self, *args, **kwargs):
+        return util.plot_latent.plot_magnification(self, *args, **kwargs)