diff --git a/GPy/examples/oil_flow_demo.py b/GPy/examples/oil_flow_demo.py
index f977e18d..4bcdfa63 100644
--- a/GPy/examples/oil_flow_demo.py
+++ b/GPy/examples/oil_flow_demo.py
@@ -31,7 +31,7 @@
 N, D = Y.shape
 Y -= Y.mean(axis=0)
 #Y /= Y.std(axis=0)
-Q = 7
+Q = 10
 k = GPy.kern.rbf_ARD(Q) + GPy.kern.white(Q)
 m = GPy.models.Bayesian_GPLVM(Y, Q, kernel = k, M = 12)
 m.constrain_positive('(rbf|bias|S|white|noise)')
diff --git a/GPy/models/BGPLVM.py b/GPy/models/BGPLVM.py
index 2c760ec3..3fc257e9 100644
--- a/GPy/models/BGPLVM.py
+++ b/GPy/models/BGPLVM.py
@@ -57,12 +57,6 @@ class Bayesian_GPLVM(sparse_GP_regression, GPLVM):
         return np.hstack((dL_dmu.flatten(), dL_dS.flatten()))
 
 
-    def log_likelihood_gradients(self): return np.hstack((self.dL_dmuS().flatten(), sparse_GP_regression.log_likelihood_gradients(self)))
-    def plot(self):
-        GPLVM.plot(self)
-        #passing Z without a small amout of jitter will induce the white kernel where we don;t want it!
-        mu, var = sparse_GP_regression.predict(self, self.Z+np.random.randn(*self.Z.shape)*0.0001)
-        pb.plot(mu[:, 0] , mu[:, 1], 'ko')
 
 
 
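For reference, a minimal sketch of how the updated demo drives the model after this change. The toy data below stands in for the oil flow set (the real demo loads it from file), and the final m.optimize() / m.plot() calls are an assumption based on the usual GPy model interface, not part of this diff; their exact arguments may differ in this version of the library.

import numpy as np
import GPy

# Toy stand-in for the 12-dimensional oil flow data used by the real demo.
np.random.seed(0)
Y = np.random.randn(100, 12)
Y -= Y.mean(axis=0)           # centre the data, as in the demo
#Y /= Y.std(axis=0)           # optional standardisation, left commented out in the demo

Q = 10                                                # latent dimensionality after this change
k = GPy.kern.rbf_ARD(Q) + GPy.kern.white(Q)           # ARD RBF kernel plus white noise
m = GPy.models.Bayesian_GPLVM(Y, Q, kernel = k, M = 12)  # 12 inducing points
m.constrain_positive('(rbf|bias|S|white|noise)')      # keep variances/lengthscales positive

# Assumed standard GPy optimisation call; name/arguments may differ in this version.
m.optimize()

# With the Bayesian_GPLVM.plot override removed, plotting resolves to the parent classes.
m.plot()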