diff --git a/GPy/models/bcgplvm.py b/GPy/models/bcgplvm.py
index 899bb2f8..7462fa27 100644
--- a/GPy/models/bcgplvm.py
+++ b/GPy/models/bcgplvm.py
@@ -1,11 +1,11 @@
-# Copyright (c) 2012-2014, GPy authors (see AUTHORS.txt).
+# Copyright (c) 2015 James Hensman
 # Licensed under the BSD 3-clause license (see LICENSE.txt)
 
 
 import numpy as np
 from ..core import GP
-from ..models import GPLVM
-from ..mappings import *
+from . import GPLVM
+from .. import mappings
 
 
 class BCGPLVM(GPLVM):
@@ -16,33 +16,31 @@ class BCGPLVM(GPLVM):
     :type Y: np.ndarray
     :param input_dim: latent dimensionality
     :type input_dim: int
-    :param init: initialisation method for the latent space
-    :type init: 'PCA'|'random'
     :param mapping: mapping for back constraint
     :type mapping: GPy.core.Mapping object
 
     """
-    def __init__(self, Y, input_dim, init='PCA', X=None, kernel=None, normalize_Y=False, mapping=None):
-
+    def __init__(self, Y, input_dim, kernel=None, mapping=None):
+
         if mapping is None:
-            mapping = Kernel(X=Y, output_dim=input_dim)
+            mapping = mappings.MLP(input_dim=Y.shape[1],
+                                   output_dim=input_dim,
+                                   hidden_dim=10)
+        else:
+            assert mapping.input_dim == Y.shape[1], "mapping input_dim must match Y.shape[1]"
+            assert mapping.output_dim == input_dim, "mapping output_dim must match input_dim"
+        GPLVM.__init__(self, Y, input_dim, X=mapping.f(Y), kernel=kernel, name="bcgplvm")
+        self.unlink_parameter(self.X)
         self.mapping = mapping
-        GPLVM.__init__(self, Y, input_dim, init, X, kernel, normalize_Y)
-        self.X = self.mapping.f(self.likelihood.Y)
+        self.link_parameter(self.mapping)
 
-    def _get_param_names(self):
-        return self.mapping._get_param_names() + GP._get_param_names(self)
+        self.X = self.mapping.f(self.Y)
 
-    def _get_params(self):
-        return np.hstack((self.mapping._get_params(), GP._get_params(self)))
+    def parameters_changed(self):
+        self.X = self.mapping.f(self.Y)
+        GP.parameters_changed(self)
+        Xgradient = self.kern.gradients_X(self.grad_dict['dL_dK'], self.X, None)
+        self.mapping.update_gradients(Xgradient, self.Y)
 
-    def _set_params(self, x):
-        self.mapping._set_params(x[:self.mapping.num_params])
-        self.X = self.mapping.f(self.likelihood.Y)
-        GP._set_params(self, x[self.mapping.num_params:])
-
-    def _log_likelihood_gradients(self):
-        dL_df = self.kern.gradients_X(self.dL_dK, self.X)
-        dL_dtheta = self.mapping.df_dtheta(dL_df, self.likelihood.Y)
-        return np.hstack((dL_dtheta.flatten(), GP._log_likelihood_gradients(self)))
diff --git a/GPy/models/gplvm.py b/GPy/models/gplvm.py
index 6318829d..d6f29907 100644
--- a/GPy/models/gplvm.py
+++ b/GPy/models/gplvm.py
@@ -58,12 +58,15 @@ class GPLVM(GP):
         return target
 
     def plot(self):
-        assert self.likelihood.Y.shape[1] == 2
-        pb.scatter(self.likelihood.Y[:, 0], self.likelihood.Y[:, 1], 40, self.X[:, 0].copy(), linewidth=0, cmap=pb.cm.jet) # @UndefinedVariable
+        assert self.Y.shape[1] == 2, "too high dimensional to plot. Try plot_latent"
+        from matplotlib import pyplot as plt
+        plt.scatter(self.Y[:, 0],
+                    self.Y[:, 1],
+                    40, self.X[:, 0].copy(),
+                    linewidth=0, cmap=plt.cm.jet)
         Xnew = np.linspace(self.X.min(), self.X.max(), 200)[:, None]
         mu, _ = self.predict(Xnew)
-        import pylab as pb
-        pb.plot(mu[:, 0], mu[:, 1], 'k', linewidth=1.5)
+        plt.plot(mu[:, 0], mu[:, 1], 'k', linewidth=1.5)
 
     def plot_latent(self, labels=None, which_indices=None,
                     resolution=50, ax=None, marker='o', s=40,
diff --git a/GPy/testing/model_tests.py b/GPy/testing/model_tests.py
index 04414d98..f7cacb13 100644
--- a/GPy/testing/model_tests.py
+++ b/GPy/testing/model_tests.py
@@ -383,6 +383,16 @@ class GradientTests(np.testing.TestCase):
         m = GPy.models.GPLVM(Y, input_dim, kernel=k)
         self.assertTrue(m.checkgrad())
 
+    def test_BCGPLVM_rbf_bias_white_kern_2D(self):
+        """ Testing BCGPLVM with rbf + bias + white kernel """
+        N, input_dim, D = 50, 1, 2
+        X = np.random.rand(N, input_dim)
+        k = GPy.kern.RBF(input_dim, 0.5, 0.9 * np.ones((1,))) + GPy.kern.Bias(input_dim, 0.1) + GPy.kern.White(input_dim, 0.05)
+        K = k.K(X)
+        Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
+        m = GPy.models.BCGPLVM(Y, input_dim, kernel=k)
+        self.assertTrue(m.checkgrad())
+
     def test_GPLVM_rbf_linear_white_kern_2D(self):
         """ Testing GPLVM with rbf + bias kernel """
         N, input_dim, D = 50, 1, 2
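
Review note, not part of the patch: a minimal usage sketch of the new constructor. The toy data and kernel choices below are illustrative assumptions; the `BCGPLVM(Y, input_dim, kernel=..., mapping=...)` signature and the `mappings.MLP(input_dim=..., output_dim=..., hidden_dim=...)` call are taken from the diff above. The point of the back constraint is that `X` is never a free parameter: it is always `mapping.f(Y)`.

```python
import numpy as np
import GPy

# Toy data: 2-D observations generated from a 1-D latent space (illustrative).
N, input_dim = 50, 1
X_true = np.random.rand(N, input_dim)
k_gen = GPy.kern.RBF(input_dim) + GPy.kern.White(input_dim, 0.05)
Y = np.random.multivariate_normal(np.zeros(N), k_gen.K(X_true), 2).T

# Default back constraint: an MLP from data space to latent space is built
# internally (hidden_dim=10, per the constructor above).
m = GPy.models.BCGPLVM(Y, input_dim, kernel=GPy.kern.RBF(input_dim))

# Explicit mapping: it must go from Y.shape[1] to input_dim, or the new
# assertions in __init__ fire.
mapping = GPy.mappings.MLP(input_dim=Y.shape[1], output_dim=input_dim,
                           hidden_dim=10)
m2 = GPy.models.BCGPLVM(Y, input_dim, kernel=GPy.kern.RBF(input_dim),
                        mapping=mapping)

print(m.checkgrad())  # the gradient check exercised by the new unit test
m.optimize()          # optimizes mapping weights and kernel hyperparameters
```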
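
The gradient plumbing in `parameters_changed` is a plain chain rule: the GP supplies dL/dX through `kern.gradients_X(dL_dK, X)`, and `mapping.update_gradients` pushes that back onto the mapping's own parameters. A hypothetical standalone check of that pattern (plain numpy, no GPy; a linear map stands in for the MLP) against finite differences:

```python
import numpy as np

rng = np.random.default_rng(0)
Y = rng.normal(size=(5, 3))   # observed data
W = rng.normal(size=(3, 2))   # parameters of a toy linear "mapping": X = Y @ W

def loss(W):
    X = Y @ W                 # back-constrained latent points, X = f(Y)
    return np.sum(X ** 2)     # stand-in for the GP log likelihood

X = Y @ W
dL_dX = 2.0 * X               # analogue of kern.gradients_X(dL_dK, X)
dL_dW = Y.T @ dL_dX           # analogue of mapping.update_gradients(Xgradient, Y)

# Finite-difference check, mirroring what m.checkgrad() does.
eps, num = 1e-6, np.zeros_like(W)
for i in range(W.shape[0]):
    for j in range(W.shape[1]):
        Wp, Wm = W.copy(), W.copy()
        Wp[i, j] += eps
        Wm[i, j] -= eps
        num[i, j] = (loss(Wp) - loss(Wm)) / (2 * eps)
print(np.allclose(dL_dW, num, rtol=1e-4))  # True: chain rule matches
```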