diff --git a/GPy/examples/dimensionality_reduction.py b/GPy/examples/dimensionality_reduction.py
index 248183ff..bfdc9bb6 100644
--- a/GPy/examples/dimensionality_reduction.py
+++ b/GPy/examples/dimensionality_reduction.py
@@ -101,22 +101,25 @@ def oil_100():
     return m
 
 def mrd_simulation():
-    num = 2
+    # num = 2
     ard1 = np.array([1., 1, 0, 0], dtype=float)
     ard2 = np.array([0., 1, 1, 0], dtype=float)
-    ard1[ard1 == 0] = 1E+10
-    ard2[ard2 == 0] = 1E+10
+    ard1[ard1 == 0] = 1E-10
+    ard2[ard2 == 0] = 1E-10
 
-    make_params = lambda ard: np.hstack([[1], ard, [1, .3]])
+    ard1i = 1. / ard1
+    ard2i = 1. / ard2
+
+    # make_params = lambda ard: np.hstack([[1], ard, [1, .3]])
 
     D1, D2, N, M, Q = 50, 100, 150, 15, 4
 
     X = np.random.randn(N, Q)
-    k = GPy.kern.rbf(Q, ARD=True, lengthscale=ard1) + GPy.kern.bias(Q, 1) + GPy.kern.white(Q, 0.0001)
+    k = GPy.kern.rbf(Q, ARD=True, lengthscale=ard1i) + GPy.kern.bias(Q, 0) + GPy.kern.white(Q, 0.0001)
     Y1 = np.random.multivariate_normal(np.zeros(N), k.K(X), D1).T
     Y1 -= Y1.mean(0)
 
-    k = GPy.kern.rbf(Q, ARD=True, lengthscale=ard2) + GPy.kern.bias(Q, 1) + GPy.kern.white(Q, 0.0001)
+    k = GPy.kern.rbf(Q, ARD=True, lengthscale=ard2i) + GPy.kern.bias(Q, 0) + GPy.kern.white(Q, 0.0001)
     Y2 = np.random.multivariate_normal(np.zeros(N), k.K(X), D2).T
     Y2 -= Y2.mean(0)
 
@@ -125,9 +128,13 @@
 
     m = MRD(Y1, Y2, Q=Q, M=M, kernel=k, _debug=False)
     m.ensure_default_constraints()
-    m.optimize(messages=1, max_f_eval=5000)
+    # plot the ground-truth ARD weights so they can be compared with the
+    # scales the model learns (see MRD.plot_scales)
+    fig = plt.figure("expected", figsize=(8, 3))
+    ax = fig.add_subplot(121)
+    ax.bar(np.arange(ard1.size) + .1, ard1)
+    ax = fig.add_subplot(122)
+    ax.bar(np.arange(ard2.size) + .1, ard2)
 
-    import ipdb;ipdb.set_trace()
     return m
 
 def brendan_faces():
@@ -175,7 +182,7 @@ def BGPLVM_oil():
 
     Q = 10
     M = 30
-    kernel = GPy.kern.rbf(Q, ARD = True) + GPy.kern.bias(Q) + GPy.kern.white(Q)
+    kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q) + GPy.kern.white(Q)
     m = GPy.models.Bayesian_GPLVM(X, Q, kernel=kernel, M=M)
     # m.scale_factor = 100.0
     m.constrain_positive('(white|noise|bias|X_variance|rbf_variance|rbf_length)')
diff --git a/GPy/kern/kern.py b/GPy/kern/kern.py
index 7d3b1737..6f9c97f6 100644
--- a/GPy/kern/kern.py
+++ b/GPy/kern/kern.py
@@ -52,7 +52,8 @@ class kern(parameterised):
 
         parameterised.__init__(self)
 
-    def plot_ARD(self):
+    def plot_ARD(self, ax=None):
         """
         If an ARD kernel is present, it bar-plots the ARD parameters
+        onto ``ax`` (matplotlib axes; defaults to the current pylab axes).
 
@@ -60,13 +61,17 @@ class kern(parameterised):
         """
+        # resolve the target axes at call time: an `ax=pb.gca()` default
+        # would be evaluated once at import and pin whatever axes (if any)
+        # happened to be current then
+        if ax is None:
+            ax = pb.gca()
         for p in self.parts:
             if hasattr(p, 'ARD') and p.ARD:
-                pb.figure()
-                pb.title('ARD parameters, %s kernel' % p.name)
+                ax.set_title('ARD parameters, %s kernel' % p.name)
                 if p.name == 'linear':
                     ard_params = p.variances
                 else:
                     ard_params = 1./p.lengthscale
-                pb.bar(np.arange(len(ard_params))-0.4, ard_params)
-
+                ax.bar(np.arange(len(ard_params)) - 0.4, ard_params)
+                ax.set_xticks(np.arange(len(ard_params)))
+                ax.set_xticklabels(["${}$".format(i + 1) for i in range(len(ard_params))])
 
     def _transform_gradients(self,g):
diff --git a/GPy/models/mrd.py b/GPy/models/mrd.py
index 0ba4a695..842ef42f 100644
--- a/GPy/models/mrd.py
+++ b/GPy/models/mrd.py
@@ -8,6 +8,7 @@
 from GPy.models.Bayesian_GPLVM import Bayesian_GPLVM
 import numpy
 from GPy.models.sparse_GP import sparse_GP
 import itertools
+import pylab
 
 class MRD(model):
@@ -103,17 +104,17 @@
     def _set_params(self, x):
         start = 0; end = self.NQ
-        X = x[start:end].reshape(self.N, self.Q)
+        X = x[start:end].reshape(self.N, self.Q).copy()
         start = end; end += start
-        X_var = x[start:end].reshape(self.N, self.Q)
+        X_var = x[start:end].reshape(self.N, self.Q).copy()
         start = end; end += self.MQ
-        Z = x[start:end].reshape(self.M, self.Q)
+        Z = x[start:end].reshape(self.M, self.Q).copy()
         thetas = x[end:]
 
         # set params for all others:
         for g, s, e in itertools.izip(self.bgplvms, self.nparams,
                                       self.nparams[1:]):
             self._set_var_params(g, X, X_var, Z)
-            self._set_kern_params(g, thetas[s:e])
+            self._set_kern_params(g, thetas[s:e].copy())
             g._compute_kernel_matrices()
             g._computations()
 
@@ -135,5 +136,8 @@
             for g in self.bgplvms[1:]])))
 
     def plot_scales(self):
-        pass
-
+        # one subplot per output view, each drawn by kern.plot_ARD
+        fig = pylab.figure("MRD Scales", figsize=(4 * len(self.bgplvms), 3))
+        for i, g in enumerate(self.bgplvms):
+            ax = fig.add_subplot(1, len(self.bgplvms), i + 1)
+            g.kern.plot_ARD(ax=ax)