From 1294c24a28bc46b6d8e47b4a820589f454290093 Mon Sep 17 00:00:00 2001
From: Max Zwiessele
Date: Mon, 24 Mar 2014 13:33:16 +0000
Subject: [PATCH] mrd and bgplvm updates to conform to the new VarDTC

---
 GPy/examples/dimensionality_reduction.py | 17 +++++++-----
 GPy/models/bayesian_gplvm.py             |  8 ++++++
 GPy/models/mrd.py                        | 34 ++++++++++++++----------
 3 files changed, 38 insertions(+), 21 deletions(-)

diff --git a/GPy/examples/dimensionality_reduction.py b/GPy/examples/dimensionality_reduction.py
index ea997d63..8171a032 100644
--- a/GPy/examples/dimensionality_reduction.py
+++ b/GPy/examples/dimensionality_reduction.py
@@ -277,7 +277,9 @@ def bgplvm_simulation(optimize=True, verbose=1,
     k = kern.Linear(Q, ARD=True)# + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)
     #k = kern.RBF(Q, ARD=True, lengthscale=10.)
     m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing, kernel=k)
-
+    m.X.variance[:] = _np.random.uniform(0, .01, m.X.shape)
+    m.likelihood.variance = .1
+
     if optimize:
         print "Optimizing model:"
         m.optimize('bfgs', messages=verbose, max_iters=max_iters,
@@ -299,15 +301,16 @@ def bgplvm_simulation_missing_data(optimize=True, verbose=1,
     _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim)
     Y = Ylist[0]
     k = kern.Linear(Q, ARD=True)# + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)
-
+
     inan = _np.random.binomial(1, .6, size=Y.shape).astype(bool)
     m = BayesianGPLVM(Y.copy(), Q, init="random", num_inducing=num_inducing, kernel=k)
     m.inference_method = VarDTCMissingData()
     m.Y[inan] = _np.nan
-    m.X.variance *= .1
+    m.X.variance[:] = _np.random.uniform(0, .01, m.X.shape)
+    m.likelihood.variance = .01
     m.parameters_changed()
     m.Yreal = Y
-
+
     if optimize:
         print "Optimizing model:"
         m.optimize('bfgs', messages=verbose, max_iters=max_iters,
@@ -325,11 +328,11 @@ def mrd_simulation(optimize=True, verbose=True, plot=True, plot_sim=True, **kw):
     D1, D2, D3, N, num_inducing, Q = 60, 20, 36, 60, 6, 5
     _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim)
-
+
     #Ylist = [Ylist[0]]
-    k = [kern.Linear(Q, ARD=True) + kern.White(Q, 1e-4) for _ in range(len(Ylist))]
+    k = [kern.Linear(Q, ARD=True) for _ in range(len(Ylist))]
     m = MRD(Ylist, input_dim=Q, num_inducing=num_inducing, kernel=k, initx="", initz='permute', **kw)
-
+
     m['.*noise'] = [Y.var()/500. for Y in Ylist]
     #for i, Y in enumerate(Ylist):
     #    m['.*Y_{}.*Gaussian.*noise'.format(i)] = Y.var(1) / 500.
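For context, the initialization pattern the updated examples now share can be sketched in plain NumPy (a minimal sketch: N, D and Q are stand-in shapes for what _simulate_sincos produces, and the model attributes are shown as bare arrays rather than GPy parameters):

    import numpy as np

    # Stand-in shapes; the examples get Y from _simulate_sincos(...).
    N, D, Q = 60, 20, 5
    Y = np.random.randn(N, D)

    # bgplvm_simulation_missing_data: hide roughly 60% of the entries
    # by drawing a Bernoulli(0.6) mask and writing NaNs into Y, while
    # keeping the complete matrix around as Yreal for later comparison.
    inan = np.random.binomial(1, .6, size=Y.shape).astype(bool)
    Yreal = Y.copy()
    Y[inan] = np.nan

    # Both examples now start the variational posterior over X with
    # small uniform variances, instead of scaling the existing ones
    # (the old `m.X.variance *= .1`), and set a small noise variance.
    X_variance = np.random.uniform(0, .01, (N, Q))
    noise_variance = .01   # .1 in the fully observed example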
diff --git a/GPy/models/bayesian_gplvm.py b/GPy/models/bayesian_gplvm.py
index a3ebdb7d..ef3462f6 100644
--- a/GPy/models/bayesian_gplvm.py
+++ b/GPy/models/bayesian_gplvm.py
@@ -50,6 +50,14 @@ class BayesianGPLVM(SparseGP):
         self.variational_prior = NormalPrior()
         X = NormalPosterior(X, X_variance)
 
+        if inference_method is None:
+            if np.any(np.isnan(Y)):
+                from ..inference.latent_function_inference.var_dtc import VarDTCMissingData
+                inference_method = VarDTCMissingData()
+            else:
+                from ..inference.latent_function_inference.var_dtc import VarDTC
+                inference_method = VarDTC()
+
         SparseGP.__init__(self, X, Y, Z, kernel, likelihood, inference_method, name, **kwargs)
 
         self.add_parameter(self.X, index=0)
diff --git a/GPy/models/mrd.py b/GPy/models/mrd.py
index 177ddc19..36088e35 100644
--- a/GPy/models/mrd.py
+++ b/GPy/models/mrd.py
@@ -51,24 +51,25 @@ class MRD(Model):
                  inference_method=None, likelihood=None, name='mrd', Ynames=None):
         super(MRD, self).__init__(name)
 
+        self.input_dim = input_dim
+        self.num_inducing = num_inducing
+
+        self.Ylist = Ylist
+        self._in_init_ = True
+        X, fracs = self._init_X(initx, Ylist)
+        self.Z = Param('inducing inputs', self._init_Z(initz, X))
+        self.num_inducing = self.Z.shape[0] # ensure M==N if M>N
+
         # sort out the kernels
         if kernel is None:
             from ..kern import RBF
-            self.kern = [RBF(input_dim, ARD=1, name='rbf'.format(i)) for i in range(len(Ylist))]
+            self.kern = [RBF(input_dim, ARD=1, lengthscale=fracs[i], name='rbf'.format(i)) for i in range(len(Ylist))]
         elif isinstance(kernel, Kern):
             self.kern = [kernel.copy(name='{}'.format(kernel.name, i)) for i in range(len(Ylist))]
         else:
             assert len(kernel) == len(Ylist), "need one kernel per output"
             assert all([isinstance(k, Kern) for k in kernel]), "invalid kernel object detected!"
             self.kern = kernel
-        self.input_dim = input_dim
-        self.num_inducing = num_inducing
-
-        self.Ylist = Ylist
-        self._in_init_ = True
-        X = self._init_X(initx, Ylist)
-        self.Z = Param('inducing inputs', self._init_Z(initz, X))
-        self.num_inducing = self.Z.shape[0] # ensure M==N if M>N
 
         if X_variance is None:
             X_variance = np.random.uniform(0, .1, X.shape)
@@ -108,8 +109,7 @@
         self._log_marginal_likelihood = 0
         self.posteriors = []
         self.Z.gradient = 0.
-        self.X.mean.gradient = 0.
-        self.X.variance.gradient = 0.
+        self.X.gradient = 0.
 
         for y, k, l, i in itertools.izip(self.Ylist, self.kern, self.likelihood, self.inference_method):
             posterior, lml, grad_dict = i.inference(k, self.X, self.Z, l, y)
@@ -147,14 +147,20 @@
         if Ylist is None:
             Ylist = self.Ylist
         if init in "PCA_concat":
-            X = initialize_latent('PCA', np.hstack(Ylist), self.input_dim)
+            X, fracs = initialize_latent('PCA', self.input_dim, np.hstack(Ylist))
+            fracs = [fracs]*self.input_dim
         elif init in "PCA_single":
             X = np.zeros((Ylist[0].shape[0], self.input_dim))
+            fracs = []
             for qs, Y in itertools.izip(np.array_split(np.arange(self.input_dim), len(Ylist)), Ylist):
-                X[:, qs] = initialize_latent('PCA', Y, len(qs))
+                x, frcs = initialize_latent('PCA', len(qs), Y)
+                X[:, qs] = x
+                fracs.append(frcs)
         else: # init == 'random':
             X = np.random.randn(Ylist[0].shape[0], self.input_dim)
-        return X
+            fracs = X.var(0)
+            fracs = [fracs]*self.input_dim
+        return X, fracs
 
     def _init_Z(self, init="permute", X=None):
         if X is None:
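The behavioural core of the bayesian_gplvm.py change is the NaN-based default for the inference method. A self-contained sketch of that dispatch, with stand-in classes in place of GPy's actual VarDTC objects:

    import numpy as np

    class VarDTC(object):
        """Stand-in for GPy's var_dtc.VarDTC inference object."""

    class VarDTCMissingData(object):
        """Stand-in for GPy's var_dtc.VarDTCMissingData inference object."""

    def default_inference_method(Y, inference_method=None):
        # Mirrors the default added to BayesianGPLVM.__init__: only when
        # the caller passes no inference method, inspect Y and pick the
        # missing-data variant if any entry is NaN.
        if inference_method is None:
            if np.any(np.isnan(Y)):
                inference_method = VarDTCMissingData()
            else:
                inference_method = VarDTC()
        return inference_method

    Y = np.random.randn(10, 3)
    assert isinstance(default_inference_method(Y), VarDTC)
    Y[0, 0] = np.nan
    assert isinstance(default_inference_method(Y), VarDTCMissingData)

This keeps explicit choices untouched (a user-supplied inference_method is returned as-is), which is why the missing-data example above can still assign m.inference_method = VarDTCMissingData() directly.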