From 46f59f9f6427af69db0097957e1374c1a03f27d6 Mon Sep 17 00:00:00 2001 From: Max Zwiessele Date: Thu, 20 Feb 2014 08:38:14 +0000 Subject: [PATCH] gradients now lazily instantiated --- GPy/core/parameterization/param.py | 11 +++++++++-- GPy/examples/dimensionality_reduction.py | 3 ++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/GPy/core/parameterization/param.py b/GPy/core/parameterization/param.py index 75d9faf2..c052099d 100644 --- a/GPy/core/parameterization/param.py +++ b/GPy/core/parameterization/param.py @@ -54,7 +54,7 @@ class Param(ObservableArray, Constrainable, Gradcheckable, Indexable, Parentable obj._tied_to_me_ = SetDict() obj._tied_to_ = [] obj._original_ = True - obj.gradient = None + obj._gradient_ = None return obj def __init__(self, name, input_array, default_constraint=None): @@ -76,10 +76,17 @@ class Param(ObservableArray, Constrainable, Gradcheckable, Indexable, Parentable self._updated_ = getattr(obj, '_updated_', None) self._original_ = getattr(obj, '_original_', None) self._name = getattr(obj, 'name', None) - self.gradient = getattr(obj, 'gradient', None) + self._gradient_ = getattr(obj, '_gradient_', None) self.constraints = getattr(obj, 'constraints', None) self.priors = getattr(obj, 'priors', None) + + @property + def gradient(self): + if self._gradient_ is None: + self._gradient_ = numpy.zeros(self._realshape_) + return self._gradient_ + #=========================================================================== # Pickling operations #=========================================================================== diff --git a/GPy/examples/dimensionality_reduction.py b/GPy/examples/dimensionality_reduction.py index 2924386f..4d42026d 100644 --- a/GPy/examples/dimensionality_reduction.py +++ b/GPy/examples/dimensionality_reduction.py @@ -296,11 +296,12 @@ def bgplvm_simulation_missing_data(optimize=True, verbose=1, k = kern.linear(Q, ARD=True)# + kern.white(Q, _np.exp(-2)) # + kern.bias(Q) inan = _np.random.binomial(1, .6, 
size=Y.shape).astype(bool) - m = BayesianGPLVM(Y, Q, init="random", num_inducing=num_inducing, kernel=k) + m = BayesianGPLVM(Y.copy(), Q, init="random", num_inducing=num_inducing, kernel=k) m.inference_method = VarDTCMissingData() m.Y[inan] = _np.nan m.q.variance *= .1 m.parameters_changed() + m.Yreal = Y if optimize: print "Optimizing model:"