gradients now lazily instantiated

Max Zwiessele 2014-02-20 08:38:14 +00:00
parent 1c3fe0c51e
commit 46f59f9f64
2 changed files with 11 additions and 3 deletions


@@ -54,7 +54,7 @@ class Param(ObservableArray, Constrainable, Gradcheckable, Indexable, Parentable
         obj._tied_to_me_ = SetDict()
         obj._tied_to_ = []
         obj._original_ = True
-        obj.gradient = None
+        obj._gradient_ = None
         return obj
 
     def __init__(self, name, input_array, default_constraint=None):
@@ -76,10 +76,17 @@ class Param(ObservableArray, Constrainable, Gradcheckable, Indexable, Parentable
         self._updated_ = getattr(obj, '_updated_', None)
         self._original_ = getattr(obj, '_original_', None)
         self._name = getattr(obj, 'name', None)
-        self.gradient = getattr(obj, 'gradient', None)
+        self._gradient_ = getattr(obj, '_gradient_', None)
         self.constraints = getattr(obj, 'constraints', None)
         self.priors = getattr(obj, 'priors', None)
 
+    @property
+    def gradient(self):
+        if self._gradient_ is None:
+            self._gradient_ = numpy.zeros(self._realshape_)
+        return self._gradient_
+
     #===========================================================================
     # Pickling operations
     #===========================================================================
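
The new property implements the lazy-initialization idiom named in the commit message: the backing field _gradient_ starts as None, and the zeros array is only allocated on first access. A minimal standalone sketch of the pattern, assuming a plain object with a fixed _realshape_ (the LazyParam name is illustrative; GPy's Param is an ndarray subclass with more machinery):

import numpy

class LazyParam(object):
    def __init__(self, shape):
        self._realshape_ = shape
        self._gradient_ = None  # no buffer allocated yet

    @property
    def gradient(self):
        # Allocate the zeros array only when somebody actually asks for it.
        if self._gradient_ is None:
            self._gradient_ = numpy.zeros(self._realshape_)
        return self._gradient_

p = LazyParam((3, 2))
assert p._gradient_ is None    # construction stays cheap
p.gradient[0, 0] = 1.0         # first read triggers the allocation
assert p._gradient_.shape == (3, 2)

Note that the hunk only adds a getter; with a bare property, direct assignment such as p.gradient = arr would raise AttributeError unless a setter is defined elsewhere in the class.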


@@ -296,11 +296,12 @@ def bgplvm_simulation_missing_data(optimize=True, verbose=1,
     k = kern.linear(Q, ARD=True)# + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)
     inan = _np.random.binomial(1, .6, size=Y.shape).astype(bool)
-    m = BayesianGPLVM(Y, Q, init="random", num_inducing=num_inducing, kernel=k)
+    m = BayesianGPLVM(Y.copy(), Q, init="random", num_inducing=num_inducing, kernel=k)
     m.inference_method = VarDTCMissingData()
     m.Y[inan] = _np.nan
     m.q.variance *= .1
     m.parameters_changed()
+    m.Yreal = Y
     if optimize:
         print "Optimizing model:"