assorted fixes

This commit is contained in:
James Hensman 2014-02-06 09:28:27 +00:00
parent 75241ecf89
commit 6346af8764
5 changed files with 10 additions and 8 deletions

View file

@@ -58,7 +58,6 @@ class GP(Model):
         self.parameters_changed()

     def parameters_changed(self):
-        print self.kern
         self.posterior, self._log_marginal_likelihood, grad_dict = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y)
         self._dL_dK = grad_dict['dL_dK']

View file

@@ -170,15 +170,13 @@ class Model(Parameterized):
         # first take care of all parameters (from N(0,1))
         #x = self._get_params_transformed()
         x = np.random.randn(self.size_transformed)
-        self._set_params_transformed(x)
+        x = self._untransform_params(x)
         # now draw from prior where possible
-        x = self._get_params()
-        if self.priors is not None:
+        if self.priors is not None and len(self.priors):
             [np.put(x, i, p.rvs(1)) for i, p in enumerate(self.priors) if not p is None]
         self._set_params(x)
         #self._set_params_transformed(self._get_params_transformed()) # makes sure all of the tied parameters get the same init (since there's only one prior object...)

     def optimize_restarts(self, num_restarts=10, robust=False, verbose=True, parallel=False, num_processes=None, **kwargs):
         """
         Perform random restarts of the model, and set the model to the best

View file

@@ -27,6 +27,7 @@ class ObservableArray(ListArray, Observable):
     """
     __array_priority__ = 0 # Never give back Param

     def __new__(cls, input_array):
+        cls.__name__ = "ObservableArray\n "
         obj = super(ObservableArray, cls).__new__(cls, input_array).view(cls)
         obj._observers_ = {}
         return obj

View file

@@ -316,7 +316,10 @@ class Parameterized(Constrainable, Pickleable, Observable):
         return n

     def _get_params(self):
         # don't overwrite this anymore!
+        if not self.size:
+            return np.empty(shape=(0,), dtype=np.float64)
         return numpy.hstack([x._get_params() for x in self._parameters_ if x.size>0])

     def _set_params(self, params, update=True):
         # don't overwrite this anymore!
         [p._set_params(params[s], update=update) for p,s in itertools.izip(self._parameters_,self._param_slices_)]
@@ -330,10 +333,12 @@ class Parameterized(Constrainable, Pickleable, Observable):
         return p

     def _set_params_transformed(self, p):
         # inverse apply transformations for parameters and set the resulting parameters
+        self._set_params(self._untransform_params(p))
+
+    def _untransform_params(self, p):
         p = p.copy()
         if self._has_fixes(): tmp = self._get_params(); tmp[self._fixes_] = p; p = tmp; del tmp
         [numpy.put(p, ind, c.f(p[ind])) for c,ind in self.constraints.iteritems() if c != __fixed__]
-        self._set_params(p)
+        return p

     def _name_changed(self, param, old_name):
         if hasattr(self, old_name) and old_name in self._added_names_:
             delattr(self, old_name)

View file

@@ -43,7 +43,6 @@ class SparseGP(GP):
             print "defaulting to ", inference_method, "for latent function inference"
         self.Z = Param('inducing inputs', Z)
         self.num_inducing = Z.shape[0]
         if not (X_variance is None):