Minor reorganising

Alan Saul 2014-02-11 14:06:42 +00:00
parent 9eef4ebded
commit c76f1a4d6d
3 changed files with 8 additions and 9 deletions

View file

@@ -157,14 +157,14 @@ class Param(ObservableArray, Constrainable, Gradcheckable):
     #===========================================================================
     def tie_to(self, param):
         """
         :param param: the parameter object to tie this parameter to.
             Can be ParamConcatenation (retrieved by regexp search)

         Tie this parameter to the given parameter.
         Broadcasting is not allowed, but you can tie a whole dimension to
         one parameter: self[:,0].tie_to(other), where other is a one-value
         parameter.

         Note: For now only one parameter can have ties, so all ties of a
         parameter will be removed when re-tying!
         """
@@ -534,7 +534,7 @@ class ParamConcatenation(object):
     def checkgrad(self, verbose=0, step=1e-6, tolerance=1e-3):
         return self.params[0]._highest_parent_._checkgrad(self, verbose, step, tolerance)
     #checkgrad.__doc__ = Gradcheckable.checkgrad.__doc__
     __lt__ = lambda self, val: self._vals() < val
     __le__ = lambda self, val: self._vals() <= val
     __eq__ = lambda self, val: self._vals() == val

View file

@@ -92,12 +92,11 @@ class LaplaceInference(object):
         iteration = 0
         while difference > self._mode_finding_tolerance and iteration < self._mode_finding_max_iter:
             W = -likelihood.d2logpdf_df2(f, Y, extra_data=Y_metadata)
-            W_f = W*f
             grad = likelihood.dlogpdf_df(f, Y, extra_data=Y_metadata)
+            W_f = W*f
             b = W_f + grad # R+W p46 line 6.
-            #W12BiW12Kb, B_logdet = self._compute_B_statistics(K, W.copy(), np.dot(K, b), likelihood.log_concave)
             W12BiW12, _, _ = self._compute_B_statistics(K, W, likelihood.log_concave)
             W12BiW12Kb = np.dot(W12BiW12, np.dot(K, b))
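For reference, the loop above is the mode-finding Newton iteration of Rasmussen & Williams (Algorithm 3.1, p. 46). A self-contained sketch of one update, with hypothetical callables standing in for the likelihood's gradient and Hessian (this is not GPy's API):

    import numpy as np

    def newton_step(K, f, dlogpdf_df, d2logpdf_df2):
        # One Newton update for the Laplace approximation (R&W Alg. 3.1, lines 4-8).
        W = -d2logpdf_df2(f)      # negative Hessian of log p(y|f); diagonal, kept as a vector
        grad = dlogpdf_df(f)      # gradient of log p(y|f)
        b = W * f + grad          # line 6: b = W f + grad
        W12 = np.sqrt(W)          # W^(1/2); real when the likelihood is log-concave
        B = np.eye(f.size) + W12[:, None] * K * W12[None, :]  # B = I + W^(1/2) K W^(1/2)
        L = np.linalg.cholesky(B)
        c = np.linalg.solve(L, W12 * K.dot(b))
        a = b - W12 * np.linalg.solve(L.T, c)  # line 7: a = b - W^(1/2) B^(-1) W^(1/2) K b
        return K.dot(a)           # line 8: the new f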

View file

@@ -1,10 +1,10 @@
 import numpy as np
 import unittest
 import GPy
-from GPy.models import GradientChecker
+from ..models import GradientChecker
 import functools
 import inspect
-from GPy.likelihoods import link_functions
+from ..likelihoods import link_functions
 from ..core.parameterization import Param
 from functools import partial
 #np.random.seed(300)