Fixed more errors in docs 2

This commit is contained in:
James McMurray 2013-09-20 17:46:23 +01:00
parent 7f2472fa22
commit aa5ebcbd8f
15 changed files with 99 additions and 84 deletions

View file

@ -53,9 +53,11 @@ class TanhWarpingFunction(WarpingFunction):
self.num_parameters = 3 * self.n_terms
def f(self,y,psi):
"""transform y with f using parameter vector psi
"""
transform y with f using parameter vector psi
psi = [[a,b,c]]
f = \sum_{terms} a * tanh(b*(y+c))
:math:`f = \\sum_{terms} a * tanh(b*(y+c))`
"""
#1. check that number of params is consistent
@ -77,8 +79,7 @@ class TanhWarpingFunction(WarpingFunction):
"""
calculate the numerical inverse of f
== input ==
iterations: number of N.R. iterations
:param iterations: number of N.R. iterations
"""
@ -165,9 +166,11 @@ class TanhWarpingFunction_d(WarpingFunction):
self.num_parameters = 3 * self.n_terms + 1
def f(self,y,psi):
"""transform y with f using parameter vector psi
"""
Transform y with f using parameter vector psi
psi = [[a,b,c]]
f = \sum_{terms} a * tanh(b*(y+c))
:math:`f = \\sum_{terms} a * tanh(b*(y+c))`
"""
#1. check that number of params is consistent
@ -189,8 +192,7 @@ class TanhWarpingFunction_d(WarpingFunction):
"""
calculate the numerical inverse of f
== input ==
iterations: number of N.R. iterations
:param max_iterations: maximum number of N.R. iterations
"""
@ -214,12 +216,13 @@ class TanhWarpingFunction_d(WarpingFunction):
def fgrad_y(self, y, psi, return_precalc = False):
"""
gradient of f w.r.t to y ([N x 1])
returns: Nx1 vector of derivatives, unless return_precalc is true,
then it also returns the precomputed stuff
:returns: Nx1 vector of derivatives, unless return_precalc is true, then it also returns the precomputed stuff
"""
mpsi = psi.copy()
mpsi = psi.copy()
d = psi[-1]
mpsi = mpsi[:self.num_parameters-1].reshape(self.n_terms, 3)
@ -242,7 +245,7 @@ class TanhWarpingFunction_d(WarpingFunction):
"""
gradient of f w.r.t to y and psi
returns: NxIx4 tensor of partial derivatives
:returns: NxIx4 tensor of partial derivatives
"""