[whitespaces]

This commit is contained in:
mzwiessele 2014-07-07 16:00:58 -07:00
parent 8dacea2c13
commit 7c8de7fff8
4 changed files with 5 additions and 8 deletions

View file

@@ -751,8 +751,6 @@ class OptimizationHandlable(Indexable):
         Transform the gradients by multiplying the gradient factor for each
         constraint to it.
         """
-        if self.has_parent():
-            return g
         [np.put(g, i, g[i] * c.gradfactor(self.param_array[i])) for c, i in self.constraints.iteritems() if c != __fixed__]
         if self._has_fixes(): return g[self._fixes_]
         return g

View file

@@ -76,11 +76,11 @@ class Uniform(Prior):
         o = super(Prior, cls).__new__(cls, lower, upper)
         cls._instances.append(weakref.ref(o))
         return cls._instances[-1]()

     def __init__(self, lower, upper):
         self.lower = float(lower)
         self.upper = float(upper)

     def __str__(self):
         return "[" + str(np.round(self.lower)) + ', ' + str(np.round(self.upper)) + ']'
@@ -93,7 +93,7 @@ class Uniform(Prior):
     def rvs(self, n):
         return np.random.uniform(self.lower, self.upper, size=n)

 class LogGaussian(Prior):
     """
     Implementation of the univariate *log*-Gaussian probability function, coupled with random variables.
@@ -246,7 +246,7 @@ class Gamma(Prior):
         """
         Creates an instance of a Gamma Prior by specifying the Expected value(s)
         and Variance(s) of the distribution.

         :param E: expected value
         :param V: variance
         """

View file

@@ -296,7 +296,7 @@ def bgplvm_simulation_missing_data(optimize=True, verbose=1,
     from GPy.models import BayesianGPLVM
     from GPy.inference.latent_function_inference.var_dtc import VarDTCMissingData

-    D1, D2, D3, N, num_inducing, Q = 1000, 5, 8, 400, 3, 4
+    D1, D2, D3, N, num_inducing, Q = 13, 5, 8, 400, 3, 4
     _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim)
     Y = Ylist[0]
     k = kern.Linear(Q, ARD=True)# + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)

View file

@@ -302,7 +302,6 @@ class VarDTCMissingData(LatentFunctionInference):
             #if ((i+1.)/size) >= next_ten:
            #    logger.info('preparing traces {:> 6.1%}'.format((i+1.)/size))
            #    next_ten += .1
            #y = y[v]
            if het_noise: beta = beta_all[i]
            else: beta = beta_all