remove the print message in model.checkgrad

This commit is contained in:
Zhenwen Dai 2014-09-02 12:33:52 +01:00
parent 808cfb0501
commit e46f3b342e
3 changed files with 4 additions and 5 deletions

View file

@@ -291,7 +291,7 @@ class Model(Parameterized):
         # just check the global ratio
         dx = np.zeros(x.shape)
         dx[transformed_index] = step * (np.sign(np.random.uniform(-1, 1, transformed_index.size)) if transformed_index.size != 2 else 1.)
         # evaulate around the point x
         f1 = self._objective(x + dx)
         f2 = self._objective(x - dx)
@@ -303,7 +303,6 @@ class Model(Parameterized):
         denominator = (2 * np.dot(dx, gradient))
         global_ratio = (f1 - f2) / np.where(denominator == 0., 1e-32, denominator)
         global_diff = np.abs(f1 - f2) < tolerance and np.allclose(gradient, 0, atol=tolerance)
-        print self.mpi_comm.rank,global_ratio,global_diff
         if global_ratio is np.nan:
             global_ratio = 0
         return np.abs(1. - global_ratio) < tolerance or global_diff

View file

@@ -46,7 +46,7 @@ class SparseGP_MPI(SparseGP):
         self.add_parameter(self.X, index=0)
         if variational_prior is not None:
             self.add_parameter(variational_prior)
-        self.X.fix()
+        # self.X.fix()
         self.mpi_comm = mpi_comm
         # Manage the data (Y) division

View file

@@ -70,8 +70,8 @@ class SSGPLVM(SparseGP_MPI):
         X = SpikeAndSlabPosterior(X, X_variance, gamma)
         super(SSGPLVM,self).__init__(X, Y, Z, kernel, likelihood, variational_prior=self.variational_prior, inference_method=inference_method, name=name, mpi_comm=mpi_comm, normalizer=normalizer, **kwargs)
-        self.X.unfix()
-        self.X.variance.constrain_positive()
+        # self.X.unfix()
+        # self.X.variance.constrain_positive()
         if self.group_spike:
             [self.X.gamma[:,i].tie('tieGamma'+str(i)) for i in xrange(self.X.gamma.shape[1])] # Tie columns together