Minor changes

This commit is contained in:
Ricardo 2013-06-05 18:01:53 +01:00
parent ab6a87a4d5
commit 616e8c9026
5 changed files with 10 additions and 14 deletions

View file

@@ -36,8 +36,9 @@ class FITC(SparseGP):
For a Gaussian likelihood, no iteration is required:
this function does nothing
"""
self.likelihood.restart()
self.likelihood.fit_FITC(self.Kmm,self.psi1,self.psi0)
self._set_params(self._get_params()) # update the GP self._set_params(self._get_params())
def _compute_kernel_matrices(self):
# kernel computations, using BGPLVM notation

View file

@@ -67,6 +67,7 @@ class GP(GPBase):
For a Gaussian likelihood, no iteration is required:
this function does nothing
"""
self.likelihood.restart()
self.likelihood.fit_full(self.kern.K(self.X))
self._set_params(self._get_params()) # update the GP

View file

@@ -173,7 +173,7 @@ class SparseGP(GPBase):
this function does nothing
"""
if not isinstance(self.likelihood, Gaussian): # Updates not needed for Gaussian likelihood
self.likelihood.restart() # TODO check consistency with pseudo_EP self.likelihood.restart()
if self.has_uncertain_inputs:
Lmi = chol_inv(self.Lm)
Kmmi = tdot(Lmi.T)

View file

@@ -26,7 +26,7 @@ class FITCClassification(FITC):
""" """
def __init__(self, X, Y=None, likelihood=None, kernel=None, normalize_X=False, normalize_Y=False, Z=None, M=10): def __init__(self, X, Y=None, likelihood=None, kernel=None, normalize_X=False, normalize_Y=False, Z=None, num_inducing=10):
if kernel is None:
kernel = kern.rbf(X.shape[1]) + kern.white(X.shape[1],1e-3)
@@ -38,7 +38,7 @@ class FITCClassification(FITC):
raise Warning, 'likelihood.data and Y are different.'
if Z is None:
i = np.random.permutation(X.shape[0])[:M] i = np.random.permutation(X.shape[0])[:num_inducing]
Z = X[i].copy()
else:
assert Z.shape[1]==X.shape[1]

View file

@@ -175,7 +175,6 @@ class GradientTests(unittest.TestCase):
m.ensure_default_constraints()
m.update_likelihood_approximation()
self.assertTrue(m.checkgrad())
# self.assertTrue(m.EPEM)
def test_sparse_EP_DTC_probit(self):
N = 20
@@ -194,17 +193,12 @@ class GradientTests(unittest.TestCase):
N = 20
X = np.hstack([np.random.rand(N / 2) + 1, np.random.rand(N / 2) - 1])[:, None]
k = GPy.kern.rbf(1) + GPy.kern.white(1)
Y = np.hstack([np.ones(N/2),-np.ones(N/2)])[:,None] Y = np.hstack([np.ones(N/2),np.zeros(N/2)])[:,None]
m = GPy.models.FITCClassification(X, Y=Y)
distribution = GPy.likelihoods.likelihood_functions.Binomial() m.ensure_default_constraints()
likelihood = GPy.likelihoods.EP(Y, distribution) m.update_likelihood_approximation()
#likelihood = GPy.inference.likelihoods.Binomial(Y)
m = GPy.models.generalized_FITC(X,likelihood,k,inducing=4)
m.constrain_positive('(var|len)')
m.approximate_likelihood()
self.assertTrue(m.checkgrad())
if __name__ == "__main__": if __name__ == "__main__":
print "Running unit tests, please be (very) patient..." print "Running unit tests, please be (very) patient..."
unittest.main() unittest.main()