Merge branch 'devel' of github.com:SheffieldML/GPy into devel

This commit is contained in:
James Hensman 2013-04-22 11:58:17 +01:00
commit 7fbcae2503
4 changed files with 16 additions and 7 deletions

View file

@@ -264,8 +264,11 @@ class model(parameterised):
return - LL_gradients - prior_gradients
def objective_and_gradients(self, x):
    """Return the optimisation objective and its gradient at x.

    The objective is the negative penalised log likelihood,
    -(log likelihood + log prior), evaluated after setting the model's
    (transformed) parameters to x; the gradient is the correspondingly
    negated, transform-corrected gradient vector.

    :param x: parameter vector in the transformed (optimiser) space
    :returns: tuple (objective value, objective gradient)
    """
    self._set_params_transformed(x)
    obj_f = -self.log_likelihood() - self.log_prior()
    # Gradients are computed in the model's natural space, then mapped
    # through the parameter transformations before negation.
    LL_gradients = self._transform_gradients(self._log_likelihood_gradients())
    prior_gradients = self._transform_gradients(self._log_prior_gradients())
    obj_grads = -LL_gradients - prior_gradients
    return obj_f, obj_grads
def optimize(self, optimizer=None, start=None, **kwargs):

View file

@@ -220,7 +220,7 @@ class opt_SGD(Optimizer):
b = len(features)/self.batch_size
features = [features[i::b] for i in range(b)]
NLL = []
import pylab as plt
for count, j in enumerate(features):
self.model.D = len(j)
self.model.likelihood.D = len(j)
@@ -230,6 +230,8 @@ class opt_SGD(Optimizer):
shapes = self.get_param_shapes(N, Q)
f, step, Nj = self.step_with_missing_data(f_fp, X, step, shapes)
else:
self.model.likelihood.YYT = np.dot(self.model.likelihood.Y, self.model.likelihood.Y.T)
self.model.likelihood.trYYT = np.trace(self.model.likelihood.YYT)
Nj = N
f, fp = f_fp(self.x_opt)
step = self.momentum * step + self.learning_rate * fp
@@ -244,7 +246,11 @@ class opt_SGD(Optimizer):
NLL.append(f)
self.fopt_trace.append(f)
# fig = plt.figure('traces')
# plt.clf()
# plt.plot(self.param_traces['noise'])
# import pdb; pdb.set_trace()
# for k in self.param_traces.keys():
# self.param_traces[k].append(self.model.get(k)[0])

View file

@@ -69,8 +69,8 @@ class Bayesian_GPLVM(sparse_GP, GPLVM):
return dKL_dmu, dKL_dS
def dL_dmuS(self):
dL_dmu_psi0, dL_dS_psi0 = self.kern.dpsi1_dmuS(self.dL_dpsi1, self.Z, self.X, self.X_variance)
dL_dmu_psi1, dL_dS_psi1 = self.kern.dpsi0_dmuS(self.dL_dpsi0, self.Z, self.X, self.X_variance)
dL_dmu_psi0, dL_dS_psi0 = self.kern.dpsi0_dmuS(self.dL_dpsi0, self.Z, self.X, self.X_variance)
dL_dmu_psi1, dL_dS_psi1 = self.kern.dpsi1_dmuS(self.dL_dpsi1, self.Z, self.X, self.X_variance)
dL_dmu_psi2, dL_dS_psi2 = self.kern.dpsi2_dmuS(self.dL_dpsi2, self.Z, self.X, self.X_variance)
dL_dmu = dL_dmu_psi0 + dL_dmu_psi1 + dL_dmu_psi2
dL_dS = dL_dS_psi0 + dL_dS_psi1 + dL_dS_psi2

View file

@@ -97,7 +97,7 @@ def jitchol_old(A,maxtries=5):
raise linalg.LinAlgError,"not positive definite, even with jitter."
def pdinv(A):
def pdinv(A, *args):
"""
:param A: A DxD pd numpy array
@@ -110,7 +110,7 @@ def pdinv(A):
:rval logdet: the log of the determinant of A
:rtype logdet: float64
"""
L = jitchol(A)
L = jitchol(A, *args)
logdet = 2.*np.sum(np.log(np.diag(L)))
Li = chol_inv(L)
Ai = linalg.lapack.flapack.dpotri(L)[0]