From 81ef734908411ddaec45467b133b5b7a5d44eda2 Mon Sep 17 00:00:00 2001
From: Alan Saul
Date: Wed, 2 Sep 2015 10:46:30 +0300
Subject: [PATCH] Reindented, did some profiling which looks promising

---
 GPy/kern/_src/psi_comp/rbf_psi_comp.py |  2 --
 GPy/models/bayesian_gplvm_minibatch.py | 19 +++++++++----------
 2 files changed, 9 insertions(+), 12 deletions(-)

diff --git a/GPy/kern/_src/psi_comp/rbf_psi_comp.py b/GPy/kern/_src/psi_comp/rbf_psi_comp.py
index 9d85c93e..3fa2de94 100644
--- a/GPy/kern/_src/psi_comp/rbf_psi_comp.py
+++ b/GPy/kern/_src/psi_comp/rbf_psi_comp.py
@@ -68,7 +68,6 @@ def __psi2computations(variance, lengthscale, Z, mu, S):
     _psi2 = variance*variance*np.exp(_psi2_logdenom[:,None,None]+_psi2_exp1[None,:,:]+_psi2_exp2)
     return _psi2
 
 
-@profile
 def psiDerivativecomputations(dL_dpsi0, dL_dpsi1, dL_dpsi2, variance, lengthscale, Z, variational_posterior, psi0=None, psi1=None, psi2=None, Lpsi0=None, Lpsi1=None, Lpsi2=None):
     ARD = (len(lengthscale)!=1)
@@ -122,7 +121,6 @@ def __psi1compDer(dL_dpsi1, variance, lengthscale, Z, mu, S, psi1=None, Lpsi1=No
     return _dL_dvar, _dL_dl, _dL_dZ, _dL_dmu, _dL_dS
 
 
-@profile
 def __psi2compDer(dL_dpsi2, variance, lengthscale, Z, mu, S, psi2=None, Lpsi2=None):
     """
     Z - MxQ
diff --git a/GPy/models/bayesian_gplvm_minibatch.py b/GPy/models/bayesian_gplvm_minibatch.py
index ab4cc909..5fd9af60 100644
--- a/GPy/models/bayesian_gplvm_minibatch.py
+++ b/GPy/models/bayesian_gplvm_minibatch.py
@@ -122,15 +122,15 @@ class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
         """
         super(BayesianGPLVMMiniBatch, self)._outer_values_update(full_values)
         if self.has_uncertain_inputs():
-            meangrad_tmp, vargrad_tmp = self.kern.gradients_qX_expectations(
-                                            variational_posterior=self.X,
-                                            Z=self.Z, dL_dpsi0=full_values['dL_dpsi0'],
-                                            dL_dpsi1=full_values['dL_dpsi1'],
-                                            dL_dpsi2=full_values['dL_dpsi2'],
-                                            psi0=self.psi0, psi1=self.psi1, psi2=self.psi2,
-                                            Lpsi0=full_values['Lpsi0'], Lpsi1=full_values['Lpsi1'], Lpsi2=full_values['Lpsi2'])
-            full_values['meangrad'] += meangrad_tmp
-            full_values['vargrad'] += vargrad_tmp
+            meangrad_tmp, vargrad_tmp = self.kern.gradients_qX_expectations(
+                variational_posterior=self.X,
+                Z=self.Z, dL_dpsi0=full_values['dL_dpsi0'],
+                dL_dpsi1=full_values['dL_dpsi1'],
+                dL_dpsi2=full_values['dL_dpsi2'],
+                psi0=self.psi0, psi1=self.psi1, psi2=self.psi2,
+                Lpsi0=full_values['Lpsi0'], Lpsi1=full_values['Lpsi1'], Lpsi2=full_values['Lpsi2'])
+            full_values['meangrad'] += meangrad_tmp
+            full_values['vargrad'] += vargrad_tmp
         else:
             full_values['Xgrad'] = self.kern.gradients_X(full_values['dL_dKnm'], self.X, self.Z)
             full_values['Xgrad'] += self.kern.gradients_X_diag(full_values['dL_dKdiag'], self.X)
@@ -146,7 +146,6 @@ class BayesianGPLVMMiniBatch(SparseGPMiniBatch):
             full_values['meangrad'] = np.zeros((self.X.shape[0], self.X.shape[1]))
             full_values['vargrad'] = np.zeros((self.X.shape[0], self.X.shape[1]))
 
-            #FIXME Hack
             full_values['dL_dpsi0'] = ObsAr(np.zeros(self.X.shape[0]))
            full_values['dL_dpsi1'] = ObsAr(np.zeros((self.X.shape[0], self.Z.shape[0])))
            full_values['dL_dpsi2'] = ObsAr(np.zeros((self.Z.shape[0], self.Z.shape[0])))
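
Note, not part of the commit: the two removed @profile decorators are the bare
markers consumed by kernprof/line_profiler, which injects a global named
profile into builtins for the profiled run and leaves it undefined otherwise,
so the decorators have to come out before the module can be imported normally
again. A minimal sketch of that workflow, using a hypothetical stand-in
function rather than the real psi-statistic code:

    # profile_demo.py, hypothetical driver, not part of this patch
    import numpy as np

    @profile  # injected by kernprof at runtime; NameError under plain python
    def psi2_like(a, b):
        # broadcast-and-reduce stand-in, shaped like the psi2 computations
        return np.exp(a[:, None, :] + b[None, :, :]).sum()

    if __name__ == "__main__":
        rng = np.random.default_rng(0)
        psi2_like(rng.standard_normal((50, 5)), rng.standard_normal((20, 5)))

Running "kernprof -l -v profile_demo.py" prints per-line timings, which is the
profiling step the commit message refers to; stripping @profile afterwards is
what keeps the module importable without line_profiler installed.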