diff --git a/GPy/core/parameterization/transformations.py b/GPy/core/parameterization/transformations.py index e17da404..06d3643a 100644 --- a/GPy/core/parameterization/transformations.py +++ b/GPy/core/parameterization/transformations.py @@ -33,7 +33,8 @@ class Transformation(object): class Logexp(Transformation): domain = _POSITIVE def f(self, x): - return np.where(x>_lim_val, x, np.log(1. + np.exp(x))) + return np.where(x>_lim_val, x, np.log(1. + np.exp(np.clip(x, -np.inf, _lim_val)))) + #raises overflow warning: return np.where(x>_lim_val, x, np.log(1. + np.exp(x))) def finv(self, f): return np.where(f>_lim_val, f, np.log(np.exp(f) - 1.)) def gradfactor(self, f): diff --git a/GPy/inference/latent_function_inference/dtc.py b/GPy/inference/latent_function_inference/dtc.py index bcd0aab8..dbbff6d0 100644 --- a/GPy/inference/latent_function_inference/dtc.py +++ b/GPy/inference/latent_function_inference/dtc.py @@ -44,9 +44,8 @@ class DTC(object): Kmmi, L, Li, _ = pdinv(Kmm) # Compute A - #LiUT, _ = dtrtrs(L, U.T*np.sqrt(beta), lower=1) - LiUT = np.dot(Li, U.T)*np.sqrt(beta) - A = tdot(LiUT) + np.eye(num_inducing) + LiUTbeta = np.dot(Li, U.T)*np.sqrt(beta) + A = tdot(LiUTbeta) + np.eye(num_inducing) # factor A LA = jitchol(A) diff --git a/GPy/inference/latent_function_inference/posterior.py b/GPy/inference/latent_function_inference/posterior.py index c3aa9b36..09ac96e8 100644 --- a/GPy/inference/latent_function_inference/posterior.py +++ b/GPy/inference/latent_function_inference/posterior.py @@ -2,7 +2,7 @@ # Licensed under the BSD 3-clause license (see LICENSE.txt) import numpy as np -from ...util.linalg import pdinv, dpotrs, tdot, dtrtrs, dpotri, symmetrify +from ...util.linalg import pdinv, dpotrs, tdot, dtrtrs, dpotri, symmetrify, jitchol, dtrtri class Posterior(object): """ @@ -80,8 +80,8 @@ class Posterior(object): @property def covariance(self): if self._covariance is None: - LiK, _ = dtrtrs(self.woodbury_chol, self._K, lower=1) - self._covariance = self._K - 
tdot(LiK.T) + #LiK, _ = dtrtrs(self.woodbury_chol, self._K, lower=1) + self._covariance = self._K - self._K.dot(self.woodbury_inv).dot(self._K) return self._covariance @property @@ -93,20 +93,30 @@ class Posterior(object): @property def woodbury_chol(self): if self._woodbury_chol is None: - #try computing woodbury chol from cov + #compute woodbury chol from the woodbury inverse if self._woodbury_inv is not None: _, _, self._woodbury_chol, _ = pdinv(self._woodbury_inv) + #Li = jitchol(self._woodbury_inv) + #self._woodbury_chol, _ = dtrtri(Li) + #W, _, _, _, = pdinv(self._woodbury_inv) + #symmetrify(W) + #self._woodbury_chol = jitchol(W) + #try computing woodbury chol from cov elif self._covariance is not None: + raise NotImplementedError("TODO: check code here") B = self._K - self._covariance tmp, _ = dpotrs(self.K_chol, B) self._woodbury_inv, _ = dpotrs(self.K_chol, tmp.T) _, _, self._woodbury_chol, _ = pdinv(self._woodbury_inv) + else: + raise ValueError("insufficient information to compute posterior") return self._woodbury_chol @property def woodbury_inv(self): if self._woodbury_inv is None: - self._woodbury_inv, _ = dpotri(self.woodbury_chol) + self._woodbury_inv, _ = dpotri(self.woodbury_chol, lower=0) + #self._woodbury_inv, _ = dpotrs(self.woodbury_chol, np.eye(self.woodbury_chol.shape[0]), lower=1) symmetrify(self._woodbury_inv) return self._woodbury_inv diff --git a/GPy/inference/latent_function_inference/varDTC.py b/GPy/inference/latent_function_inference/varDTC.py index 08329b5a..d7f770c8 100644 --- a/GPy/inference/latent_function_inference/varDTC.py +++ b/GPy/inference/latent_function_inference/varDTC.py @@ -34,7 +34,7 @@ class VarDTC(object): Note that L may have fewer columns than Y. """ N, D = Y.shape - if (N>D): + if (N>=D): return param_to_array(Y) else: return jitchol(tdot(Y))