diff --git a/GPy/kern/constructors.py b/GPy/kern/constructors.py index ec34242b..90b13600 100644 --- a/GPy/kern/constructors.py +++ b/GPy/kern/constructors.py @@ -44,7 +44,7 @@ def rbf(D,variance=1., lengthscale=None,ARD=False): part = rbfpart(D,variance,lengthscale,ARD) return kern(D, [part]) -def linear(D,variances=None,ARD=True): +def linear(D,variances=None,ARD=False): """ Construct a linear kernel. diff --git a/GPy/likelihoods/EP.py b/GPy/likelihoods/EP.py index f49ed275..9c55e5f7 100644 --- a/GPy/likelihoods/EP.py +++ b/GPy/likelihoods/EP.py @@ -33,7 +33,9 @@ class EP(likelihood): self.Z = 0 self.YYT = None - def predictive_values(self,mu,var): + def predictive_values(self,mu,var,full_cov): + if full_cov: + raise NotImplementedError, "Cannot make correlated predictions with an EP likelihood" return self.likelihood_function.predictive_values(mu,var) def _get_params(self): diff --git a/GPy/likelihoods/Gaussian.py b/GPy/likelihoods/Gaussian.py index a5084cc0..25d12491 100644 --- a/GPy/likelihoods/Gaussian.py +++ b/GPy/likelihoods/Gaussian.py @@ -43,15 +43,25 @@ class Gaussian(likelihood): self.covariance_matrix = np.eye(self.N)*self._variance self.precision = 1./self._variance - def predictive_values(self,mu,var): + def predictive_values(self,mu,var, full_cov): """ Un-normalize the prediction and add the likelihood variance, then return the 5%, 95% interval """ mean = mu*self._std + self._mean - true_var = (var + self._variance)*self._std**2 - _5pc = mean + - 2.*np.sqrt(true_var) - _95pc = mean + 2.*np.sqrt(true_var) - return mean, _5pc, _95pc + if full_cov: + if self.D >1: + raise NotImplementedError, "TODO" + #Note. for D>1, we need to re-normalise all the outputs independently. + # This will mess up computations of diag(true_var), below. 
+ #note that the upper, lower quantiles should be the same shape as mean + true_var = (var + np.eye(var.shape[0])*self._variance)*self._std**2 + _5pc = mean + - 2.*np.sqrt(np.diag(true_var)) + _95pc = mean + 2.*np.sqrt(np.diag(true_var)) + else: + true_var = (var + self._variance)*self._std**2 + _5pc = mean + - 2.*np.sqrt(true_var) + _95pc = mean + 2.*np.sqrt(true_var) + return mean, true_var, _5pc, _95pc def fit_full(self): """ diff --git a/GPy/likelihoods/likelihood.py b/GPy/likelihoods/likelihood.py index 6ec57c07..c1d9585e 100644 --- a/GPy/likelihoods/likelihood.py +++ b/GPy/likelihoods/likelihood.py @@ -25,11 +25,16 @@ class likelihood: def _get_param_names(self): raise NotImplementedError - def _set_params(self,x): + def _set_params(self, x): raise NotImplementedError def fit(self): raise NotImplementedError - def _gradients(self,partial): + def _gradients(self, partial): raise NotImplementedError + + def predictive_values(self, mu, var): + raise NotImplementedError + + diff --git a/GPy/likelihoods/likelihood_functions.py b/GPy/likelihoods/likelihood_functions.py index 3e2a0361..4b8e7013 100644 --- a/GPy/likelihoods/likelihood_functions.py +++ b/GPy/likelihoods/likelihood_functions.py @@ -48,14 +48,14 @@ class probit(likelihood_function): def predictive_values(self,mu,var): """ - Compute mean, and conficence interval (percentiles 5 and 95) of the prediction + Compute mean, variance and confidence interval (percentiles 5 and 95) of the prediction """ mu = mu.flatten() var = var.flatten() mean = stats.norm.cdf(mu/np.sqrt(1+var)) p_025 = np.zeros(mu.shape) p_975 = np.ones(mu.shape) - return mean, p_025, p_975 + return mean, np.nan*var, p_025, p_975 # TODO: better values here (mean is okay) class Poisson(likelihood_function): """ @@ -131,4 +131,4 @@ class Poisson(likelihood_function): tmp = stats.poisson.ppf(np.array([.025,.975]),mean) p_025 = tmp[:,0] p_975 = tmp[:,1] - return mean,p_025,p_975 + return mean,np.nan*mean,p_025,p_975 # better variance here TODO 
diff --git a/GPy/models/GP.py b/GPy/models/GP.py index 796ab7d6..53ba1183 100644 --- a/GPy/models/GP.py +++ b/GPy/models/GP.py @@ -140,7 +140,7 @@ class GP(model): KiKx = np.dot(self.Ki,Kx) if full_cov: Kxx = self.kern.K(_Xnew, slices1=slices,slices2=slices) - var = Kxx - np.dot(KiKx.T,Kx) #NOTE this won't work for plotting + var = Kxx - np.dot(KiKx.T,Kx) else: Kxx = self.kern.Kdiag(_Xnew, slices=slices) var = Kxx - np.sum(np.multiply(KiKx,Kx),0) @@ -179,7 +179,7 @@ class GP(model): mu, var = self._raw_predict(Xnew, slices, full_cov) #now push through likelihood TODO - mean, _025pm, _975pm = self.likelihood.predictive_values(mu, var) + mean, var, _025pm, _975pm = self.likelihood.predictive_values(mu, var, full_cov) return mean, var, _025pm, _975pm