Merge branch 'master' into devel

This commit is contained in:
Nicolo Fusi 2013-03-14 10:25:48 +00:00
commit 99e5075488
6 changed files with 31 additions and 14 deletions

View file

@@ -44,7 +44,7 @@ def rbf(D,variance=1., lengthscale=None,ARD=False):
     part = rbfpart(D,variance,lengthscale,ARD)
     return kern(D, [part])
-def linear(D,variances=None,ARD=True):
+def linear(D,variances=None,ARD=False):
     """
     Construct a linear kernel.

View file

@@ -33,7 +33,9 @@ class EP(likelihood):
         self.Z = 0
         self.YYT = None
-    def predictive_values(self,mu,var):
+    def predictive_values(self,mu,var,full_cov):
+        if full_cov:
+            raise NotImplementedError, "Cannot make correlated predictions with an EP likelihood"
         return self.likelihood_function.predictive_values(mu,var)
     def _get_params(self):

View file

@@ -43,15 +43,25 @@ class Gaussian(likelihood):
         self.covariance_matrix = np.eye(self.N)*self._variance
         self.precision = 1./self._variance
-    def predictive_values(self,mu,var):
+    def predictive_values(self,mu,var, full_cov):
         """
         Un-normalize the prediction and add the likelihood variance, then return the 5%, 95% interval
         """
         mean = mu*self._std + self._mean
+        if full_cov:
+            if self.D >1:
+                raise NotImplementedError, "TODO"
+                #Note. for D>1, we need to re-normalise all the outputs independently.
+                # This will mess up computations of diag(true_var), below.
+            #note that the upper, lower quantiles should be the same shape as mean
+            true_var = (var + np.eye(var.shape[0])*self._variance)*self._std**2
+            _5pc = mean + - 2.*np.sqrt(np.diag(true_var))
+            _95pc = mean + 2.*np.sqrt(np.diag(true_var))
+        else:
-        true_var = (var + self._variance)*self._std**2
-        _5pc = mean + - 2.*np.sqrt(true_var)
-        _95pc = mean + 2.*np.sqrt(true_var)
-        return mean, _5pc, _95pc
+            true_var = (var + self._variance)*self._std**2
+            _5pc = mean + - 2.*np.sqrt(true_var)
+            _95pc = mean + 2.*np.sqrt(true_var)
+        return mean, true_var, _5pc, _95pc
     def fit_full(self):
         """

View file

@@ -25,11 +25,16 @@ class likelihood:
     def _get_param_names(self):
         raise NotImplementedError
-    def _set_params(self,x):
+    def _set_params(self, x):
         raise NotImplementedError
     def fit(self):
         raise NotImplementedError
-    def _gradients(self,partial):
+    def _gradients(self, partial):
         raise NotImplementedError
+    def predictive_values(self, mu, var):
+        raise NotImplementedError

View file

@@ -48,14 +48,14 @@ class probit(likelihood_function):
     def predictive_values(self,mu,var):
         """
-        Compute mean, and conficence interval (percentiles 5 and 95) of the prediction
+        Compute mean, variance and conficence interval (percentiles 5 and 95) of the prediction
         """
         mu = mu.flatten()
         var = var.flatten()
         mean = stats.norm.cdf(mu/np.sqrt(1+var))
         p_025 = np.zeros(mu.shape)
         p_975 = np.ones(mu.shape)
-        return mean, p_025, p_975
+        return mean, np.nan*var, p_025, p_975 # TODO: better values here (mean is okay)

 class Poisson(likelihood_function):
     """
@@ -131,4 +131,4 @@ class Poisson(likelihood_function):
         tmp = stats.poisson.ppf(np.array([.025,.975]),mean)
         p_025 = tmp[:,0]
         p_975 = tmp[:,1]
-        return mean,p_025,p_975
+        return mean,np.nan*mean,p_025,p_975 # better variance here TODO

View file

@@ -140,7 +140,7 @@ class GP(model):
         KiKx = np.dot(self.Ki,Kx)
         if full_cov:
             Kxx = self.kern.K(_Xnew, slices1=slices,slices2=slices)
-            var = Kxx - np.dot(KiKx.T,Kx) #NOTE this won't work for plotting
+            var = Kxx - np.dot(KiKx.T,Kx)
         else:
             Kxx = self.kern.Kdiag(_Xnew, slices=slices)
             var = Kxx - np.sum(np.multiply(KiKx,Kx),0)
@@ -179,7 +179,7 @@ class GP(model):
         mu, var = self._raw_predict(Xnew, slices, full_cov)
         #now push through likelihood TODO
-        mean, _025pm, _975pm = self.likelihood.predictive_values(mu, var)
+        mean, var, _025pm, _975pm = self.likelihood.predictive_values(mu, var, full_cov)
         return mean, var, _025pm, _975pm