Merge remote-tracking branch 'upstream/devel' into devel

This commit is contained in:
Mike Croucher 2015-04-09 14:21:15 +01:00
commit 82a9bb9d93
4 changed files with 54 additions and 17 deletions

View file

@@ -139,7 +139,7 @@ class NormalPosterior(VariationalPosterior):
holds the means and variances for a factorizing multivariate normal distribution
'''
def plot(self, *args):
def plot(self, *args, **kwargs):
"""
Plot latent space X in 1D:
@@ -148,8 +148,7 @@ class NormalPosterior(VariationalPosterior):
import sys
assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
from ...plotting.matplot_dep import variational_plots
import matplotlib
return variational_plots.plot(self,*args)
return variational_plots.plot(self, *args, **kwargs)
class SpikeAndSlabPosterior(VariationalPosterior):
'''

View file

@@ -60,7 +60,10 @@ class White(Static):
return np.zeros((Z.shape[0], Z.shape[0]), dtype=np.float64)
def update_gradients_full(self, dL_dK, X, X2=None):
    """
    Set the gradient of the white-noise variance from dL/dK.

    :param dL_dK: gradient of the objective w.r.t. the covariance matrix K
    :param X: input data (unused by a white-noise kernel)
    :param X2: optional second input block; when given, K(X, X2) is the
        zero cross-covariance, so the variance gradient is exactly 0.

    NOTE: the diff residue above showed a stale unconditional
    ``np.trace(dL_dK)`` assignment before the conditional; the merged
    (new) behavior is the if/else below — trace only when X2 is None.
    """
    if X2 is None:
        # K = variance * I on a single block, so dK/dvariance = I and
        # the chain rule gives trace(dL_dK).
        self.variance.gradient = np.trace(dL_dK)
    else:
        # Cross-covariance of white noise is identically zero.
        self.variance.gradient = 0.
def update_gradients_diag(self, dL_dKdiag, X):
    """
    Set the gradient of the white-noise variance from the gradient of
    the objective w.r.t. the diagonal of K.

    diag(K) = variance * ones, so the chain rule reduces to the sum of
    the incoming diagonal gradients.
    """
    total_grad = np.sum(dL_dKdiag)
    self.variance.gradient = total_grad

View file

@@ -70,7 +70,7 @@ class Likelihood(Parameterized):
"""
raise NotImplementedError
def log_predictive_density(self, y_test, mu_star, var_star):
def log_predictive_density(self, y_test, mu_star, var_star, Y_metadata=None):
"""
Calculation of the log predictive density
@@ -87,13 +87,46 @@ class Likelihood(Parameterized):
assert y_test.shape==mu_star.shape
assert y_test.shape==var_star.shape
assert y_test.shape[1] == 1
def integral_generator(y, m, v):
flat_y_test = y_test.flatten()
flat_mu_star = mu_star.flatten()
flat_var_star = var_star.flatten()
if Y_metadata is not None:
#Need to zip individual elements of Y_metadata aswell
Y_metadata_flat = {}
if Y_metadata is not None:
for key, val in Y_metadata.items():
Y_metadata_flat[key] = np.atleast_1d(val).reshape(-1,1)
zipped_values = []
for i in range(y_test.shape[0]):
y_m = {}
for key, val in Y_metadata_flat.items():
if np.isscalar(val) or val.shape[0] == 1:
y_m[key] = val
else:
#Won't broadcast yet
y_m[key] = val[i]
zipped_values.append((flat_y_test[i], flat_mu_star[i], flat_var_star[i], y_m))
else:
#Otherwise just pass along None's
zipped_values = zip(flat_y_test, flat_mu_star, flat_var_star, [None]*y_test.shape[0])
def integral_generator(y, m, v, y_m):
"""Generate a function which can be integrated to give p(Y*|Y) = int p(Y*|f*)p(f*|Y) df*"""
def f(f_star):
return self.pdf(f_star, y)*np.exp(-(1./(2*v))*np.square(m-f_star))
#exponent = np.exp(-(1./(2*v))*np.square(m-f_star))
#from GPy.util.misc import safe_exp
#exponent = safe_exp(exponent)
#return self.pdf(f_star, y, y_m)*exponent
#More stable in the log space
return np.exp(self.logpdf(f_star, y, y_m) -(1./(2*v))*np.square(m-f_star))
return f
scaled_p_ystar, accuracy = zip(*[quad(integral_generator(y, m, v), -np.inf, np.inf) for y, m, v in zip(y_test.flatten(), mu_star.flatten(), var_star.flatten())])
scaled_p_ystar, accuracy = zip(*[quad(integral_generator(y, m, v, y_m), -np.inf, np.inf) for y, m, v, y_m in zipped_values])
scaled_p_ystar = np.array(scaled_p_ystar).reshape(-1,1)
p_ystar = scaled_p_ystar/np.sqrt(2*np.pi*var_star)
return np.log(p_ystar)

View file

@@ -1,6 +1,6 @@
import pylab as pb, numpy as np
def plot(parameterized, fignum=None, ax=None, colors=None):
def plot(parameterized, fignum=None, ax=None, colors=None, figsize=(12, 6)):
"""
Plot latent space X in 1D:
@@ -13,13 +13,15 @@ def plot(parameterized, fignum=None, ax=None, colors=None):
"""
if ax is None:
fig = pb.figure(num=fignum, figsize=(8, min(12, (2 * parameterized.mean.shape[1]))))
fig = pb.figure(num=fignum, figsize=figsize)
if colors is None:
colors = pb.gca()._get_lines.color_cycle
pb.clf()
else:
colors = iter(colors)
plots = []
lines = []
fills = []
bg_lines = []
means, variances = parameterized.mean, parameterized.variance
x = np.arange(means.shape[0])
for i in range(means.shape[1]):
@@ -29,20 +31,20 @@ def plot(parameterized, fignum=None, ax=None, colors=None):
a = ax[i]
else:
raise ValueError("Need one ax per latent dimension input_dim")
a.plot(means, c='k', alpha=.3)
plots.extend(a.plot(x, means.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i)))
a.fill_between(x,
bg_lines.append(a.plot(means, c='k', alpha=.3))
lines.extend(a.plot(x, means.T[i], c=colors.next(), label=r"$\mathbf{{X_{{{}}}}}$".format(i)))
fills.append(a.fill_between(x,
means.T[i] - 2 * np.sqrt(variances.T[i]),
means.T[i] + 2 * np.sqrt(variances.T[i]),
facecolor=plots[-1].get_color(),
alpha=.3)
facecolor=lines[-1].get_color(),
alpha=.3))
a.legend(borderaxespad=0.)
a.set_xlim(x.min(), x.max())
if i < means.shape[1] - 1:
a.set_xticklabels('')
pb.draw()
fig.tight_layout(h_pad=.01) # , rect=(0, 0, 1, .95))
return fig
return dict(lines=lines, fills=fills, bg_lines=bg_lines)
def plot_SpikeSlab(parameterized, fignum=None, ax=None, colors=None, side_by_side=True):
"""