input_sensitivity and ard plotting

Max Zwiessele 2014-02-24 14:47:43 +00:00
parent d90d67a8c1
commit b200b9fa90
11 changed files with 108 additions and 83 deletions

View file

@@ -340,6 +340,10 @@ class Parameterizable(Constrainable):
         if add_self: names = map(lambda x: adjust(self.name) + "." + x, names)
         return names
 
+    @property
+    def num_params(self):
+        return len(self._parameters_)
+
     def _add_parameter_name(self, param):
         pname = adjust_name_for_printing(param.name)
         # and makes sure to not delete programmatically added parameters

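The new num_params property simply counts the Param objects registered in self._parameters_. A minimal stand-alone sketch of the behaviour (plain Python, with strings standing in for GPy's Param objects):

import numpy as np

class ToyParameterized(object):
    def __init__(self):
        # stand-ins for the Param objects GPy keeps in _parameters_
        self._parameters_ = ['variance', 'lengthscale']

    @property
    def num_params(self):
        return len(self._parameters_)

print(ToyParameterized().num_params)  # -> 2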
View file

@@ -164,12 +164,11 @@ def bgplvm_oil(optimize=True, verbose=1, plot=True, N=200, Q=7, num_inducing=40,
     _np.random.seed(0)
     data = GPy.util.datasets.oil()
-    kernel = GPy.kern.RBF(Q, 1., [.1] * Q, ARD=True)  # + GPy.kern.Bias(Q, _np.exp(-2))
+    kernel = GPy.kern.RBF(Q, 1., _np.random.uniform(0, 1, (Q,)), ARD=True)  # + GPy.kern.Bias(Q, _np.exp(-2))
     Y = data['X'][:N]
     m = GPy.models.BayesianGPLVM(Y, Q, kernel=kernel, num_inducing=num_inducing, **k)
     m.data_labels = data['Y'][:N].argmax(axis=1)
-    m['.*noise.var'] = Y.var() / 100.
     if optimize:
         m.optimize('scg', messages=verbose, max_iters=max_iters, gtol=.05)

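The changed kernel line swaps the constant [.1] * Q lengthscales for Q independent uniform draws, so the ARD dimensions start with distinct sensitivities instead of identical ones. A small sketch of what that initialisation produces (numpy only, seed as in the demo):

import numpy as np

np.random.seed(0)
Q = 7
lengthscales = np.random.uniform(0, 1, (Q,))  # one lengthscale per latent dimension
print(lengthscales)       # seven distinct starting values in (0, 1)
print(1. / lengthscales)  # the corresponding initial input sensitivities (see RBF.input_sensitivity below)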
View file

@@ -83,7 +83,7 @@ class Add(Kern):
         from white import White
         from rbf import RBF
         #from rbf_inv import RBFInv
-        #from bias import Bias
+        from bias import Bias
         from linear import Linear
         #from fixed import Fixed
@@ -131,11 +131,11 @@ class Add(Kern):
     def gradients_Z_variational(self, dL_dKmm, dL_dpsi0, dL_dpsi1, dL_dpsi2, mu, S, Z):
-        from white import white
-        from rbf import rbf
+        from white import White
+        from rbf import RBF
         #from rbf_inv import rbfinv
-        #from bias import bias
-        from linear import linear
+        from bias import Bias
+        from linear import Linear
         #from fixed import fixed
         target = np.zeros(Z.shape)
@@ -146,15 +146,15 @@ class Add(Kern):
             for p2, is2 in zip(self._parameters_, self.input_slices):
                 if p2 is p1:
                     continue
-                if isinstance(p2, white):
+                if isinstance(p2, White):
                     continue
-                elif isinstance(p2, bias):
+                elif isinstance(p2, Bias):
                     eff_dL_dpsi1 += dL_dpsi2.sum(1) * p2.variance * 2.
                 else:
-                    eff_dL_dpsi1 += dL_dpsi2.sum(1) * p2.psi1(z[:,is2], mu[:,is2], s[:,is2]) * 2.
-            target += p1.gradients_z_variational(dL_dkmm, dL_dpsi0, eff_dL_dpsi1, dL_dpsi2, mu[:,is1], s[:,is1], z[:,is1])
+                    eff_dL_dpsi1 += dL_dpsi2.sum(1) * p2.psi1(Z[:,is2], mu[:,is2], S[:,is2]) * 2.
+            target += p1.gradients_Z_variational(dL_dKmm, dL_dpsi0, eff_dL_dpsi1, dL_dpsi2, mu[:,is1], S[:,is1], Z[:,is1])
         return target
 
     def gradients_muS_variational(self, dL_dKmm, dL_dpsi0, dL_dpsi1, dL_dpsi2, mu, S, Z):
@@ -195,6 +195,12 @@ class Add(Kern):
         from ..plotting.matplot_dep import kernel_plots
         kernel_plots.plot(self,*args)
 
+    def input_sensitivity(self):
+        in_sen = np.zeros((self.input_dim, self.num_params))
+        for i, [p, i_s] in enumerate(zip(self._parameters_, self.input_slices)):
+            in_sen[i_s, i] = p.input_sensitivity()
+        return in_sen
+
     def _getstate(self):
         """
         Get the current state of the class,

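Add.input_sensitivity builds an (input_dim x num_params) matrix: column i holds part i's per-dimension sensitivity, written only into the input slice that part operates on. A stand-alone re-enactment of that loop, with hypothetical slices and sensitivity values (numpy only):

import numpy as np

input_dim = 3
# (input_slice, per-dimension sensitivity) for each summand, e.g. an ARD
# RBF on dimensions 0-1 and a Linear kernel on dimension 2
parts = [(slice(0, 2), np.array([0.5, 2.0])),
         (slice(2, 3), np.array([1.3]))]

in_sen = np.zeros((input_dim, len(parts)))
for i, (i_s, sens) in enumerate(parts):
    in_sen[i_s, i] = sens
print(in_sen)
# [[ 0.5  0. ]
#  [ 2.   0. ]
#  [ 0.   1.3]]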
View file

@@ -28,12 +28,12 @@ class Bias(Kern):
         self.variance.gradient = dL_dK.sum()
 
     def update_gradients_diag(self, dL_dKdiag, X):
-        self.variance.gradient = dL_dK.sum()
+        self.variance.gradient = dL_dKdiag.sum()
 
-    def gradients_X(self, dL_dK, X, X2, target):
+    def gradients_X(self, dL_dK, X, X2):
         return np.zeros(X.shape)
 
-    def gradients_X_diag(self, dL_dKdiag, X, target):
+    def gradients_X_diag(self, dL_dKdiag, X):
         return np.zeros(X.shape)

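The diagonal update previously summed dL_dK, a name that does not exist inside update_gradients_diag; the fix sums dL_dKdiag. That is the right quantity because the Bias kernel is constant, Kdiag(X) = variance * ones, so dL/dvariance is the plain sum of the incoming diagonal gradient. A one-line check of that identity (numpy only, hypothetical values):

import numpy as np

dL_dKdiag = np.array([0.1, -0.3, 0.7])
# Kdiag = variance * np.ones(3), hence dL/dvariance = sum(dL_dKdiag * 1)
print(np.sum(dL_dKdiag * np.ones(3)) == dL_dKdiag.sum())  # True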
View file

@@ -61,16 +61,20 @@ class Kern(Parameterized):
     def gradients_q_variational(self, dL_dKmm, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, posterior_variational):
         raise NotImplementedError
 
-    def plot_ARD(self, *args):
-        """If an ARD kernel is present, plot a bar representation using matplotlib
-
-        See GPy.plotting.matplot_dep.plot_ARD
-        """
+    def plot_ARD(self, *args, **kw):
+        if "matplotlib" in sys.modules:
+            from ...plotting.matplot_dep import kernel_plots
+            self.plot_ARD.__doc__ += kernel_plots.plot_ARD.__doc__
+
         assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
         from ...plotting.matplot_dep import kernel_plots
-        return kernel_plots.plot_ARD(self,*args)
+        return kernel_plots.plot_ARD(self, *args, **kw)
+
+    def input_sensitivity(self):
+        """
+        Returns the sensitivity for each dimension of this kernel.
+        """
+        return np.zeros(self.input_dim)
 
     def __add__(self, other):
         """ Overloading of the '+' operator. for more control, see self.add """
         return self.add(other)

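The base class now ships a default input_sensitivity of zeros, so plot_ARD can treat every kernel uniformly and the ARD-aware kernels (Linear, RBF, Add) only need to override one method. A sketch of the pattern with the GPy machinery stripped out (assumed to mirror the diff; not the library's actual classes):

import numpy as np

class KernSketch(object):
    def __init__(self, input_dim):
        self.input_dim = input_dim

    def input_sensitivity(self):
        # default: no dimension is singled out
        return np.zeros(self.input_dim)

class LinearSketch(KernSketch):
    def __init__(self, input_dim, variances, ARD=False):
        super(LinearSketch, self).__init__(input_dim)
        self.variances = np.asarray(variances, dtype=float)
        self.ARD = ARD

    def input_sensitivity(self):
        if self.ARD: return self.variances
        else: return self.variances.repeat(self.input_dim)

print(KernSketch(3).input_sensitivity())                               # [0. 0. 0.]
print(LinearSketch(3, [0.3, 0.0, 0.8], ARD=True).input_sensitivity())  # [0.3 0.  0.8]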
View file

@@ -252,3 +252,6 @@ class Linear(Kern):
         return np.dot(ZA, inner).swapaxes(0, 1) # NOTE: self.ZAinner \in [num_inducing x N x input_dim]!
+
+    def input_sensitivity(self):
+        if self.ARD: return self.variances
+        else: return self.variances.repeat(self.input_dim)

View file

@@ -382,3 +382,7 @@ class RBF(Kern):
                       type_converters=weave.converters.blitz, **self.weave_options)
         return mudist, mudist_sq, psi2_exponent, psi2
+
+    def input_sensitivity(self):
+        if self.ARD: return 1. / self.lengthscale
+        else: return (1. / self.lengthscale).repeat(self.input_dim)

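For the RBF kernel the sensitivity is the inverse lengthscale: a short lengthscale means the function varies quickly along that dimension (the dimension is relevant), a long one means it is effectively switched off. A worked check of the new method's arithmetic (numpy only, hypothetical lengthscales):

import numpy as np

lengthscale = np.array([0.5, 1.0, 4.0])  # ARD: one lengthscale per input dimension
print(1. / lengthscale)                  # -> [2.   1.   0.25]
# non-ARD case: a single shared lengthscale repeated over all dimensions
print((1. / np.array([2.0])).repeat(3))  # -> [0.5 0.5 0.5]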
View file

@@ -10,7 +10,7 @@ from ..inference.optimization import SCG
 from ..util import linalg
 from ..core.parameterization.variational import NormalPosterior, NormalPrior
 
-class BayesianGPLVM(SparseGP, GPLVM):
+class BayesianGPLVM(SparseGP):
     """
     Bayesian Gaussian Process Latent Variable Model
 
@@ -25,7 +25,8 @@ class BayesianGPLVM(SparseGP, GPLVM):
     def __init__(self, Y, input_dim, X=None, X_variance=None, init='PCA', num_inducing=10,
                  Z=None, kernel=None, inference_method=None, likelihood=None, name='bayesian gplvm', **kwargs):
         if X == None:
-            X = self.initialise_latent(init, input_dim, Y)
+            from ..util.initialization import initialize_latent
+            X = initialize_latent(init, input_dim, Y)
         self.init = init
 
         if X_variance is None:

View file

@@ -28,28 +28,20 @@ class GPLVM(GP):
         :type init: 'PCA'|'random'
         """
         if X is None:
-            X = self.initialise_latent(init, input_dim, Y)
+            from ..util.initialization import initialize_latent
+            X = initialize_latent(init, input_dim, Y)
         if kernel is None:
-            kernel = kern.rbf(input_dim, ARD=input_dim > 1) + kern.bias(input_dim, np.exp(-2))
+            kernel = kern.RBF(input_dim, ARD=input_dim > 1) + kern.Bias(input_dim, np.exp(-2))
         likelihood = Gaussian()
         super(GPLVM, self).__init__(X, Y, kernel, likelihood, name='GPLVM')
-        self.X = Param('X', X)
+        self.X = Param('latent_mean', X)
         self.add_parameter(self.X, index=0)
 
-    def initialise_latent(self, init, input_dim, Y):
-        Xr = np.random.randn(Y.shape[0], input_dim)
-        if init == 'PCA':
-            PC = PCA(Y, input_dim)[0]
-            Xr[:PC.shape[0], :PC.shape[1]] = PC
-        else:
-            pass
-        return Xr
-
     def parameters_changed(self):
-        GP.parameters_changed(self)
-        self.X.gradient = self.kern.gradients_X(self.posterior.dL_dK, self.X)
+        super(GPLVM, self).parameters_changed()
+        self.X.gradient = self.kern.gradients_X(self._dL_dK, self.X, None)
 
     def _getstate(self):
         return GP._getstate(self)
 
@@ -79,7 +71,8 @@ class GPLVM(GP):
         pb.plot(mu[:, 0], mu[:, 1], 'k', linewidth=1.5)
 
     def plot_latent(self, *args, **kwargs):
-        return util.plot_latent.plot_latent(self, *args, **kwargs)
+        from ..plotting.matplot_dep import dim_reduction_plots
+        return dim_reduction_plots.plot_latent(self, *args, **kwargs)
 
     def plot_magnification(self, *args, **kwargs):
         return util.plot_latent.plot_magnification(self, *args, **kwargs)

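Both GPLVM and BayesianGPLVM now delegate to GPy.util.initialization.initialize_latent (the module imported at the bottom of this commit). Its presumed behaviour mirrors the initialise_latent method removed above: random-normal latents, overwritten by principal components when init == 'PCA'. A self-contained sketch, with a plain SVD standing in for GPy's PCA helper:

import numpy as np

def initialize_latent(init, input_dim, Y):
    Xr = np.random.randn(Y.shape[0], input_dim)
    if init == 'PCA':
        # project centred Y onto its leading principal components
        Yc = Y - Y.mean(0)
        U, s, Vt = np.linalg.svd(Yc, full_matrices=False)
        PC = Yc.dot(Vt[:input_dim].T)
        Xr[:PC.shape[0], :PC.shape[1]] = PC
    return Xr

X0 = initialize_latent('PCA', 2, np.random.randn(20, 5))
print(X0.shape)  # (20, 2)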
View file

@@ -9,8 +9,41 @@ from matplotlib.transforms import offset_copy
 from ...kern import Linear
 
+def add_bar_labels(fig, ax, bars, bottom=0):
+    transOffset = offset_copy(ax.transData, fig=fig,
+                              x=0., y=-2., units='points')
+    transOffsetUp = offset_copy(ax.transData, fig=fig,
+                                x=0., y=1., units='points')
+    for bar in bars:
+        for i, [patch, num] in enumerate(zip(bar.patches, np.arange(len(bar.patches)))):
+            if len(bottom) == len(bar): b = bottom[i]
+            else: b = bottom
+            height = patch.get_height() + b
+            xi = patch.get_x() + patch.get_width() / 2.
+            va = 'top'
+            c = 'w'
+            t = TextPath((0, 0), "${xi}$".format(xi=xi), rotation=0, usetex=True, ha='center')
+            transform = transOffset
+            if patch.get_extents().height <= t.get_extents().height + 3:
+                va = 'bottom'
+                c = 'k'
+                transform = transOffsetUp
+            ax.text(xi, height, "${xi}$".format(xi=int(num)), color=c, rotation=0, ha='center', va=va, transform=transform)
+    ax.set_xticks([])
+
+def plot_bars(fig, ax, x, ard_params, color, name, bottom=0):
+    from ...util.misc import param_to_array
+    return ax.bar(left=x, height=param_to_array(ard_params), width=.8,
+                  bottom=bottom, align='center',
+                  color=color, edgecolor='k', linewidth=1.2,
+                  label=name.replace("_", " "))
+
 def plot_ARD(kernel, fignum=None, ax=None, title='', legend=False):
-    """If an ARD kernel is present, plot a bar representation using matplotlib
+    """
+    If an ARD kernel is present, plot a bar representation using matplotlib
 
     :param fignum: figure number of the plot
     :param ax: matplotlib axis to plot on
@@ -24,50 +57,27 @@ def plot_ARD(kernel, fignum=None, ax=None, title='', legend=False):
         ax = fig.add_subplot(111)
     else:
         fig = ax.figure
 
+    if title is None:
+        ax.set_title('ARD parameters, %s kernel' % kernel.name)
+    else:
+        ax.set_title(title)
+
     Tango.reset()
-    xticklabels = []
     bars = []
-    x0 = 0
-    #for p in kernel._parameters_:
-    p = kernel
-    c = Tango.nextMedium()
-    if hasattr(p, 'ARD') and p.ARD:
-        if title is None:
-            ax.set_title('ARD parameters, %s kernel' % p.name)
-        else:
-            ax.set_title(title)
-        if isinstance(p, Linear):
-            ard_params = p.variances
-        else:
-            ard_params = 1. / p.lengthscale
-        x = np.arange(x0, x0 + len(ard_params))
-        from ...util.misc import param_to_array
-        bars.append(ax.bar(x, param_to_array(ard_params), align='center', color=c, edgecolor='k', linewidth=1.2, label=p.name.replace("_"," ")))
-        xticklabels.extend([r"$\mathrm{{{name}}}\ {x}$".format(name=p.name, x=i) for i in np.arange(len(ard_params))])
-        x0 += len(ard_params)
-    x = np.arange(x0)
-    transOffset = offset_copy(ax.transData, fig=fig,
-                              x=0., y=-2., units='points')
-    transOffsetUp = offset_copy(ax.transData, fig=fig,
-                                x=0., y=1., units='points')
-    for bar in bars:
-        for patch, num in zip(bar.patches, np.arange(len(bar.patches))):
-            height = patch.get_height()
-            xi = patch.get_x() + patch.get_width() / 2.
-            va = 'top'
-            c = 'w'
-            t = TextPath((0, 0), "${xi}$".format(xi=xi), rotation=0, usetex=True, ha='center')
-            transform = transOffset
-            if patch.get_extents().height <= t.get_extents().height + 3:
-                va = 'bottom'
-                c = 'k'
-                transform = transOffsetUp
-            ax.text(xi, height, "${xi}$".format(xi=int(num)), color=c, rotation=0, ha='center', va=va, transform=transform)
-    # for xi, t in zip(x, xticklabels):
-    #     ax.text(xi, maxi / 2, t, rotation=90, ha='center', va='center')
-    # ax.set_xticklabels(xticklabels, rotation=17)
-    ax.set_xticks([])
-    ax.set_xlim(-.5, x0 - .5)
+    ard_params = np.atleast_2d(kernel.input_sensitivity())
+    bottom = 0
+    x = np.arange(kernel.input_dim)
+
+    for i in range(ard_params.shape[-1]):
+        c = Tango.nextMedium()
+        bars.append(plot_bars(fig, ax, x, ard_params[:,i], c, kernel._parameters_[i].name, bottom=bottom))
+        bottom += ard_params[:,i]
+
+    ax.set_xlim(-.5, kernel.input_dim - .5)
+    add_bar_labels(fig, ax, [bars[-1]], bottom=bottom-ard_params[:,i])
+
     if legend:
         if title is '':
             mode = 'expand'

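The rewritten plot_ARD no longer special-cases Linear vs. RBF; it stacks one bar segment per kernel part, per input dimension, straight from input_sensitivity. A stand-alone re-enactment of the layout with current matplotlib (the diff's ax.bar(left=...) keyword belongs to the older API):

import numpy as np
import matplotlib.pyplot as plt

# columns = kernel parts, rows = input dimensions (as from Add.input_sensitivity)
ard_params = np.column_stack([[2.0, 1.0, 0.25],   # e.g. 1/lengthscale of an RBF part
                              [0.3, 0.0, 0.8]])   # e.g. variances of a Linear part
x = np.arange(ard_params.shape[0])
bottom = np.zeros(len(x))

fig, ax = plt.subplots()
for i in range(ard_params.shape[-1]):
    # stack this part's sensitivities on top of the previous parts'
    ax.bar(x, ard_params[:, i], width=.8, align='center', bottom=bottom,
           edgecolor='k', linewidth=1.2, label='part %d' % i)
    bottom += ard_params[:, i]
ax.set_xlim(-.5, len(x) - .5)
ax.set_xticks(x)
ax.legend()
plt.show()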
View file

@@ -13,6 +13,7 @@ import classification
 import subarray_and_sorting
 import caching
 import diag
+import initialization
 
 try:
     import sympy