Add active_dims as an extra parameter for kernels; it tells each kernel which input dimensions to operate on.

This commit is contained in:
Max Zwiessele 2014-03-14 09:18:08 +00:00
parent 328e0124c7
commit efcce6d0af
12 changed files with 73 additions and 59 deletions

View file

@ -31,8 +31,8 @@ class MLP(Kern):
"""
def __init__(self, input_dim, variance=1., weight_variance=1., bias_variance=100., name='mlp'):
super(MLP, self).__init__(input_dim, name)
def __init__(self, input_dim, variance=1., weight_variance=1., bias_variance=100., active_dims=None, name='mlp'):
super(MLP, self).__init__(input_dim, active_dims, name)
self.variance = Param('variance', variance, Logexp())
self.weight_variance = Param('weight_variance', weight_variance, Logexp())
self.bias_variance = Param('bias_variance', bias_variance, Logexp())