diff --git a/GPy/core/mapping.py b/GPy/core/mapping.py
index ef0af16c..efd9476f 100644
--- a/GPy/core/mapping.py
+++ b/GPy/core/mapping.py
@@ -10,11 +10,11 @@ class Mapping(Parameterized):
     Base model for shared behavior between models that can act like a mapping.
     """
 
-    def __init__(self, input_dim, output_dim):
+    def __init__(self, input_dim, output_dim, name='mapping'):
         self.input_dim = input_dim
         self.output_dim = output_dim
-        super(Mapping, self).__init__()
+        super(Mapping, self).__init__(name=name)
         # Model.__init__(self)
         # All leaf nodes should call self._set_params(self._get_params()) at
         # the end
 
diff --git a/GPy/mappings/linear.py b/GPy/mappings/linear.py
index 5846903d..075b8556 100644
--- a/GPy/mappings/linear.py
+++ b/GPy/mappings/linear.py
@@ -3,6 +3,7 @@
 
 import numpy as np
 from ..core.mapping import Mapping
+from ..core.parameterization import Param
 
 class Linear(Mapping):
     """
@@ -16,38 +17,23 @@ class Linear(Mapping):
     :type X: ndarray
     :param output_dim: dimension of output.
     :type output_dim: int
-    
+
     """
 
-    def __init__(self, input_dim=1, output_dim=1):
-        self.name = 'linear'
-        Mapping.__init__(self, input_dim=input_dim, output_dim=output_dim)
-        self.num_params = self.output_dim*(self.input_dim + 1)
-        self.W = np.array((self.input_dim, self.output_dim))
-        self.bias = np.array(self.output_dim)
-        self.randomize()
-
-    def _get_param_names(self):
-        return sum([['W_%i_%i' % (n, d) for d in range(self.output_dim)] for n in range(self.input_dim)], []) + ['bias_%i' % d for d in range(self.output_dim)]
-
-    def _get_params(self):
-        return np.hstack((self.W.flatten(), self.bias))
-
-    def _set_params(self, x):
-        self.W = x[:self.input_dim * self.output_dim].reshape(self.input_dim, self.output_dim).copy()
-        self.bias = x[self.input_dim*self.output_dim:].copy()
-    def randomize(self):
-        self.W = np.random.randn(self.input_dim, self.output_dim)/np.sqrt(self.input_dim + 1)
-        self.bias = np.random.randn(self.output_dim)/np.sqrt(self.input_dim + 1)
+    def __init__(self, input_dim=1, output_dim=1, name='linear_map'):
+        Mapping.__init__(self, input_dim=input_dim, output_dim=output_dim, name=name)
+        # scaled random initialization, as randomize() previously provided
+        self.W = Param('W', np.random.randn(self.input_dim, self.output_dim)/np.sqrt(self.input_dim + 1))
+        self.bias = Param('bias', np.random.randn(self.output_dim)/np.sqrt(self.input_dim + 1))
+        self.add_parameters(self.W, self.bias)
 
     def f(self, X):
         return np.dot(X,self.W) + self.bias
 
     def df_dtheta(self, dL_df, X):
-        self._df_dW = (dL_df[:, :, None]*X[:, None, :]).sum(0).T
-        self._df_dbias = (dL_df.sum(0))
-        return np.hstack((self._df_dW.flatten(), self._df_dbias))
-
-    def df_dX(self, dL_df, X):
-        return (dL_df[:, None, :]*self.W[None, :, :]).sum(2)
-
+        df_dW = (dL_df[:, :, None]*X[:, None, :]).sum(0).T
+        df_dbias = dL_df.sum(0)
+        return np.hstack((df_dW.flatten(), df_dbias))
+
+    def dL_dX(self, dL_df, X):
+        return (dL_df[:, None, :]*self.W[None, :, :]).sum(2)
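
As a sanity check on the gradient expressions kept by this patch, here is a small self-contained numpy sketch. It deliberately avoids GPy's `Param`/`Parameterized` machinery, so the variable names and the surrogate loss below are illustrative only, not the library's API; it compares the `df_dtheta` expression for `W` against a finite-difference estimate.

```python
import numpy as np

# f(X) = X.dot(W) + bias, with surrogate loss L = (dL_df * f(X)).sum(),
# so that dL/df is the known constant matrix dL_df.
rng = np.random.RandomState(0)
N, input_dim, output_dim = 5, 3, 2

X = rng.randn(N, input_dim)
W = rng.randn(input_dim, output_dim)
bias = rng.randn(output_dim)
dL_df = rng.randn(N, output_dim)

# Gradient expressions as written in the patch:
df_dW = (dL_df[:, :, None] * X[:, None, :]).sum(0).T   # shape (input_dim, output_dim)
df_dbias = dL_df.sum(0)                                # shape (output_dim,)
dL_dX = (dL_df[:, None, :] * W[None, :, :]).sum(2)     # shape (N, input_dim)

# Finite-difference estimate of dL/dW for comparison.
def loss(W_):
    return (dL_df * (X.dot(W_) + bias)).sum()

eps = 1e-6
fd_dW = np.zeros_like(W)
for i in range(input_dim):
    for j in range(output_dim):
        Wp, Wm = W.copy(), W.copy()
        Wp[i, j] += eps
        Wm[i, j] -= eps
        fd_dW[i, j] = (loss(Wp) - loss(Wm)) / (2 * eps)

print(np.allclose(df_dW, fd_dW))   # expect True
```

Once `W` and `bias` are `Param` objects registered via `add_parameters`, they become nodes in the parameter hierarchy, so the equivalent comparison should also be available through `checkgrad()` on any GPy model that uses this mapping.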