From 99545500b125d8b247101619c4a4051a786e2108 Mon Sep 17 00:00:00 2001 From: James Hensman Date: Mon, 23 Mar 2015 12:38:15 +0000 Subject: [PATCH] a little work on mappings --- GPy/core/mapping.py | 68 ---------------------------------- GPy/old_tests/mapping_tests.py | 24 ++++++++++++ 2 files changed, 24 insertions(+), 68 deletions(-) diff --git a/GPy/core/mapping.py b/GPy/core/mapping.py index 111fec6f..25fe504a 100644 --- a/GPy/core/mapping.py +++ b/GPy/core/mapping.py @@ -74,72 +74,4 @@ class Bijective_mapping(Mapping): """Inverse mapping from output domain of the function to the inputs.""" raise NotImplementedError -from model import Model - -class Mapping_check_model(Model): - """ - This is a dummy model class used as a base class for checking that the - gradients of a given mapping are implemented correctly. It enables - checkgradient() to be called independently on each mapping. - """ - def __init__(self, mapping=None, dL_df=None, X=None): - num_samples = 20 - if mapping==None: - mapping = GPy.mapping.linear(1, 1) - if X==None: - X = np.random.randn(num_samples, mapping.input_dim) - if dL_df==None: - dL_df = np.ones((num_samples, mapping.output_dim)) - - self.mapping=mapping - self.X = X - self.dL_df = dL_df - self.num_params = self.mapping.num_params - Model.__init__(self) - - - def _get_params(self): - return self.mapping._get_params() - - def _get_param_names(self): - return self.mapping._get_param_names() - - def _set_params(self, x): - self.mapping._set_params(x) - - def log_likelihood(self): - return (self.dL_df*self.mapping.f(self.X)).sum() - - def _log_likelihood_gradients(self): - raise NotImplementedError, "This needs to be implemented to use the Mapping_check_model class." - -class Mapping_check_df_dtheta(Mapping_check_model): - """This class allows gradient checks for the gradient of a mapping with respect to parameters. 
""" - def __init__(self, mapping=None, dL_df=None, X=None): - Mapping_check_model.__init__(self,mapping=mapping,dL_df=dL_df, X=X) - - def _log_likelihood_gradients(self): - return self.mapping.df_dtheta(self.dL_df, self.X) - - -class Mapping_check_df_dX(Mapping_check_model): - """This class allows gradient checks for the gradient of a mapping with respect to X. """ - def __init__(self, mapping=None, dL_df=None, X=None): - Mapping_check_model.__init__(self,mapping=mapping,dL_df=dL_df, X=X) - - if dL_df==None: - dL_df = np.ones((self.X.shape[0],self.mapping.output_dim)) - self.num_params = self.X.shape[0]*self.mapping.input_dim - - def _log_likelihood_gradients(self): - return self.mapping.df_dX(self.dL_df, self.X).flatten() - - def _get_param_names(self): - return ['X_' +str(i) + ','+str(j) for j in range(self.X.shape[1]) for i in range(self.X.shape[0])] - - def _get_params(self): - return self.X.flatten() - - def _set_params(self, x): - self.X=x.reshape(self.X.shape) diff --git a/GPy/old_tests/mapping_tests.py b/GPy/old_tests/mapping_tests.py index d501df1d..8e4f250d 100644 --- a/GPy/old_tests/mapping_tests.py +++ b/GPy/old_tests/mapping_tests.py @@ -5,6 +5,30 @@ import unittest import numpy as np import GPy +class MappingGradChecker(GPy.core.Model): + """ + This class has everything we need to check the gradient of a mapping. It + implement a simple likelihood which is the sum of the outputs of the + mapping. the gradients are checked against the parameters of the mapping + and the input. + """ + def __init__(self, mapping, X, name): + super(MappingChecker).__init__(self, name) + self.mapping = mapping + self.add_parameter(self.mapping) + self.X = GPy.core.Param('X',X) + self.add_parameter(self.X) + self.dL_dY = np.ones((self.X.shape[0]. 
self.mapping.output_dim)) + def log_likelihood(self): + return np.sum(self.mapping.f(self.X)) + def parameters_changed(self): + self.X.gradient = self.mapping.gradients_X(self.dL_dY, self.X) + self.mapping.update_gradients(self.dL_dY, self.X) + + + + + class MappingTests(unittest.TestCase):