a little work on mappings

This commit is contained in:
James Hensman 2015-03-23 12:38:15 +00:00
parent 1b46a99e75
commit 99545500b1
2 changed files with 24 additions and 68 deletions

View file

@ -74,72 +74,4 @@ class Bijective_mapping(Mapping):
"""Inverse mapping from output domain of the function to the inputs."""
raise NotImplementedError
from model import Model
class Mapping_check_model(Model):
    """
    This is a dummy model class used as a base class for checking that the
    gradients of a given mapping are implemented correctly. It enables
    checkgradient() to be called independently on each mapping.
    """
    def __init__(self, mapping=None, dL_df=None, X=None):
        num_samples = 20
        # Default to a simple 1D -> 1D linear mapping when none is given.
        if mapping is None:
            mapping = GPy.mapping.linear(1, 1)
        if X is None:
            X = np.random.randn(num_samples, mapping.input_dim)
        if dL_df is None:
            # Size by X's actual row count: the original used num_samples,
            # which broke when a caller supplied an X with a different
            # number of rows.
            dL_df = np.ones((X.shape[0], mapping.output_dim))
        self.mapping = mapping
        self.X = X
        self.dL_df = dL_df
        self.num_params = self.mapping.num_params
        Model.__init__(self)

    def _get_params(self):
        """Return the wrapped mapping's parameter vector."""
        return self.mapping._get_params()

    def _get_param_names(self):
        """Return the wrapped mapping's parameter names."""
        return self.mapping._get_param_names()

    def _set_params(self, x):
        """Push a parameter vector into the wrapped mapping."""
        self.mapping._set_params(x)

    def log_likelihood(self):
        """Dummy likelihood: the mapping's outputs weighted by dL_df, summed."""
        return (self.dL_df * self.mapping.f(self.X)).sum()

    def _log_likelihood_gradients(self):
        # Callable raise form replaces the Python-2-only `raise E, "msg"` syntax.
        raise NotImplementedError("This needs to be implemented to use the Mapping_check_model class.")
class Mapping_check_df_dtheta(Mapping_check_model):
    """Gradient checker for a mapping's derivatives w.r.t. its parameters.

    The constructor is inherited unchanged from Mapping_check_model; only
    the gradient computation is specialised.
    """

    def _log_likelihood_gradients(self):
        # Chain rule: fold the upstream gradient dL_df through df/dtheta.
        return self.mapping.df_dtheta(self.dL_df, self.X)
class Mapping_check_df_dX(Mapping_check_model):
    """This class allows gradient checks for the gradient of a mapping with respect to X. """
    def __init__(self, mapping=None, dL_df=None, X=None):
        Mapping_check_model.__init__(self, mapping=mapping, dL_df=dL_df, X=X)
        if dL_df is None:
            # Bug fix: the original assigned a local `dL_df` that was then
            # discarded; the intent was to resize self.dL_df by X's row count.
            self.dL_df = np.ones((self.X.shape[0], self.mapping.output_dim))
        # Here the "parameters" under test are the entries of X itself.
        self.num_params = self.X.shape[0] * self.mapping.input_dim

    def _log_likelihood_gradients(self):
        """Gradient of the dummy likelihood w.r.t. the flattened X."""
        return self.mapping.df_dX(self.dL_df, self.X).flatten()

    def _get_param_names(self):
        """One name per entry of X, in the same order as _get_params."""
        return ['X_' + str(i) + ',' + str(j)
                for j in range(self.X.shape[1])
                for i in range(self.X.shape[0])]

    def _get_params(self):
        """The flattened inputs X act as the model's parameters."""
        return self.X.flatten()

    def _set_params(self, x):
        """Write a flat parameter vector back into X's original shape."""
        self.X = x.reshape(self.X.shape)

View file

@ -5,6 +5,30 @@ import unittest
import numpy as np
import GPy
class MappingGradChecker(GPy.core.Model):
    """
    This class has everything we need to check the gradient of a mapping. It
    implements a simple likelihood which is the sum of the outputs of the
    mapping. The gradients are checked against the parameters of the mapping
    and the input.
    """
    def __init__(self, mapping, X, name):
        # Bug fix: the original called super(MappingChecker).__init__(self, name),
        # which names a non-existent class and misuses super().
        super(MappingGradChecker, self).__init__(name)
        self.mapping = mapping
        self.add_parameter(self.mapping)
        self.X = GPy.core.Param('X', X)
        self.add_parameter(self.X)
        # Bug fix: the shape tuple had a '.' where a ',' belongs.
        # Unit upstream gradient: dL/dY = 1 for every output.
        self.dL_dY = np.ones((self.X.shape[0], self.mapping.output_dim))

    def log_likelihood(self):
        """Sum of the mapping's outputs at the stored inputs."""
        # Bug fix: the original referenced an unbound name `X`.
        return np.sum(self.mapping.f(self.X))

    def parameters_changed(self):
        """Propagate the unit upstream gradient to X and to the mapping's parameters."""
        self.X.gradient = self.mapping.gradients_X(self.dL_dY, self.X)
        self.mapping.update_gradients(self.dL_dY, self.X)
class MappingTests(unittest.TestCase):