From cf0e29b207bc8004e635c20c002ed9c6cfed3387 Mon Sep 17 00:00:00 2001
From: James Hensman
Date: Thu, 26 Mar 2015 08:48:56 +0000
Subject: [PATCH] working mean function examples

---
 GPy/core/gp.py                                     |  4 +-
 GPy/examples/regression.py                         | 45 +++++++++++++++++++
 .../exact_gaussian_inference.py                    |  2 +-
 GPy/mappings/linear.py                             |  4 +-
 4 files changed, 51 insertions(+), 4 deletions(-)

diff --git a/GPy/core/gp.py b/GPy/core/gp.py
index d415d995..fd39069d 100644
--- a/GPy/core/gp.py
+++ b/GPy/core/gp.py
@@ -82,7 +82,7 @@ class GP(Model):
             assert isinstance(self.mean_function, Mapping)
             assert mean_function.input_dim == self.input_dim
             assert mean_function.output_dim == self.output_dim
-            self.add_parameter(mean_function)
+            self.link_parameter(mean_function)

         #find a sensible inference method

@@ -166,6 +166,8 @@ class GP(Model):
         self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y_normalized, self.mean_function, self.Y_metadata)
         self.likelihood.update_gradients(self.grad_dict['dL_dthetaL'])
         self.kern.update_gradients_full(self.grad_dict['dL_dK'], self.X)
+        if self.mean_function is not None:
+            self.mean_function.update_gradients(self.grad_dict['dL_dm'], self.X)

     def log_likelihood(self):
         """
diff --git a/GPy/examples/regression.py b/GPy/examples/regression.py
index 37a18f63..0e68d0bf 100644
--- a/GPy/examples/regression.py
+++ b/GPy/examples/regression.py
@@ -505,3 +505,48 @@ def uncertain_inputs_sparse_regression(max_iters=200, optimize=True, plot=True):
         print m

     return m
+
+def simple_mean_function(max_iters=100, optimize=True, plot=True):
+    """
+    The simplest possible mean function. No parameters, just a simple Sinusoid.
+    """
+    #create simple mean function
+    mf = GPy.core.Mapping(1,1)
+    mf.f = np.sin
+    mf.update_gradients = lambda a,b: None
+
+    X = np.linspace(0,10,50).reshape(-1,1)
+    Y = np.sin(X) + 0.5*np.cos(3*X) + 0.1*np.random.randn(*X.shape)
+
+    k =GPy.kern.RBF(1)
+    lik = GPy.likelihoods.Gaussian()
+    m = GPy.core.GP(X, Y, kernel=k, likelihood=lik, mean_function=mf)
+    if optimize:
+        m.optimize(max_iters=max_iters)
+    if plot:
+        m.plot(plot_limits=(-10,15))
+    return m
+
+def parametric_mean_function(max_iters=100, optimize=True, plot=True):
+    """
+    A linear mean function with parameters that we'll learn alongside the kernel
+    """
+    #create simple mean function
+    mf = GPy.core.Mapping(1,1)
+    mf.f = np.sin
+
+    X = np.linspace(0,10,50).reshape(-1,1)
+    Y = np.sin(X) + 0.5*np.cos(3*X) + 0.1*np.random.randn(*X.shape) + 3*X
+
+    mf = GPy.mappings.Linear(1,1)
+
+    k =GPy.kern.RBF(1)
+    lik = GPy.likelihoods.Gaussian()
+    m = GPy.core.GP(X, Y, kernel=k, likelihood=lik, mean_function=mf)
+    if optimize:
+        m.optimize(max_iters=max_iters)
+    if plot:
+        m.plot()
+    return m
+
+
diff --git a/GPy/inference/latent_function_inference/exact_gaussian_inference.py b/GPy/inference/latent_function_inference/exact_gaussian_inference.py
index 4c6b70df..b2f1b7d0 100644
--- a/GPy/inference/latent_function_inference/exact_gaussian_inference.py
+++ b/GPy/inference/latent_function_inference/exact_gaussian_inference.py
@@ -63,4 +63,4 @@ class ExactGaussianInference(LatentFunctionInference):

         dL_dthetaL = likelihood.exact_inference_gradients(np.diag(dL_dK),Y_metadata)

-        return Posterior(woodbury_chol=LW, woodbury_vector=alpha, K=K), log_marginal, {'dL_dK':dL_dK, 'dL_dthetaL':dL_dthetaL}
+        return Posterior(woodbury_chol=LW, woodbury_vector=alpha, K=K), log_marginal, {'dL_dK':dL_dK, 'dL_dthetaL':dL_dthetaL, 'dL_dm':alpha}
diff --git a/GPy/mappings/linear.py b/GPy/mappings/linear.py
index 6fc91944..ee464694 100644
--- a/GPy/mappings/linear.py
+++ b/GPy/mappings/linear.py
@@ -26,8 +26,8 @@ class Linear(Mapping):

     def __init__(self, input_dim, output_dim, name='linmap'):
         Mapping.__init__(self, input_dim=input_dim, output_dim=output_dim, name=name)
-        self.A = GPy.core.Param('A', np.random.randn(self.input_dim, self.output_dim))
-        self.add_parameter(self.A)
+        self.A = Param('A', np.random.randn(self.input_dim, self.output_dim))
+        self.link_parameter(self.A)

     def f(self, X):
         return np.dot(X, self.A)
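
For reference, the examples added to GPy/examples/regression.py exercise the new mean_function argument end to end, and the new 'dL_dm' entry returned by exact Gaussian inference reuses alpha, consistent with the gradient of the Gaussian log marginal likelihood with respect to the mean being (K + sigma^2 I)^{-1}(y - m), i.e. the woodbury vector. A minimal standalone usage sketch, assuming a GPy build with this patch applied (names come from the diff above; the data and settings are illustrative, mirroring the parametric_mean_function example):

    # Fit a GP with a learnable linear mean function (sketch; assumes this
    # patch is applied to GPy). Data and hyperparameters are illustrative.
    import numpy as np
    import GPy

    X = np.linspace(0, 10, 50).reshape(-1, 1)
    Y = np.sin(X) + 0.5*np.cos(3*X) + 0.1*np.random.randn(*X.shape) + 3*X

    mf = GPy.mappings.Linear(1, 1)     # mean function m(X) = X.dot(A); A is a learned parameter
    k = GPy.kern.RBF(1)
    lik = GPy.likelihoods.Gaussian()

    m = GPy.core.GP(X, Y, kernel=k, likelihood=lik, mean_function=mf)
    m.optimize(max_iters=100)          # A is optimized jointly with kernel and noise parameters
    m.plot()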