mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-04-27 22:06:22 +02:00
working mean function examples
This commit is contained in:
parent
254157ce04
commit
cf0e29b207
4 changed files with 51 additions and 4 deletions
|
|
@ -82,7 +82,7 @@ class GP(Model):
|
|||
assert isinstance(self.mean_function, Mapping)
|
||||
assert mean_function.input_dim == self.input_dim
|
||||
assert mean_function.output_dim == self.output_dim
|
||||
self.add_parameter(mean_function)
|
||||
self.link_parameter(mean_function)
|
||||
|
||||
|
||||
#find a sensible inference method
|
||||
|
|
@ -166,6 +166,8 @@ class GP(Model):
|
|||
self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.likelihood, self.Y_normalized, self.mean_function, self.Y_metadata)
|
||||
self.likelihood.update_gradients(self.grad_dict['dL_dthetaL'])
|
||||
self.kern.update_gradients_full(self.grad_dict['dL_dK'], self.X)
|
||||
if self.mean_function is not None:
|
||||
self.mean_function.update_gradients(self.grad_dict['dL_dm'], self.X)
|
||||
|
||||
def log_likelihood(self):
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -505,3 +505,48 @@ def uncertain_inputs_sparse_regression(max_iters=200, optimize=True, plot=True):
|
|||
|
||||
print m
|
||||
return m
|
||||
|
||||
def simple_mean_function(max_iters=100, optimize=True, plot=True):
    """
    The simplest possible mean function. No parameters, just a simple Sinusoid.

    Builds a 1D GP regression model whose prior mean is fixed to np.sin,
    fits it to noisy sinusoidal data, and returns the model.
    """
    # Wrap np.sin in a parameterless GPy Mapping to serve as the GP mean.
    mf = GPy.core.Mapping(1, 1)
    mf.f = np.sin
    # No parameters to learn, so gradient updates are a no-op.
    mf.update_gradients = lambda a, b: None

    X = np.linspace(0, 10, 50).reshape(-1, 1)
    Y = np.sin(X) + 0.5*np.cos(3*X) + 0.1*np.random.randn(*X.shape)

    kernel = GPy.kern.RBF(1)
    likelihood = GPy.likelihoods.Gaussian()
    model = GPy.core.GP(X, Y, kernel=kernel, likelihood=likelihood, mean_function=mf)
    if optimize:
        model.optimize(max_iters=max_iters)
    if plot:
        # Plot beyond the data range to show the mean function's extrapolation.
        model.plot(plot_limits=(-10, 15))
    return model
|
||||
|
||||
def parametric_mean_function(max_iters=100, optimize=True, plot=True):
    """
    A linear mean function with parameters that we'll learn alongside the kernel

    Builds a 1D GP whose data contain a linear trend (3*X); the Linear mapping
    used as the mean function is optimized jointly with the kernel parameters.
    Returns the fitted model.
    """
    X = np.linspace(0, 10, 50).reshape(-1, 1)
    # Sinusoidal signal plus a linear trend (3*X) for the mean function to capture.
    Y = np.sin(X) + 0.5*np.cos(3*X) + 0.1*np.random.randn(*X.shape) + 3*X

    # Parametric linear mapping; its A matrix is linked as a model parameter.
    # (Removed dead code: a GPy.core.Mapping with f=np.sin was created here and
    # immediately overwritten by this Linear mapping.)
    mf = GPy.mappings.Linear(1, 1)

    k = GPy.kern.RBF(1)
    lik = GPy.likelihoods.Gaussian()
    m = GPy.core.GP(X, Y, kernel=k, likelihood=lik, mean_function=mf)
    if optimize:
        m.optimize(max_iters=max_iters)
    if plot:
        m.plot()
    return m
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -63,4 +63,4 @@ class ExactGaussianInference(LatentFunctionInference):
|
|||
|
||||
dL_dthetaL = likelihood.exact_inference_gradients(np.diag(dL_dK),Y_metadata)
|
||||
|
||||
return Posterior(woodbury_chol=LW, woodbury_vector=alpha, K=K), log_marginal, {'dL_dK':dL_dK, 'dL_dthetaL':dL_dthetaL}
|
||||
return Posterior(woodbury_chol=LW, woodbury_vector=alpha, K=K), log_marginal, {'dL_dK':dL_dK, 'dL_dthetaL':dL_dthetaL, 'dL_dm':alpha}
|
||||
|
|
|
|||
|
|
@ -26,8 +26,8 @@ class Linear(Mapping):
|
|||
|
||||
def __init__(self, input_dim, output_dim, name='linmap'):
    """
    A linear mapping X -> X.dot(A).

    :param input_dim: dimensionality of the mapping's input
    :param output_dim: dimensionality of the mapping's output
    :param name: name of the mapping (default 'linmap')
    """
    Mapping.__init__(self, input_dim=input_dim, output_dim=output_dim, name=name)
    # A is (input_dim, output_dim), randomly initialized; link_parameter
    # registers it so it is optimized with the rest of the model.
    # (Removed the diff's superseded duplicate: GPy.core.Param + add_parameter.)
    self.A = Param('A', np.random.randn(self.input_dim, self.output_dim))
    self.link_parameter(self.A)
|
||||
|
||||
def f(self, X):
    """Apply the linear map: return the matrix product of X and self.A."""
    return X.dot(self.A)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue