mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-04-28 22:36:24 +02:00
some tidying in the regression examples
This commit is contained in:
parent
eafcd50af5
commit
c86981a110
1 changed files with 119 additions and 116 deletions
|
|
@ -1,7 +1,6 @@
|
|||
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
|
||||
# Licensed under the BSD 3-clause license (see LICENSE.txt)
|
||||
|
||||
|
||||
"""
|
||||
Gaussian Processes regression examples
|
||||
"""
|
||||
|
|
@ -9,88 +8,107 @@ import pylab as pb
|
|||
import numpy as np
|
||||
import GPy
|
||||
|
||||
def olympic_marathon_men(optimize=True, plot=True, max_iters=200):
    """Run a standard Gaussian process regression on the Olympic marathon data.

    :param optimize: whether to optimize the model hyperparameters with BFGS.
    :param plot: whether to plot the fitted model.
    :param max_iters: maximum number of BFGS iterations when optimizing.
    :returns: the (optionally optimized) GPRegression model.
    """
    data = GPy.util.datasets.olympic_marathon_men()

    # create simple GP Model
    m = GPy.models.GPRegression(data['X'], data['Y'])

    # set the lengthscale to be something sensible (defaults to 1)
    m['rbf_lengthscale'] = 10

    if optimize:
        m.optimize('bfgs', max_iters=max_iters)
    if plot:
        # plot a little beyond the data range to show the extrapolation
        m.plot(plot_limits=(1850, 2050))

    return m
|
||||
|
||||
def coregionalization_toy2(optimize=True, plot=True, max_iters=100):
    """
    A simple demonstration of coregionalization on two sinusoidal functions.

    :param optimize: whether to optimize the model hyperparameters with BFGS.
    :param plot: whether to plot the prediction for each output.
    :param max_iters: maximum number of BFGS iterations when optimizing.
    :returns: the coregionalized GPRegression model.
    """
    # build a design matrix with a column of integers indicating the output
    X1 = np.random.rand(50, 1) * 8
    X2 = np.random.rand(30, 1) * 5
    index = np.vstack((np.zeros_like(X1), np.ones_like(X2)))
    X = np.hstack((np.vstack((X1, X2)), index))

    # build a suitable set of observed variables (output 1 is offset by 2)
    Y1 = np.sin(X1) + np.random.randn(*X1.shape) * 0.05
    Y2 = np.sin(X2) + np.random.randn(*X2.shape) * 0.05 + 2.
    Y = np.vstack((Y1, Y2))

    # build the kernel: tensor product of a data kernel with a
    # coregionalization kernel over the output-index column
    k1 = GPy.kern.rbf(1) + GPy.kern.bias(1)
    k2 = GPy.kern.coregionalize(2, 1)
    k = k1 ** k2  # equivalent to k1.prod(k2, tensor=True)

    m = GPy.models.GPRegression(X, Y, kernel=k)
    # fix the rbf variance: the coregionalization kernel carries the scale
    m.constrain_fixed('.*rbf_var', 1.)

    if optimize:
        m.optimize('bfgs', max_iters=max_iters)

    if plot:
        m.plot(fixed_inputs=[(1, 0)])
        m.plot(fixed_inputs=[(1, 1)], ax=pb.gca())

    return m
|
||||
|
||||
def coregionalization_toy(max_iters=100, optimize=True, plot=True):
    """
    A simple demonstration of coregionalization on two sinusoidal functions.

    :param max_iters: maximum number of optimizer iterations.
    :param optimize: whether to optimize the model hyperparameters.
    :param plot: whether to plot both outputs on separate axes.
    :returns: the GPMultioutputRegression model.
    """
    # two outputs: sign-flipped sinusoids observed with a little noise
    X1 = np.random.rand(50, 1) * 8
    X2 = np.random.rand(30, 1) * 5
    Y1 = np.sin(X1) + np.random.randn(*X1.shape) * 0.05
    Y2 = -np.sin(X2) + np.random.randn(*X2.shape) * 0.05
    # NOTE: the model consumes the per-output lists directly; the stacked
    # X/Y arrays the original built here were never used and are dropped.

    k1 = GPy.kern.rbf(1)
    m = GPy.models.GPMultioutputRegression(X_list=[X1, X2], Y_list=[Y1, Y2], kernel_list=[k1])
    # fix the rbf variance: the coregionalization matrix carries the scale
    m.constrain_fixed('.*rbf_var', 1.)
    if optimize:
        m.optimize(max_iters=max_iters)

    if plot:
        fig, axes = pb.subplots(2, 1)
        m.plot(fixed_inputs=[(1, 0)], ax=axes[0])
        m.plot(fixed_inputs=[(1, 1)], ax=axes[1])
        axes[0].set_title('Output 0')
        axes[1].set_title('Output 1')
    return m
|
||||
|
||||
def coregionalization_sparse(optimize=True, plot=True, max_iters=100):
    """
    A simple demonstration of coregionalization on two sinusoidal functions
    using sparse approximations.

    :param optimize: whether to optimize the model hyperparameters with BFGS.
    :param plot: whether to plot the prediction for each output.
    :param max_iters: maximum number of BFGS iterations when optimizing.
    :returns: the SparseGPRegression model.
    """
    # fetch the data from the non sparse example (no fitting, no plotting)
    m = coregionalization_toy2(optimize=False, plot=False)
    X, Y = m.X, m.likelihood.Y

    # construct a sparse model on the same data
    m = GPy.models.SparseGPRegression(X, Y)
    # raw string keeps the regex's \d intact;
    # don't optimize the inducing input indexes
    m.constrain_fixed(r'iip_\d+_1')

    if optimize:
        m.optimize('bfgs', max_iters=max_iters, messages=1)

    if plot:
        m.plot(fixed_inputs=[(1, 0)])
        m.plot(fixed_inputs=[(1, 1)], ax=pb.gca())

    return m
|
||||
|
||||
def epomeo_gpx(max_iters=100):
|
||||
"""Perform Gaussian process regression on the latitude and longitude data from the Mount Epomeo runs. Requires gpxpy to be installed on your system to load in the data."""
|
||||
|
||||
|
||||
def epomeo_gpx(optimize=True, plot=True):
|
||||
"""
|
||||
Perform Gaussian process regression on the latitude and longitude data
|
||||
from the Mount Epomeo runs. Requires gpxpy to be installed on your system
|
||||
to load in the data.
|
||||
"""
|
||||
data = GPy.util.datasets.epomeo_gpx()
|
||||
num_data_list = []
|
||||
for Xpart in data['X']:
|
||||
|
|
@ -119,14 +137,17 @@ def epomeo_gpx(max_iters=100):
|
|||
m.constrain_fixed('.*rbf_var', 1.)
|
||||
m.constrain_fixed('iip')
|
||||
m.constrain_bounded('noise_variance', 1e-3, 1e-1)
|
||||
# m.optimize_restarts(5, robust=True, messages=1, max_iters=max_iters, optimizer='bfgs')
|
||||
m.optimize(max_iters=max_iters,messages=True)
|
||||
|
||||
return m
|
||||
|
||||
|
||||
def multiple_optima(gene_number=937, resolution=80, model_restarts=10, seed=10000, max_iters=300):
|
||||
"""Show an example of a multimodal error surface for Gaussian process regression. Gene 939 has bimodal behaviour where the noisy mode is higher."""
|
||||
"""
|
||||
Show an example of a multimodal error surface for Gaussian process
|
||||
regression. Gene 939 has bimodal behaviour where the noisy mode is
|
||||
higher.
|
||||
"""
|
||||
|
||||
# Contour over a range of length scales and signal/noise ratios.
|
||||
length_scales = np.linspace(0.1, 60., resolution)
|
||||
|
|
@ -175,12 +196,15 @@ def multiple_optima(gene_number=937, resolution=80, model_restarts=10, seed=1000
|
|||
return m # (models, lls)
|
||||
|
||||
def _contour_data(data, length_scales, log_SNRs, kernel_call=GPy.kern.rbf):
|
||||
"""Evaluate the GP objective function for a given data set for a range of signal to noise ratios and a range of lengthscales.
|
||||
"""
|
||||
Evaluate the GP objective function for a given data set for a range of
|
||||
signal to noise ratios and a range of lengthscales.
|
||||
|
||||
:data_set: A data set from the utils.datasets director.
|
||||
:length_scales: a list of length scales to explore for the contour plot.
|
||||
:log_SNRs: a list of base 10 logarithm signal to noise ratios to explore for the contour plot.
|
||||
:kernel: a kernel to use for the 'signal' portion of the data."""
|
||||
:kernel: a kernel to use for the 'signal' portion of the data.
|
||||
"""
|
||||
|
||||
lls = []
|
||||
total_var = np.var(data['Y'])
|
||||
|
|
@ -203,79 +227,58 @@ def _contour_data(data, length_scales, log_SNRs, kernel_call=GPy.kern.rbf):
|
|||
return np.array(lls)
|
||||
|
||||
|
||||
def olympic_100m_men(optimize=True, plot=True, max_iters=200):
    """Run a standard Gaussian process regression on the Rogers and Girolami olympics data.

    :param optimize: whether to optimize the model hyperparameters with BFGS.
    :param plot: whether to plot the fitted model.
    :param max_iters: maximum number of BFGS iterations when optimizing.
    :returns: the (optionally optimized) GPRegression model.
    """
    data = GPy.util.datasets.olympic_100m_men()

    # create simple GP Model
    m = GPy.models.GPRegression(data['X'], data['Y'])

    # set the lengthscale to be something sensible (defaults to 1)
    m['rbf_lengthscale'] = 10

    if optimize:
        m.optimize('bfgs', max_iters=max_iters)
    if plot:
        # plot a little beyond the data range to show the extrapolation
        m.plot(plot_limits=(1850, 2050))
    return m
|
||||
|
||||
def olympic_marathon_men(max_iters=100, kernel=None):
    """Run a standard Gaussian process regression on the Olympic marathon data.

    :param max_iters: maximum number of optimizer iterations.
    :param kernel: optional kernel for the GP; when None the model's default
        rbf kernel is used and its lengthscale is set to a sensible value.
    :returns: the optimized GPRegression model.
    """
    data = GPy.util.datasets.olympic_marathon_men()

    # create simple GP Model
    m = GPy.models.GPRegression(data['X'], data['Y'], kernel)

    # set the lengthscale to be something sensible (defaults to 1);
    # only safe when using the default rbf kernel
    if kernel is None:  # identity test: '== None' may invoke kernel.__eq__
        m['rbf_lengthscale'] = 10

    # optimize
    m.optimize(max_iters=max_iters)

    # plot
    m.plot(plot_limits=(1850, 2050))
    print(m)
    return m
|
||||
|
||||
def toy_rbf_1d(optimize=True, plot=True):
    """Run a simple demonstration of a standard Gaussian process fitting it to data sampled from an RBF covariance."""
    # load the pre-sampled toy data set
    data = GPy.util.datasets.toy_rbf_1d()

    # vanilla GP regression on the raw inputs/targets
    model = GPy.models.GPRegression(data['X'], data['Y'])

    if optimize:
        model.optimize('bfgs')
    if plot:
        model.plot()

    return model
|
||||
|
||||
def toy_rbf_1d_50(optimize=True, plot=True):
    """Run a simple demonstration of a standard Gaussian process fitting it to data sampled from an RBF covariance."""
    # load the 50-point variant of the toy RBF data set
    data = GPy.util.datasets.toy_rbf_1d_50()

    # vanilla GP regression on the raw inputs/targets
    model = GPy.models.GPRegression(data['X'], data['Y'])

    if optimize:
        model.optimize('bfgs')
    if plot:
        model.plot()

    return model
|
||||
|
||||
def toy_poisson_rbf_1d(optimizer='bfgs', max_nb_eval_optim=100):
|
||||
|
||||
def toy_poisson_rbf_1d(optimize=True, plot=True):
|
||||
"""Run a simple demonstration of a standard Gaussian process fitting it to data sampled from an RBF covariance."""
|
||||
x_len = 400
|
||||
X = np.linspace(0, 10, x_len)[:, None]
|
||||
f_true = np.random.multivariate_normal(np.zeros(x_len), GPy.kern.rbf(1).K(X))
|
||||
Y = np.array([np.random.poisson(np.exp(f)) for f in f_true])[:,None]
|
||||
Y = np.array([np.random.poisson(np.exp(f)) for f in f_true]).reshape(x_len,1)
|
||||
|
||||
noise_model = GPy.likelihoods.poisson()
|
||||
likelihood = GPy.likelihoods.EP(Y,noise_model)
|
||||
|
|
@ -283,14 +286,14 @@ def toy_poisson_rbf_1d(optimizer='bfgs', max_nb_eval_optim=100):
|
|||
# create simple GP Model
|
||||
m = GPy.models.GPRegression(X, Y, likelihood=likelihood)
|
||||
|
||||
# optimize
|
||||
m.optimize(optimizer, max_f_eval=max_nb_eval_optim)
|
||||
# plot
|
||||
m.plot()
|
||||
print(m)
|
||||
if optimize:
|
||||
m.optimize('bfgs')
|
||||
if plot:
|
||||
m.plot()
|
||||
|
||||
return m
|
||||
|
||||
def toy_poisson_rbf_1d_laplace(optimizer='bfgs', max_nb_eval_optim=100):
|
||||
def toy_poisson_rbf_1d_laplace(optimize=True, plot=True):
|
||||
"""Run a simple demonstration of a standard Gaussian process fitting it to data sampled from an RBF covariance."""
|
||||
x_len = 30
|
||||
X = np.linspace(0, 10, x_len)[:, None]
|
||||
|
|
@ -303,13 +306,13 @@ def toy_poisson_rbf_1d_laplace(optimizer='bfgs', max_nb_eval_optim=100):
|
|||
# create simple GP Model
|
||||
m = GPy.models.GPRegression(X, Y, likelihood=likelihood)
|
||||
|
||||
# optimize
|
||||
m.optimize(optimizer, max_f_eval=max_nb_eval_optim)
|
||||
# plot
|
||||
m.plot()
|
||||
# plot the real underlying rate function
|
||||
pb.plot(X, np.exp(f_true), '--k', linewidth=2)
|
||||
print(m)
|
||||
if optimize:
|
||||
m.optimize(optimizer, max_f_eval=max_nb_eval_optim)
|
||||
if plot:
|
||||
m.plot()
|
||||
# plot the real underlying rate function
|
||||
pb.plot(X, np.exp(f_true), '--k', linewidth=2)
|
||||
|
||||
return m
|
||||
|
||||
|
||||
|
|
@ -459,7 +462,7 @@ def sparse_GP_regression_2D(num_samples=400, num_inducing=50, max_iters=100):
|
|||
print(m)
|
||||
return m
|
||||
|
||||
def uncertain_inputs_sparse_regression(max_iters=100):
|
||||
def uncertain_inputs_sparse_regression(optimize=True, plot=True):
|
||||
"""Run a 1D example of a sparse GP regression with uncertain inputs."""
|
||||
fig, axes = pb.subplots(1, 2, figsize=(12, 5))
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue