mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-11 13:02:38 +02:00
examples
This commit is contained in:
parent
aa13e095a9
commit
ba93455c09
9 changed files with 464 additions and 0 deletions
24
GPy/examples/GPLVM_demo.py
Normal file
24
GPy/examples/GPLVM_demo.py
Normal file
|
|
@@ -0,0 +1,24 @@
|
|||
import numpy as np
import pylab as pb
import GPy

# Fix the random seed so the demo is reproducible.
np.random.seed(1)
print("GPLVM with RBF kernel")  # print() form works on Python 2.6+ and 3.x

# Problem dimensions.
N = 100  # number of data points
Q = 1    # latent dimensionality
D = 2    # observed dimensionality

# Generate GPLVM-like data: sample latent locations, then draw D output
# dimensions from a GP with an RBF (+ tiny white jitter) covariance.
X = np.random.rand(N, Q)
k = GPy.kern.rbf(Q, 1.0, 2.0) + GPy.kern.white(Q, 0.00001)
K = k.K(X)
Y = np.random.multivariate_normal(np.zeros(N), K, D).T

m = GPy.models.GPLVM(Y, Q)
# Kernel variances/lengthscales must remain positive during optimisation.
m.constrain_positive('(rbf|bias|white)')

# Latent space at the PCA initialisation, then after optimisation.
pb.figure()
m.plot()
pb.title('PCA initialisation')
pb.figure()
m.optimize(messages=1)
m.plot()
pb.title('After optimisation')
|
||||
56
GPy/examples/GP_regression_demo.py
Normal file
56
GPy/examples/GP_regression_demo.py
Normal file
|
|
@@ -0,0 +1,56 @@
|
|||
"""
|
||||
Simple Gaussian Processes regression with an RBF kernel
|
||||
"""
|
||||
import pylab as pb
|
||||
import numpy as np
|
||||
import GPy
|
||||
pb.ion()
|
||||
pb.close('all')
|
||||
|
||||
|
||||
######################################
|
||||
## 1 dimensional example
|
||||
|
||||
# sample inputs and outputs
|
||||
X = np.random.uniform(-3.,3.,(20,1))
|
||||
Y = np.sin(X)+np.random.randn(20,1)*0.05
|
||||
|
||||
# construct kernel
|
||||
rbf = GPy.kern.rbf(1)
|
||||
noise = GPy.kern.white(1)
|
||||
kernel = rbf + noise
|
||||
|
||||
# create simple GP model
|
||||
m = GPy.models.GP_regression(X,Y, kernel=kernel)
|
||||
|
||||
# contrain all parameters to be positive
|
||||
m.constrain_positive('')
|
||||
# optimize and plot
|
||||
m.optimize('rasm', max_f_eval = 1000)
|
||||
m.plot()
|
||||
print(m)
|
||||
|
||||
######################################
|
||||
## 2 dimensional example
|
||||
|
||||
# sample inputs and outputs
|
||||
X = np.random.uniform(-3.,3.,(40,2))
|
||||
Y = np.sin(X[:,0:1]) * np.sin(X[:,1:2])+np.random.randn(40,1)*0.05
|
||||
|
||||
# construct kernel
|
||||
rbf = GPy.kern.rbf(2)
|
||||
noise = GPy.kern.white(2)
|
||||
kernel = rbf + noise
|
||||
|
||||
# create simple GP model
|
||||
m = GPy.models.GP_regression(X,Y)
|
||||
|
||||
# contrain all parameters to be positive
|
||||
m.constrain_positive('')
|
||||
# optimize and plot
|
||||
pb.figure()
|
||||
m.optimize('rasm', max_f_eval = 1000)
|
||||
m.plot()
|
||||
print(m)
|
||||
|
||||
|
||||
29
GPy/examples/GP_regression_kern_demo.py
Normal file
29
GPy/examples/GP_regression_kern_demo.py
Normal file
|
|
@@ -0,0 +1,29 @@
|
|||
"""
|
||||
Simple one-dimensional Gaussian Processes with assorted kernel functions
|
||||
"""
|
||||
import pylab as pb
|
||||
import numpy as np
|
||||
import GPy
|
||||
|
||||
# sample inputs and outputs
|
||||
D = 1
|
||||
X = np.random.randn(10,D)*2
|
||||
X = np.linspace(-1.5,1.5,5)[:,None]
|
||||
X = np.append(X,[[5]],0)
|
||||
Y = np.sin(np.pi*X/2) #+np.random.randn(X.shape[0],1)*0.05
|
||||
|
||||
models = [GPy.models.GP_regression(X,Y, k) for k in (GPy.kern.rbf(D), GPy.kern.Matern52(D), GPy.kern.Matern32(D), GPy.kern.exponential(D), GPy.kern.linear(D) + GPy.kern.white(D), GPy.kern.bias(D) + GPy.kern.white(D))]
|
||||
|
||||
pb.figure(figsize=(12,8))
|
||||
for i,m in enumerate(models):
|
||||
m.constrain_positive('')
|
||||
m.optimize()
|
||||
pb.subplot(3,2,i+1)
|
||||
m.plot()
|
||||
#pb.title(m.kern.parts[0].name)
|
||||
|
||||
GPy.util.plot.align_subplots(3,2,(-3,6),(-2.5,2.5))
|
||||
|
||||
pb.show()
|
||||
|
||||
|
||||
93
GPy/examples/classification.py
Normal file
93
GPy/examples/classification.py
Normal file
|
|
@@ -0,0 +1,93 @@
|
|||
"""
|
||||
Simple Gaussian Processes classification
|
||||
"""
|
||||
import pylab as pb
|
||||
import numpy as np
|
||||
import GPy
|
||||
pb.ion()
|
||||
pb.close('all')
|
||||
|
||||
default_seed=10000
|
||||
######################################
|
||||
## 2 dimensional example
|
||||
def crescent_data(model_type='Full', inducing=10, seed=default_seed):
    """Gaussian process classification demo on the crescent data.

    Builds the basic GP classification model and approximates the
    (probit) likelihood with EP.

    :param model_type: type of model to fit ['Full', 'FITC', 'DTC'].
    :param inducing: number of inducing variables (only used for 'FITC' or 'DTC').
    :type inducing: int
    :param seed: seed value for data generation.
    :type seed: int
    """
    dataset = GPy.util.datasets.crescent_data(seed=seed)
    lik = GPy.inference.likelihoods.probit(dataset['Y'])

    if model_type == 'Full':
        # full (non-sparse) GP classifier
        model = GPy.models.simple_GP_EP(dataset['X'], lik)
    else:
        # sparse GP EP model using the requested proxy ('FITC' or 'DTC')
        model = GPy.models.sparse_GP_EP(dataset['X'], likelihood=lik, inducing=inducing, ep_proxy=model_type)

    model.approximate_likelihood()
    print(model)

    # fit hyperparameters with EM, then report and plot
    model.em()
    print(model)
    model.plot()
    return model
|
||||
|
||||
def oil():
    """GP classification demo on the oil data.

    Uses the basic GP classification model with an EP approximation to
    the (probit) likelihood.
    """
    dataset = GPy.util.datasets.oil()
    lik = GPy.inference.likelihoods.probit(dataset['Y'][:, 0:1])

    # full (non-sparse) GP classifier
    model = GPy.models.simple_GP_EP(dataset['X'], lik)

    # keep every parameter positive and share a single lengthscale
    model.constrain_positive('')
    model.tie_param('lengthscale')
    model.approximate_likelihood()

    # fit the hyperparameters
    model.optimize()

    #model.plot()
    print(model)
    return model
|
||||
|
||||
def toy_linear_1d_classification(model_type='Full', inducing=4, seed=default_seed):
    """Simple 1D classification example.

    Fits a GP classifier (full or sparse) with an EP likelihood
    approximation, optimised by EM.

    :param model_type: type of model to fit ['Full', 'FITC', 'DTC'].
    :param seed : seed value for data generation (default is `default_seed`).
    :type seed: int
    :param inducing : number of inducing variables, default 4 (only used for 'FITC' or 'DTC').
    :type inducing: int
    """
    data = GPy.util.datasets.toy_linear_1d_classification(seed=seed)
    # probit likelihood on the first output column; EP approximates it below
    likelihood = GPy.inference.likelihoods.probit(data['Y'][:, 0:1])
    assert model_type in ('Full','DTC','FITC')

    # create simple GP model
    if model_type=='Full':
        m = GPy.models.simple_GP_EP(data['X'],likelihood)
    else:
        # create sparse GP EP model
        m = GPy.models.sparse_GP_EP(data['X'],likelihood=likelihood,inducing=inducing,ep_proxy=model_type)


    # variances and lengthscales must stay positive; lengthscales are tied
    m.constrain_positive('var')
    m.constrain_positive('len')
    m.tie_param('lengthscale')
    m.approximate_likelihood()

    # Optimize and plot
    m.em(plot_all=False) # EM algorithm
    m.plot()

    print(m)
    return m
|
||||
49
GPy/examples/oil_flow_demo.py
Normal file
49
GPy/examples/oil_flow_demo.py
Normal file
|
|
@@ -0,0 +1,49 @@
|
|||
try:
    import cPickle as pickle  # Python 2: faster C implementation
except ImportError:
    import pickle  # Python 3: cPickle no longer exists
import numpy as np
import pylab as pb
import GPy
import pylab as plt

np.random.seed(1)


def plot_oil(X, theta, labels, label):
    """Scatter-plot the two most relevant columns of X, colouring points
    by their one-hot flow-type label (red/green/blue per class)."""
    plt.figure()
    # keep the two columns with the smallest theta values
    X = X[:, np.argsort(theta)[:2]]
    flow_type = (X[labels[:, 0] == 1])
    plt.plot(flow_type[:, 0], flow_type[:, 1], 'rx')
    flow_type = (X[labels[:, 1] == 1])
    plt.plot(flow_type[:, 0], flow_type[:, 1], 'gx')
    flow_type = (X[labels[:, 2] == 1])
    plt.plot(flow_type[:, 0], flow_type[:, 1], 'bx')
    plt.title(label)


# load the oil-flow data; pickle files must be opened in binary mode,
# and the context manager guarantees the handle is closed
with open('../util/datasets/oil_flow_3classes.pickle', 'rb') as f:
    data = pickle.load(f)

Y = data['DataTrn']
N, D = Y.shape
# random subset of 200 points to keep the demo fast
selected = np.random.permutation(N)[:200]
labels = data['DataTrnLbls'][selected]
Y = Y[selected]
N, D = Y.shape
# standardise each output dimension
Y -= Y.mean(axis=0)
Y /= Y.std(axis=0)

Q = 2
m1 = GPy.models.sparse_GPLVM(Y, Q, M=15)
m1.constrain_positive('(rbf|bias|noise)')
# keep the white-noise variance bounded away from zero for stability
m1.constrain_bounded('white', 1e-6, 1.0)

plot_oil(m1.X, np.array([1, 1]), labels, 'PCA initialization')
# m.optimize(messages = True)
m1.optimize('bfgs', messages=True)
plot_oil(m1.X, np.array([1, 1]), labels, 'sparse GPLVM')
# pb.figure()
# m.plot()
# pb.title('PCA initialisation')
# pb.figure()
# m.optimize(messages = 1)
# m.plot()
# pb.title('After optimisation')

# full (non-sparse) GPLVM on the same data for comparison
m = GPy.models.GPLVM(Y, Q)
m.constrain_positive('(white|rbf|bias|noise)')
m.optimize()
plot_oil(m.X, np.array([1, 1]), labels, 'GPLVM')
|
||||
79
GPy/examples/regression.py
Normal file
79
GPy/examples/regression.py
Normal file
|
|
@@ -0,0 +1,79 @@
|
|||
"""
|
||||
Gaussian Processes regression examples
|
||||
"""
|
||||
import pylab as pb
|
||||
import numpy as np
|
||||
import GPy
|
||||
pb.ion()
|
||||
pb.close('all')
|
||||
|
||||
|
||||
def toy_rbf_1d():
    """Fit a standard GP to data sampled from an RBF covariance and display the fit."""
    data = GPy.util.datasets.toy_rbf_1d()

    # GP regression model with default kernel settings
    model = GPy.models.GP_regression(data['X'], data['Y'])

    # every parameter is constrained positive
    model.constrain_positive('')

    # fit hyperparameters
    model.optimize()

    # show the fitted model
    model.plot()
    print(model)
    return model
|
||||
|
||||
def rogers_girolami_olympics():
    """Standard GP regression on the Rogers and Girolami olympics data."""
    data = GPy.util.datasets.rogers_girolami_olympics()

    # GP regression model with default kernel settings
    model = GPy.models.GP_regression(data['X'], data['Y'])

    # every parameter is constrained positive
    model.constrain_positive('')

    # fit hyperparameters
    model.optimize()

    # plot with limits extended beyond the data range to show extrapolation
    model.plot(plot_limits=(1850, 2050))
    print(model)
    return model
|
||||
|
||||
def toy_rbf_1d_50():
    """Fit a standard GP to the 50-point variant of the toy RBF dataset."""
    data = GPy.util.datasets.toy_rbf_1d_50()

    # GP regression model with default kernel settings
    model = GPy.models.GP_regression(data['X'], data['Y'])

    # every parameter is constrained positive
    model.constrain_positive('')

    # fit hyperparameters
    model.optimize()

    # show the fitted model
    model.plot()
    print(model)
    return model
|
||||
|
||||
def silhouette():
    """Predict the pose of a figure from its silhouette (Agarwal and Triggs, ICML 2004)."""
    data = GPy.util.datasets.silhouette()

    # GP regression model with default kernel settings
    model = GPy.models.GP_regression(data['X'], data['Y'])

    # every parameter is constrained positive
    model.constrain_positive('')

    # fit hyperparameters
    model.optimize()

    print(model)
    return model
|
||||
27
GPy/examples/sparse_GPLVM_demo.py
Normal file
27
GPy/examples/sparse_GPLVM_demo.py
Normal file
|
|
@@ -0,0 +1,27 @@
|
|||
import numpy as np
import pylab as pb
import GPy

# Fix the random seed so the demo is reproducible.
np.random.seed(1)
print("sparse GPLVM with RBF kernel")  # print() form works on Python 2.6+ and 3.x

# Problem dimensions.
N = 100  # number of data points
Q = 1    # latent dimensionality
D = 2    # observed dimensionality

#generate GPLVM-like data
X = np.random.rand(N, Q)
k = GPy.kern.rbf(Q, 1.0, 2.0) + GPy.kern.white(Q, 0.0001)
K = k.K(X)
Y = np.random.multivariate_normal(np.zeros(N), K, D).T

# sparse GPLVM with 7 inducing points
m = GPy.models.sparse_GPLVM(Y, Q, M=7)
m.constrain_positive('(rbf|bias|noise)')
# keep the white-noise variance bounded away from zero for stability
m.constrain_bounded('white', 1e-3, 1.0)
# m.plot()

# Latent space at the PCA initialisation, then after optimisation.
pb.figure()
m.plot()
pb.title('PCA initialisation')
pb.figure()
m.optimize(messages=1)
m.plot()
pb.title('After optimisation')
|
||||
66
GPy/examples/sparse_GP_regression_demo.py
Normal file
66
GPy/examples/sparse_GP_regression_demo.py
Normal file
|
|
@@ -0,0 +1,66 @@
|
|||
"""
Sparse Gaussian Processes regression with an RBF kernel
"""
# The docstring above must precede all imports to count as the module
# docstring; a duplicate `import numpy as np` has also been removed.
import pylab as pb
import numpy as np
import GPy

np.random.seed(2)
pb.ion()
N = 500  # number of data points

######################################
## 1 dimensional example

# sample inputs and outputs
X = np.random.uniform(-3., 3., (N, 1))
Y = np.sin(X) + np.random.randn(N, 1) * 0.05

# construct kernel
rbf = GPy.kern.Matern52(1)
noise = GPy.kern.white(1)
kernel = rbf + noise

# create sparse GP model with 10 inducing points
m1 = GPy.models.sparse_GP_regression(X, Y, kernel, M=10)

# constrain all parameters to be positive (inducing inputs stay free)
m1.constrain_positive('(variance|lengthscale|precision)')
#m1.constrain_positive('(variance|lengthscale)')
#m1.constrain_fixed('prec',10.)


#check gradient FIXME unit test please
m1.checkgrad()
# optimize and plot
m1.optimize('bfgs', messages=1)
m1.plot()
# print(m1)

######################################
## 2 dimensional example

# # sample inputs and outputs
# X = np.random.uniform(-3.,3.,(N,2))
# Y = np.sin(X[:,0:1]) * np.sin(X[:,1:2])+np.random.randn(N,1)*0.05

# # construct kernel
# rbf = GPy.kern.rbf(2)
# noise = GPy.kern.white(2)
# kernel = rbf + noise

# # create simple GP model
# m2 = GPy.models.sparse_GP_regression(X,Y,kernel, M = 50)

# # contrain all parameters to be positive (but not inducing inputs)
# m2.constrain_positive('(variance|lengthscale|precision)')

# #check gradient FIXME unit test please
# m2.checkgrad()

# # optimize and plot
# pb.figure()
# m2.optimize('tnc', messages = 1)
# m2.plot()
# print(m2)
|
||||
41
GPy/examples/warped_GP_demo.py
Normal file
41
GPy/examples/warped_GP_demo.py
Normal file
|
|
@@ -0,0 +1,41 @@
|
|||
"""Warped GP demo: fit a warped GP and a plain GP to nonlinearly
transformed sinusoid data and compare the two fits."""
import numpy as np
import scipy as sp
import pdb, sys, pickle
import matplotlib.pylab as plt
import GPy

np.random.seed(1)

N = 100
# sample inputs and noisy sinusoid outputs
X = np.random.uniform(-np.pi, np.pi, (N, 1))
Y = np.sin(X) + np.random.randn(N, 1) * 0.05
# Y += np.abs(Y.min()) + 0.5
# observations are a nonlinear (exponential) transform of Y
Z = np.exp(Y)  # Y**(1/3.0)

# rescale the targets to the interval [-0.5, 0.5]
z_max = Z.max()
z_min = Z.min()
Z = (Z - z_min) / (z_max - z_min) - 0.5

# warped GP with two warping terms
m = GPy.models.warpedGP(X, Z, warping_terms=2)
m.constrain_positive('(tanh_a|tanh_b|rbf|white|bias)')
m.randomize()

# compare the learned warping against the true transform, before and
# after training
plt.figure()
plt.xlabel('predicted f(Z)')
plt.ylabel('actual f(Z)')
plt.plot(m.Y, Y, 'o', alpha=0.5, label='before training')
m.optimize(messages=True)
plt.plot(m.Y, Y, 'o', alpha=0.5, label='after training')
plt.legend(loc=0)

m.plot_warping()
plt.figure()
plt.title('warped GP fit')
m.plot()

# baseline: ordinary GP regression on the same targets
m1 = GPy.models.GP_regression(X, Z)
m1.constrain_positive('(rbf|white|bias)')
m1.randomize()
m1.optimize(messages=True)
plt.figure()
plt.title('GP fit')
m1.plot()
|
||||
Loading…
Add table
Add a link
Reference in a new issue