mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-08 19:42:39 +02:00
very basic GP_regression demo is working
This commit is contained in:
parent
07d793e309
commit
4b4f1da128
10 changed files with 31 additions and 41 deletions
|
|
@ -6,5 +6,5 @@ import kern
|
|||
import models
|
||||
import inference
|
||||
import util
|
||||
import examples
|
||||
#import examples
|
||||
from core import priors
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ import sys, pdb
|
|||
from parameterised import parameterised, truncate_pad
|
||||
import priors
|
||||
from ..util.linalg import jitchol
|
||||
from ..inference import optimization, SGD
|
||||
from ..inference import optimization
|
||||
|
||||
class model(parameterised):
|
||||
def __init__(self):
|
||||
|
|
|
|||
|
|
@ -19,18 +19,14 @@ pb.close('all')
|
|||
X = np.random.uniform(-3.,3.,(20,1))
|
||||
Y = np.sin(X)+np.random.randn(20,1)*0.05
|
||||
|
||||
# construct kernel
|
||||
rbf = GPy.kern.rbf(1)
|
||||
noise = GPy.kern.white(1)
|
||||
kernel = rbf + noise
|
||||
|
||||
# create simple GP model
|
||||
m = GPy.models.GP_regression(X,Y, kernel=kernel)
|
||||
m = GPy.models.GP_regression(X,Y)
|
||||
|
||||
# constrain all parameters to be positive
|
||||
m.constrain_positive('')
|
||||
|
||||
# optimize and plot
|
||||
m.optimize('rasm', max_f_eval = 1000)
|
||||
m.optimize('tnc', max_f_eval = 1000)
|
||||
m.plot()
|
||||
print(m)
|
||||
|
||||
|
|
@ -41,11 +37,6 @@ print(m)
|
|||
X = np.random.uniform(-3.,3.,(40,2))
|
||||
Y = np.sin(X[:,0:1]) * np.sin(X[:,1:2])+np.random.randn(40,1)*0.05
|
||||
|
||||
# construct kernel
|
||||
rbf = GPy.kern.rbf(2)
|
||||
noise = GPy.kern.white(2)
|
||||
kernel = rbf + noise
|
||||
|
||||
# create simple GP model
|
||||
m = GPy.models.GP_regression(X,Y)
|
||||
|
||||
|
|
@ -53,7 +44,7 @@ m = GPy.models.GP_regression(X,Y)
|
|||
m.constrain_positive('')
|
||||
# optimize and plot
|
||||
pb.figure()
|
||||
m.optimize('rasm', max_f_eval = 1000)
|
||||
m.optimize('tnc', max_f_eval = 1000)
|
||||
m.plot()
|
||||
print(m)
|
||||
|
||||
|
|
|
|||
|
|
@ -166,8 +166,8 @@ class kern(parameterised):
|
|||
slices1, slices2 = self._process_slices(slices1,slices2)
|
||||
if X2 is None:
|
||||
X2 = X
|
||||
target = np.zeros((X.shape[0],X2.shape[0],self.Nparam))
|
||||
[p.dK_dtheta(X[s1,i_s],X2[s2,i_s],target[s1,s2,ps]) for p,i_s,ps,s1,s2 in zip(self.parts, self.input_slices, self.param_slices, slices1, slices2)]
|
||||
target = np.zeros(self.Nparam)
|
||||
[p.dK_dtheta(partial,X[s1,i_s],X2[s2,i_s],target[ps]) for p,i_s,ps,s1,s2 in zip(self.parts, self.input_slices, self.param_slices, slices1, slices2)]
|
||||
return target
|
||||
|
||||
def dK_dX(self,X,X2=None,slices1=None,slices2=None):
|
||||
|
|
@ -185,11 +185,13 @@ class kern(parameterised):
|
|||
[p.Kdiag(X[s,i_s],target=target[s]) for p,i_s,s in zip(self.parts,self.input_slices,slices)]
|
||||
return target
|
||||
|
||||
def dKdiag_dtheta(self,X,slices=None):
|
||||
def dKdiag_dtheta(self,partial,X,slices=None):
|
||||
assert X.shape[1]==self.D
|
||||
assert len(partial.shape)==1
|
||||
assert partial.size==X.shape[0]
|
||||
slices = self._process_slices(slices,False)
|
||||
target = np.zeros((X.shape[0],self.Nparam))
|
||||
[p.dKdiag_dtheta(X[s,i_s],target[s,ps]) for p,i_s,s,ps in zip(self.parts,self.input_slices,slices,self.param_slices)]
|
||||
target = np.zeros(self.Nparam)
|
||||
[p.dKdiag_dtheta(partial,X[s,i_s],target[ps]) for p,i_s,s,ps in zip(self.parts,self.input_slices,slices,self.param_slices)]
|
||||
return target
|
||||
|
||||
def dKdiag_dX(self, X, slices=None):
|
||||
|
|
|
|||
|
|
@ -50,14 +50,13 @@ class rbf(kernpart):
|
|||
def Kdiag(self,X,target):
|
||||
np.add(target,self.variance,target)
|
||||
|
||||
def dK_dtheta(self,X,X2,target):
|
||||
"""Return shape is NxMx(Ntheta)"""
|
||||
def dK_dtheta(self,partial,X,X2,target):
|
||||
self._K_computations(X,X2)
|
||||
target[:,:,0] += self._K_dvar
|
||||
target[:,:,1] += self._K_dvar*self.variance*self._K_dist2/self.lengthscale
|
||||
target[0] += np.sum(self._K_dvar*partial)
|
||||
target[1] += np.sum(self._K_dvar*self.variance*self._K_dist2/self.lengthscale*partial)
|
||||
|
||||
def dKdiag_dtheta(self,X,target):
|
||||
np.add(target[:,0],1.,target[:,0])
|
||||
target[0] += partial
|
||||
|
||||
def dK_dX(self,X,X2,target):
|
||||
self._K_computations(X,X2)
|
||||
|
|
|
|||
|
|
@ -37,10 +37,10 @@ class white(kernpart):
|
|||
def Kdiag(self,X,target):
|
||||
target += self.variance
|
||||
|
||||
def dK_dtheta(self,X,X2,target):
|
||||
def dK_dtheta(self,partial,X,X2,target):
|
||||
if X.shape==X2.shape:
|
||||
if np.all(X==X2):
|
||||
np.add(target[:,:,0],np.eye(X.shape[0]),target[:,:,0])
|
||||
target += np.trace(partial)
|
||||
|
||||
def dKdiag_dtheta(self,X,target):
|
||||
np.add(target[:,0],1.,target[:,0])
|
||||
|
|
@ -50,7 +50,7 @@ class white(kernpart):
|
|||
|
||||
def dKdiag_dX(self,X,target):
|
||||
pass
|
||||
|
||||
|
||||
def psi0(self,Z,mu,S,target):
|
||||
target += self.variance
|
||||
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import pylab as pb
|
|||
from scipy import stats, linalg
|
||||
from .. import kern
|
||||
from ..inference.Expectation_Propagation import EP,Full
|
||||
from ..inference.likelihoods import likelihood,probit,poisson,gaussian
|
||||
from ..inference.likelihoods import likelihood,probit#,poisson,gaussian
|
||||
from ..core import model
|
||||
from ..util.linalg import pdinv,jitchol
|
||||
from ..util.plot import gpplot
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ class GP_regression(model):
|
|||
|
||||
def __init__(self,X,Y,kernel=None,normalize_X=False,normalize_Y=False, Xslices=None):
|
||||
if kernel is None:
|
||||
kernel = kern.rbf(X.shape[1]) + kern.bias(X.shape[1]) + kern.white(X.shape[1])
|
||||
kernel = kern.rbf(X.shape[1]) + kern.white(X.shape[1])
|
||||
|
||||
# parse arguments
|
||||
self.Xslices = Xslices
|
||||
|
|
@ -103,7 +103,7 @@ class GP_regression(model):
|
|||
return dL_dK
|
||||
|
||||
def log_likelihood_gradients(self):
|
||||
return self.kern.dK_dtheta(self.X,partial=self.dL_dK())
|
||||
return self.kern.dK_dtheta(partial=self.dL_dK(),X=self.X)
|
||||
|
||||
def predict(self,Xnew, slices=None):
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -6,6 +6,6 @@ from GP_regression import GP_regression
|
|||
from sparse_GP_regression import sparse_GP_regression
|
||||
from GPLVM import GPLVM
|
||||
from warped_GP import warpedGP
|
||||
from simple_GP_EP import GP_EP
|
||||
from GP_EP import GP_EP
|
||||
from generalized_FITC import generalized_FITC
|
||||
from sparse_GPLVM import sparse_GPLVM
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ from .. import kern
|
|||
from ..core import model
|
||||
from ..util.linalg import pdinv,mdot
|
||||
from ..util.plot import gpplot
|
||||
from ..inference.Expectation_Propagation import EP,Full,DTC,FITC
|
||||
from ..inference.Expectation_Propagation import EP,Full,FITC
|
||||
from ..inference.likelihoods import likelihood,probit
|
||||
|
||||
class generalized_FITC(model):
|
||||
|
|
@ -17,14 +17,12 @@ class generalized_FITC(model):
|
|||
"""
|
||||
Naish-Guzman, A. and Holden, S. (2008) implementation of EP with FITC.
|
||||
|
||||
Arguments
|
||||
---------
|
||||
X : input observations
|
||||
likelihood : Output's likelihood (likelihood class)
|
||||
kernel : a GPy kernel
|
||||
inducing : Either an array specifying the inducing points location or a scalar defining their number.
|
||||
epsilon_ep : EP convergence criterion, maximum squared difference allowed between mean updates to stop iterations (float)
|
||||
powerep : Power-EP parameters (eta,delta) - 2x1 numpy array (floats)
|
||||
:param X: input observations
|
||||
:param likelihood: Output's likelihood (likelihood class)
|
||||
:param kernel: a GPy kernel
|
||||
:param inducing: Either an array specifying the inducing points location or a scalar defining their number.
|
||||
:param epsilon_ep: EP convergence criterion, maximum squared difference allowed between mean updates to stop iterations (float)
|
||||
:param powerep: Power-EP parameters (eta,delta) - 2x1 numpy array (floats)
|
||||
"""
|
||||
assert isinstance(kernel,kern.kern)
|
||||
self.likelihood = likelihood
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue