mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-08 19:42:39 +02:00
Example of sde_Matern covariance function is added, along with other small changes.
State-space example is slightly modified. Imports are corrected accordingly.
This commit is contained in:
parent
5b381af40d
commit
03d4096fe8
3 changed files with 72 additions and 2 deletions
|
|
@ -1,10 +1,20 @@
|
|||
import GPy
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
from GPy.models.state_space import StateSpace
|
||||
|
||||
X = np.linspace(0, 10, 2000)[:, None]
|
||||
Y = np.sin(X) + np.random.randn(*X.shape)*0.1
|
||||
|
||||
kernel = GPy.kern.Matern32(X.shape[1])
|
||||
m = StateSpace(X,Y, kernel)
|
||||
m = GPy.models.StateSpace(X,Y, kernel)
|
||||
|
||||
m.optimize()
|
||||
|
||||
print m
|
||||
|
||||
kernel1 = GPy.kern.Matern32(X.shape[1])
|
||||
m1 = GPy.models.GPRegression(X,Y, kernel1)
|
||||
|
||||
m1.optimize()
|
||||
|
||||
print m1
|
||||
58
GPy/kern/_src/sde_Matern.py
Normal file
58
GPy/kern/_src/sde_Matern.py
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Classes in this module enhance Matern covariance functions with the
|
||||
Stochastic Differential Equation (SDE) functionality.
|
||||
"""
|
||||
from .stationary import Matern32
|
||||
import numpy as np
|
||||
|
||||
class sde_Matern32(Matern32):
|
||||
"""
|
||||
|
||||
Class provide extra functionality to transfer this covariance function into
|
||||
SDE forrm.
|
||||
|
||||
Matern 3/2 kernel:
|
||||
|
||||
.. math::
|
||||
|
||||
k(r) = \\sigma^2 (1 + \\sqrt{3} r) \exp(- \sqrt{3} r) \\ \\ \\ \\ \\text{ where } r = \sqrt{\sum_{i=1}^input_dim \\frac{(x_i-y_i)^2}{\ell_i^2} }
|
||||
|
||||
"""
|
||||
|
||||
def sde(self):
|
||||
"""
|
||||
Return the state space representation of the covariance.
|
||||
"""
|
||||
|
||||
variance = float(self.variance.values)
|
||||
lengthscale = float(self.lengthscale.values)
|
||||
foo = np.sqrt(3.)/lengthscale
|
||||
F = np.array([[0, 1], [-foo**2, -2*foo]])
|
||||
L = np.array([[0], [1]])
|
||||
Qc = np.array([[12.*np.sqrt(3) / lengthscale**3 * variance]])
|
||||
H = np.array([[1, 0]])
|
||||
Pinf = np.array([[variance, 0],
|
||||
[0, 3.*variance/(lengthscale**2)]])
|
||||
# Allocate space for the derivatives
|
||||
dF = np.empty([F.shape[0],F.shape[1],2])
|
||||
dQc = np.empty([Qc.shape[0],Qc.shape[1],2])
|
||||
dPinf = np.empty([Pinf.shape[0],Pinf.shape[1],2])
|
||||
# The partial derivatives
|
||||
dFvariance = np.zeros([2,2])
|
||||
dFlengthscale = np.array([[0,0],
|
||||
[6./lengthscale**3,2*np.sqrt(3)/lengthscale**2]])
|
||||
dQcvariance = np.array([12.*np.sqrt(3)/lengthscale**3])
|
||||
dQclengthscale = np.array([-3*12*np.sqrt(3)/lengthscale**4*variance])
|
||||
dPinfvariance = np.array([[1,0],[0,3./lengthscale**2]])
|
||||
dPinflengthscale = np.array([[0,0],
|
||||
[0,-6*variance/lengthscale**3]])
|
||||
# Combine the derivatives
|
||||
dF[:,:,0] = dFvariance
|
||||
dF[:,:,1] = dFlengthscale
|
||||
dQc[:,:,0] = dQcvariance
|
||||
dQc[:,:,1] = dQclengthscale
|
||||
dPinf[:,:,0] = dPinfvariance
|
||||
dPinf[:,:,1] = dPinflengthscale
|
||||
|
||||
return (F, L, Qc, H, Pinf, dF, dQc, dPinf)
|
||||
|
|
@ -22,3 +22,5 @@ from .gp_var_gauss import GPVariationalGaussianApproximation
|
|||
from .one_vs_all_classification import OneVsAllClassification
|
||||
from .one_vs_all_sparse_classification import OneVsAllSparseClassification
|
||||
from .dpgplvm import DPBayesianGPLVM
|
||||
|
||||
from state_space import StateSpace
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue