added Brownian motion

James Hensman 2014-02-21 12:25:36 +00:00
parent 0dc9a32ba3
commit 365bc42140
6 changed files with 65 additions and 83 deletions

@@ -2,8 +2,8 @@ from _src.rbf import RBF
 from _src.white import White
 from _src.kern import Kern
 from _src.linear import Linear
-#import bias
-#import Brownian
+from _src.brownian import Brownian
+#from _src.bias import Bias
 #import coregionalize
 #import exponential
 #import eq_ode1

@@ -1,65 +0,0 @@
-# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
-# Licensed under the BSD 3-clause license (see LICENSE.txt)
-from kernpart import Kernpart
-import numpy as np
-def theta(x):
-    """Heaviside step function"""
-    return np.where(x>=0.,1.,0.)
-class Brownian(Kernpart):
-    """
-    Brownian Motion kernel.
-    :param input_dim: the number of input dimensions
-    :type input_dim: int
-    :param variance:
-    :type variance: float
-    """
-    def __init__(self,input_dim,variance=1.):
-        self.input_dim = input_dim
-        assert self.input_dim==1, "Brownian motion in 1D only"
-        self.num_params = 1
-        self.name = 'Brownian'
-        self._set_params(np.array([variance]).flatten())
-    def _get_params(self):
-        return self.variance
-    def _set_params(self,x):
-        assert x.shape==(1,)
-        self.variance = x
-    def _get_param_names(self):
-        return ['variance']
-    def K(self,X,X2,target):
-        if X2 is None:
-            X2 = X
-        target += self.variance*np.fmin(X,X2.T)
-    def Kdiag(self,X,target):
-        target += self.variance*X.flatten()
-    def _param_grad_helper(self,dL_dK,X,X2,target):
-        if X2 is None:
-            X2 = X
-        target += np.sum(np.fmin(X,X2.T)*dL_dK)
-    def dKdiag_dtheta(self,dL_dKdiag,X,target):
-        target += np.dot(X.flatten(), dL_dKdiag)
-    def gradients_X(self,dL_dK,X,X2,target):
-        raise NotImplementedError, "TODO"
-        #target += self.variance
-        #target -= self.variance*theta(X-X2.T)
-        #if X.shape==X2.shape:
-        #if np.all(X==X2):
-        #np.add(target[:,:,0],self.variance*np.diag(X2.flatten()-X.flatten()),target[:,:,0])
-    def dKdiag_dX(self,dL_dKdiag,X,target):
-        target += self.variance*dL_dKdiag[:,None]
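For contrast with the replacement below, a minimal sketch of the old Kernpart calling convention being deleted here: the caller preallocates a target array and the kernel adds its contribution in place. The inputs are made up for illustration, and the sketch assumes the deleted class above is importable.

import numpy as np

X = np.array([[0.5], [2.0]])
k_old = Brownian(input_dim=1, variance=1.)   # the Kernpart version above
target = np.zeros((2, 2))
k_old.K(X, None, target)                     # adds variance * min(X, X.T) in place
# target is now [[0.5, 0.5],
#                [0.5, 2.0]]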

GPy/kern/_src/brownian.py Normal file
@@ -0,0 +1,50 @@
+# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
+# Licensed under the BSD 3-clause license (see LICENSE.txt)
+from kern import Kern
+from ...core.parameterization import Param
+from ...core.parameterization.transformations import Logexp
+import numpy as np
+class Brownian(Kern):
+    """
+    Brownian motion in 1D only.
+    Negative times are treated as a separate (backwards!) Brownian motion.
+    :param input_dim: the number of input dimensions
+    :type input_dim: int
+    :param variance:
+    :type variance: float
+    """
+    def __init__(self, input_dim=1, variance=1., name='Brownian'):
+        assert input_dim==1, "Brownian motion in 1D only"
+        super(Brownian, self).__init__(input_dim, name)
+        self.variance = Param('variance', variance, Logexp())
+        self.add_parameters(self.variance)
+    def K(self,X,X2=None):
+        if X2 is None:
+            X2 = X
+        return self.variance*np.where(np.sign(X)==np.sign(X2.T),np.fmin(np.abs(X),np.abs(X2.T)), 0.)
+    def Kdiag(self,X):
+        return self.variance*np.abs(X.flatten())
+    def update_gradients_full(self, dL_dK, X, X2=None):
+        if X2 is None:
+            X2 = X
+        self.variance.gradient = np.sum(dL_dK * np.where(np.sign(X)==np.sign(X2.T),np.fmin(np.abs(X),np.abs(X2.T)), 0.))
+    #def update_gradients_diag(self, dL_dKdiag, X):
+        #self.variance.gradient = np.dot(np.abs(X.flatten()), dL_dKdiag)
+    #def gradients_X(self, dL_dK, X, X2=None):
+        #if X2 is None:
+            #return np.sum(self.variance*dL_dK*np.abs(X),1)[:,None]
+        #else:
+            #return np.sum(np.where(np.logical_and(np.abs(X)<np.abs(X2.T), np.sign(X)==np.sign(X2)), self.variance*dL_dK,0.),1)[:,None]
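A quick sanity check of the new kernel's behaviour, including the sign handling described in the docstring. This is a sketch assuming the class is exposed as GPy.kern.Brownian via the __init__.py hunk above; the inputs are made up.

import numpy as np
import GPy

k = GPy.kern.Brownian(input_dim=1, variance=1.)
X = np.array([[-2.], [1.], [3.]])
print(k.K(X))       # min(|x|,|x'|) where the signs agree, 0 otherwise:
# [[ 2.  0.  0.]
#  [ 0.  1.  1.]
#  [ 0.  1.  3.]]
print(k.Kdiag(X))   # |x| on the diagonal: [ 2.  1.  3.]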

@@ -40,8 +40,15 @@ class Kern(Parameterized):
         """Set the gradients of all parameters when doing full (N) inference."""
         raise NotImplementedError
     def update_gradients_sparse(self, dL_dKmm, dL_dKnm, dL_dKdiag, X, Z):
         """Set the gradients of all parameters when doing sparse (M) inference."""
-        raise NotImplementedError
+        target = np.zeros(self.size)
+        self.update_gradients_diag(dL_dKdiag, X)
+        self._collect_gradient(target)
+        self.update_gradients_full(dL_dKnm, X, Z)
+        self._collect_gradient(target)
+        self.update_gradients_full(dL_dKmm, Z, None)
+        self._collect_gradient(target)
+        self._set_gradient(target)
     def update_gradients_variational(self, dL_dKmm, dL_dpsi0, dL_dpsi1, dL_dpsi2, mu, S, Z):
         """Set the gradients of all parameters when doing variational (M) inference with uncertain inputs."""
         raise NotImplementedError
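The base-class default above assembles the sparse gradient from three reusable pieces: the diagonal term on X, the cross term Knm, and the inducing term Kmm. The `_collect_gradient` and `_set_gradient` helpers are not shown in this diff; a rough sketch of the accumulation pattern they imply, with hypothetical bodies, might be:

import numpy as np

def _collect_gradient(self, target):
    # Hypothetical: add the parameter gradients set by the last
    # update_gradients_* call into the running total.
    target += self.gradient

def _set_gradient(self, target):
    # Hypothetical: write the accumulated total back as the final gradient.
    self.gradient = target

This also explains the next hunk: the identical block is deleted from Linear, since the base class now provides it.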

@@ -74,16 +74,6 @@ class Linear(Kern):
     def Kdiag(self, X):
         return np.sum(self.variances * np.square(X), -1)
-    def update_gradients_sparse(self, dL_dKmm, dL_dKnm, dL_dKdiag, X, Z):
-        target = np.zeros(self.size)
-        self.update_gradients_diag(dL_dKdiag, X)
-        self._collect_gradient(target)
-        self.update_gradients_full(dL_dKnm, X, Z)
-        self._collect_gradient(target)
-        self.update_gradients_full(dL_dKmm, Z, None)
-        self._collect_gradient(target)
-        self._set_gradient(target)
     def update_gradients_full(self, dL_dK, X, X2=None):
         if self.ARD:
             if X2 is None:

@@ -92,11 +92,11 @@ def plot_fit(model, plot_limits=None, which_data_rows='all',
         ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25)
         #ax.plot(Xnew, yi[:,None], marker='x', linestyle='--',color=Tango.colorsHex['darkBlue']) #TODO apply this line for discrete outputs.
     #add error bars for uncertain (if input uncertainty is being modelled)
-    if hasattr(model,"has_uncertain_inputs"):
-        ax.errorbar(model.X[which_data, free_dims], model.likelihood.data[which_data, 0],
-            xerr=2 * np.sqrt(model.X_variance[which_data, free_dims]),
+    if hasattr(model,"has_uncertain_inputs") and model.has_uncertain_inputs():
+        ax.errorbar(model.X[which_data_rows, free_dims], model.Y[which_data_rows, which_data_ycols],
+            xerr=2 * np.sqrt(model.X_variance[which_data_rows, free_dims]),
             ecolor='k', fmt=None, elinewidth=.5, alpha=.5)