Merge branch 'devel' of https://github.com/SheffieldML/GPy into gradientsxx

This commit is contained in:
alessandratosi 2016-04-21 13:05:32 +01:00
commit 5c0c1d4c3d
6 changed files with 71 additions and 9 deletions

View file

@@ -63,6 +63,6 @@ deploy:
on: on:
tags: true tags: true
branch: deploy branch: deploy
condition: "$TRAVIS_OS_NAME" == "osx" || ( "$TRAVIS_OS_NAME" == "linux" && "$PYTHON_VERSION" == "2.7" ) #condition: "$TRAVIS_OS_NAME" == "osx" || ( "$TRAVIS_OS_NAME" == "linux" && "$PYTHON_VERSION" == "2.7" )
distributions: $DIST distributions: $DIST
skip_cleanup: true skip_cleanup: true

View file

@@ -10,7 +10,7 @@ from .src.add import Add
from .src.prod import Prod from .src.prod import Prod
from .src.rbf import RBF from .src.rbf import RBF
from .src.linear import Linear, LinearFull from .src.linear import Linear, LinearFull
from .src.static import Bias, White, Fixed, WhiteHeteroscedastic from .src.static import Bias, White, Fixed, WhiteHeteroscedastic, Precomputed
from .src.brownian import Brownian from .src.brownian import Brownian
from .src.stationary import Exponential, OU, Matern32, Matern52, ExpQuad, RatQuad, Cosine from .src.stationary import Exponential, OU, Matern32, Matern52, ExpQuad, RatQuad, Cosine
from .src.mlp import MLP from .src.mlp import MLP

View file

@@ -192,3 +192,53 @@ class Fixed(Static):
def update_gradients_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior): def update_gradients_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
self.variance.gradient = dL_dpsi0.sum() self.variance.gradient = dL_dpsi0.sum()
class Precomputed(Fixed):
    def __init__(self, input_dim, covariance_matrix, variance=1., active_dims=None, name='precomputed'):
        """
        Kernel whose Gram matrix is a fixed, precomputed covariance matrix.

        Inputs X are column vectors of (integer-valued) indices into the rows
        and columns of ``covariance_matrix``; the kernel returns ``variance``
        times the selected sub-matrix.

        Usage example:

            import numpy as np
            from GPy.models import GPClassification
            from GPy.kern import Precomputed
            # NOTE: sklearn.cross_validation is deprecated; in new code use
            # sklearn.model_selection.LeaveOneOut instead.
            from sklearn.cross_validation import LeaveOneOut

            n = 10
            d = 100
            X = np.arange(n).reshape((n,1))          # column vector of indices
            y = 2*np.random.binomial(1,0.5,(n,1))-1
            X0 = np.random.randn(n,d)
            k = np.dot(X0,X0.T)
            kern = Precomputed(1,k)                  # k is a n x n covariance matrix
            cv = LeaveOneOut(n)
            ypred = y.copy()
            for train, test in cv:
                m = GPClassification(X[train], y[train], kernel=kern)
                m.optimize()
                ypred[test] = 2*(m.predict(X[test])[0]>0.5)-1

        :param input_dim: the number of input dimensions
        :type input_dim: int
        :param covariance_matrix: the precomputed covariance matrix indexed by X
        :type covariance_matrix: np.ndarray
        :param variance: the variance of the kernel
        :type variance: float
        """
        super(Precomputed, self).__init__(input_dim, covariance_matrix, variance, active_dims, name)

    @staticmethod
    def _index(X):
        # The first column of X carries the (possibly float-typed) indices
        # into the precomputed matrix; convert once, here, for all methods.
        return X[:, 0].astype('int')

    def K(self, X, X2=None):
        """Return variance * fixed_K[rows(X), cols(X2)] (X2 defaults to X)."""
        i = self._index(X)
        j = i if X2 is None else self._index(X2)
        return self.variance * self.fixed_K[i][:, j]

    def Kdiag(self, X):
        """Return the diagonal of K(X, X)."""
        i = self._index(X)
        # Fancy-index the diagonal entries directly: O(n), instead of
        # materialising the full n x n sub-matrix and taking .diagonal().
        return self.variance * self.fixed_K[i, i]

    def update_gradients_full(self, dL_dK, X, X2=None):
        """Accumulate the variance gradient: sum(dL_dK * fixed sub-matrix)."""
        i = self._index(X)
        j = i if X2 is None else self._index(X2)
        self.variance.gradient = np.einsum('ij,ij', dL_dK, self.fixed_K[i][:, j])

    def update_gradients_diag(self, dL_dKdiag, X):
        """Accumulate the variance gradient from the diagonal only."""
        i = self._index(X)
        # Equivalent to einsum('i,ii', dL_dKdiag, sub-matrix) but touches
        # only the n diagonal entries.
        self.variance.gradient = np.einsum('i,i', dL_dKdiag, self.fixed_K[i, i])

View file

@@ -230,7 +230,7 @@ class StateSpaceKernelsTests(np.testing.TestCase):
use_cython=False, optimize_max_iters=10, check_gradients=True, use_cython=False, optimize_max_iters=10, check_gradients=True,
predict_X=X, predict_X=X,
gp_kernel=gp_kernel, gp_kernel=gp_kernel,
mean_compare_decimal=5, var_compare_decimal=5) mean_compare_decimal=2, var_compare_decimal=2)
ss_kernel, gp_kernel = get_new_kernels() ss_kernel, gp_kernel = get_new_kernels()
self.run_for_model(X, Y, ss_kernel, kalman_filter_type = 'svd', self.run_for_model(X, Y, ss_kernel, kalman_filter_type = 'svd',

View file

@@ -2,11 +2,14 @@
# Licensed under the BSD 3-clause license (see LICENSE.txt) # Licensed under the BSD 3-clause license (see LICENSE.txt)
import unittest import unittest
import numpy as np from unittest.case import skip
import GPy import GPy
from GPy.core.parameterization.param import Param from GPy.core.parameterization.param import Param
import numpy as np
from ..util.config import config from ..util.config import config
from unittest.case import skip
verbose = 0 verbose = 0
@@ -402,6 +405,15 @@ class KernelGradientTestsContinuous(unittest.TestCase):
k.randomize() k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose)) self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Precomputed(self):
    """Gradient checks for the Precomputed kernel (indexed by column of ints)."""
    Xall = np.concatenate([self.X, self.X2])
    cov = np.dot(Xall, Xall.T)
    # Precomputed reads indices from the FIRST COLUMN of its inputs
    # (X[:,0]), so the index arrays must be column vectors of shape
    # (n, 1).  The original reshape(1, n) produced row vectors, which
    # collapse to a single index and make the gradient check trivial.
    X = np.arange(self.N).reshape(self.N, 1)
    X2 = np.arange(self.N, 2*self.N+10).reshape(self.N+10, 1)
    k = GPy.kern.Precomputed(1, cov)
    k.randomize()
    self.assertTrue(check_kernel_gradient_functions(k, X=X, X2=X2, verbose=verbose))
class KernelTestsMiscellaneous(unittest.TestCase): class KernelTestsMiscellaneous(unittest.TestCase):
def setUp(self): def setUp(self):
N, D = 100, 10 N, D = 100, 10

View file

@@ -41,10 +41,10 @@ Python 2.7, 3.4 and higher
## Citation ## Citation
@Misc{gpy2014, @Misc{gpy2014,
author = {{The GPy authors}}, author = {{GPy}},
title = {{GPy}: A Gaussian process framework in python}, title = {{GPy}: A Gaussian process framework in python},
howpublished = {\url{http://github.com/SheffieldML/GPy}}, howpublished = {\url{http://github.com/SheffieldML/GPy}},
year = {2012--2015} year = {since 2012}
} }
### Pronunciation: ### Pronunciation: