Merging with the GPy devel branch to be in sync with the latest code; will make the pull request again.

This commit is contained in:
Akash Kumar Dhaka 2017-08-10 11:47:26 +03:00
commit f0f1a183b0
12 changed files with 204 additions and 40 deletions

View file

@@ -1 +1 @@
__version__ = "1.6.2"
__version__ = "1.7.7"

View file

@@ -124,7 +124,7 @@ class Binomial(Likelihood):
"""
N = Y_metadata['trials']
np.testing.assert_array_equal(N.shape, y.shape)
Ny = N-y
t1 = np.zeros(y.shape)
t2 = np.zeros(y.shape)
@@ -132,6 +132,7 @@ class Binomial(Likelihood):
t2[Ny>0] = -(Ny[Ny>0])/np.square(1.-inv_link_f[Ny>0])
return t1+t2
def d3logpdf_dlink3(self, inv_link_f, y, Y_metadata=None):
"""
Third order derivative log-likelihood function at y given inverse link of f w.r.t inverse link of f

View file

@@ -306,11 +306,7 @@ class StateSpaceKernelsTests(np.testing.TestCase):
gp_kernel=gp_kernel,
mean_compare_decimal=2, var_compare_decimal=2)
def test_forecast(self,):
"""
Test time-series forecasting.
"""
def test_forecast_regular(self,):
# Generate data ->
np.random.seed(339) # seed the random number generator
#import pdb; pdb.set_trace()
@@ -334,37 +330,102 @@ class StateSpaceKernelsTests(np.testing.TestCase):
#import pdb; pdb.set_trace()
def get_new_kernels():
periodic_kernel = GPy.kern.StdPeriodic(1,active_dims=[0,])
gp_kernel = GPy.kern.Linear(1, active_dims=[0,]) + GPy.kern.Bias(1, active_dims=[0,]) + periodic_kernel
gp_kernel.std_periodic.lengthscale.constrain_bounded(0.25, 1000)
gp_kernel.std_periodic.period.constrain_bounded(0.15, 100)
periodic_kernel = GPy.kern.StdPeriodic(1,active_dims=[0,])
gp_kernel = GPy.kern.Linear(1, active_dims=[0,]) + GPy.kern.Bias(1, active_dims=[0,]) + periodic_kernel
gp_kernel.std_periodic.lengthscale.constrain_bounded(0.25, 1000)
gp_kernel.std_periodic.period.constrain_bounded(0.15, 100)
periodic_kernel = GPy.kern.sde_StdPeriodic(1,active_dims=[0,])
ss_kernel = GPy.kern.sde_Linear(1,X,active_dims=[0,]) + \
GPy.kern.sde_Bias(1, active_dims=[0,]) + periodic_kernel
periodic_kernel = GPy.kern.sde_StdPeriodic(1,active_dims=[0,])
ss_kernel = GPy.kern.sde_Linear(1,X,active_dims=[0,]) + \
GPy.kern.sde_Bias(1, active_dims=[0,]) + periodic_kernel
ss_kernel.std_periodic.lengthscale.constrain_bounded(0.25, 1000)
ss_kernel.std_periodic.period.constrain_bounded(0.15, 100)
ss_kernel.std_periodic.lengthscale.constrain_bounded(0.25, 1000)
ss_kernel.std_periodic.period.constrain_bounded(0.15, 100)
return ss_kernel, gp_kernel
ss_kernel, gp_kernel = get_new_kernels()
self.run_for_model(X_train, Y_train, ss_kernel, kalman_filter_type = 'regular',
use_cython=False, optimize_max_iters=30, check_gradients=True,
predict_X=X_test,
gp_kernel=gp_kernel,
mean_compare_decimal=2, var_compare_decimal=2)
def test_forecast_svd(self,):
# Generate data ->
np.random.seed(339) # seed the random number generator
#import pdb; pdb.set_trace()
(X,Y) = generate_sine_data(x_points=None, sin_period=5.0, sin_ampl=5.0, noise_var=2.0,
plot = False, points_num=100, x_interval = (0, 40), random=True)
(X1,Y1) = generate_linear_data(x_points=X, tangent=1.0, add_term=20.0, noise_var=0.0,
plot = False, points_num=100, x_interval = (0, 40), random=True)
Y = Y + Y1
X_train = X[X <= 20]
Y_train = Y[X <= 20]
X_test = X[X > 20]
Y_test = Y[X > 20]
X.shape = (X.shape[0],1); Y.shape = (Y.shape[0],1)
X_train.shape = (X_train.shape[0],1); Y_train.shape = (Y_train.shape[0],1)
X_test.shape = (X_test.shape[0],1); Y_test.shape = (Y_test.shape[0],1)
# Generate data <-
#import pdb; pdb.set_trace()
periodic_kernel = GPy.kern.StdPeriodic(1,active_dims=[0,])
gp_kernel = GPy.kern.Linear(1, active_dims=[0,]) + GPy.kern.Bias(1, active_dims=[0,]) + periodic_kernel
gp_kernel.std_periodic.lengthscale.constrain_bounded(0.25, 1000)
gp_kernel.std_periodic.period.constrain_bounded(0.15, 100)
periodic_kernel = GPy.kern.sde_StdPeriodic(1,active_dims=[0,])
ss_kernel = GPy.kern.sde_Linear(1,X,active_dims=[0,]) + \
GPy.kern.sde_Bias(1, active_dims=[0,]) + periodic_kernel
ss_kernel.std_periodic.lengthscale.constrain_bounded(0.25, 1000)
ss_kernel.std_periodic.period.constrain_bounded(0.15, 100)
ss_kernel, gp_kernel = get_new_kernels()
self.run_for_model(X_train, Y_train, ss_kernel, kalman_filter_type = 'svd',
use_cython=False, optimize_max_iters=30, check_gradients=False,
predict_X=X_test,
gp_kernel=gp_kernel,
mean_compare_decimal=2, var_compare_decimal=2)
ss_kernel, gp_kernel = get_new_kernels()
def test_forecast_svd_cython(self,):
# Generate data ->
np.random.seed(339) # seed the random number generator
#import pdb; pdb.set_trace()
(X,Y) = generate_sine_data(x_points=None, sin_period=5.0, sin_ampl=5.0, noise_var=2.0,
plot = False, points_num=100, x_interval = (0, 40), random=True)
(X1,Y1) = generate_linear_data(x_points=X, tangent=1.0, add_term=20.0, noise_var=0.0,
plot = False, points_num=100, x_interval = (0, 40), random=True)
Y = Y + Y1
X_train = X[X <= 20]
Y_train = Y[X <= 20]
X_test = X[X > 20]
Y_test = Y[X > 20]
X.shape = (X.shape[0],1); Y.shape = (Y.shape[0],1)
X_train.shape = (X_train.shape[0],1); Y_train.shape = (Y_train.shape[0],1)
X_test.shape = (X_test.shape[0],1); Y_test.shape = (Y_test.shape[0],1)
# Generate data <-
#import pdb; pdb.set_trace()
periodic_kernel = GPy.kern.StdPeriodic(1,active_dims=[0,])
gp_kernel = GPy.kern.Linear(1, active_dims=[0,]) + GPy.kern.Bias(1, active_dims=[0,]) + periodic_kernel
gp_kernel.std_periodic.lengthscale.constrain_bounded(0.25, 1000)
gp_kernel.std_periodic.period.constrain_bounded(0.15, 100)
periodic_kernel = GPy.kern.sde_StdPeriodic(1,active_dims=[0,])
ss_kernel = GPy.kern.sde_Linear(1,X,active_dims=[0,]) + \
GPy.kern.sde_Bias(1, active_dims=[0,]) + periodic_kernel
ss_kernel.std_periodic.lengthscale.constrain_bounded(0.25, 1000)
ss_kernel.std_periodic.period.constrain_bounded(0.15, 100)
self.run_for_model(X_train, Y_train, ss_kernel, kalman_filter_type = 'svd',
use_cython=True, optimize_max_iters=30, check_gradients=False,
predict_X=X_test,

View file

@@ -85,6 +85,7 @@ class InferenceGPEP(unittest.TestCase):
inference_method=inf,
likelihood=lik)
K = self.model.kern.K(X)
post_params, ga_approx, cav_params, log_Z_tilde = self.model.inference_method.expectation_propagation(K, ObsAr(Y), lik, None)
mu_tilde = ga_approx.v / ga_approx.tau.astype(float)

View file

@@ -206,7 +206,10 @@ def authorize_download(dataset_name=None):
def download_data(dataset_name=None):
"""Check with the user that the are happy with terms and conditions for the data set, then download it."""
import itertools
try:
from itertools import zip_longest
except ImportError:
from itertools import izip_longest as zip_longest
dr = data_resources[dataset_name]
if not authorize_download(dataset_name):
@@ -220,8 +223,8 @@ def download_data(dataset_name=None):
if 'suffices' in dr: zip_urls += (dr['suffices'], )
else: zip_urls += ([],)
for url, files, save_names, suffices in itertools.zip_longest(*zip_urls, fillvalue=[]):
for f, save_name, suffix in itertools.zip_longest(files, save_names, suffices, fillvalue=None):
for url, files, save_names, suffices in zip_longest(*zip_urls, fillvalue=[]):
for f, save_name, suffix in zip_longest(files, save_names, suffices, fillvalue=None):
download_url(os.path.join(url,f), dataset_name, save_name, suffix=suffix)
return True