Merge branch 'devel' of github.com:SheffieldML/GPy into devel

Max Zwiessele 2014-11-03 14:20:03 +00:00
commit 80b1bbbd64
16 changed files with 74 additions and 181 deletions

View file

@@ -14,7 +14,6 @@ import examples
import likelihoods
import testing
from numpy.testing import Tester
from nose.tools import nottest
import kern
import plotting
@@ -22,10 +21,16 @@ import plotting
from core import Model
from core.parameterization import Param, Parameterized, ObsAr

@nottest
def tests():
    Tester(testing).test(verbose=10)
#@nottest
try:
    # Avoid a hard nose dependency: only apply @nottest when nose is installed
    from nose.tools import nottest
    @nottest
    def tests():
        Tester(testing).test(verbose=10)
except:
    def tests():
        Tester(testing).test(verbose=10)

def load(file_path):
    """
@@ -36,4 +41,4 @@ def load(file_path):
    import cPickle as pickle
    with open(file_path, 'rb') as f:
        m = pickle.load(f)
    return m
    return m
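For context, a minimal usage sketch of the loader above. The file name and data are hypothetical; `Model.pickle` is used with a file object the same way in the model tests later in this commit:

import numpy as np
import GPy

# fit a small model and pickle it to disk (hypothetical data)
X = np.random.rand(20, 1)
Y = np.sin(X) + 0.05 * np.random.randn(20, 1)
m = GPy.models.GPRegression(X, Y)
with open('gp_model.pkl', 'wb') as f:
    m.pickle(f)

# restore it with the module-level loader defined above
m2 = GPy.load('gp_model.pkl')
print m2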

View file

@@ -468,7 +468,7 @@ def uncertain_inputs_sparse_regression(max_iters=200, optimize=True, plot=True):
    k = GPy.kern.RBF(1)
    # create simple GP Model - no input uncertainty on this one
    m = GPy.models.SparseGPRegression(X, Y, kernel=GPy.kern.RBF(1), Z=Z)
    m = GPy.models.SparseGPRegression(X, Y, kernel=k, Z=Z)
    if optimize:
        m.optimize('scg', messages=1, max_iters=max_iters)
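The fix matters because the previous line built a second, unused RBF kernel: after the change, `k` is the kernel object the model actually owns and optimizes, so its fitted parameters can be inspected afterwards. A minimal sketch, with hypothetical data shapes:

import numpy as np
import GPy

X = np.random.rand(50, 1)
Y = np.sin(6 * X) + 0.05 * np.random.randn(50, 1)
Z = np.random.rand(10, 1)          # inducing inputs
k = GPy.kern.RBF(1)
m = GPy.models.SparseGPRegression(X, Y, kernel=k, Z=Z)
m.optimize()
print k.lengthscale                # k is m's kernel, so this reflects the fit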

View file

@@ -14,6 +14,9 @@ import numpy as np
from ...util.linalg import mdot, jitchol, dpotrs, dtrtrs, dpotri, symmetrify, pdinv
from posterior import Posterior
import warnings
def warning_on_one_line(message, category, filename, lineno, file=None, line=None):
    return ' %s:%s: %s:%s\n' % (filename, lineno, category.__name__, message)
warnings.formatwarning = warning_on_one_line
from scipy import optimize
from . import LatentFunctionInference
@@ -29,8 +32,11 @@ class Laplace(LatentFunctionInference):
        """
        self._mode_finding_tolerance = 1e-7
        self._mode_finding_max_iter = 40
        self.bad_fhat = True
        self._mode_finding_max_iter = 60
        self.bad_fhat = False
        #Store whether it is the first run of the inference so that we can choose whether we need
        #to calculate things or reuse old variables
        self.first_run = True
        self._previous_Ki_fhat = None

    def inference(self, kern, X, likelihood, Y, Y_metadata=None):
@@ -42,8 +48,9 @@ class Laplace(LatentFunctionInference):
        K = kern.K(X)

        #Find mode
        if self.bad_fhat:
        if self.bad_fhat or self.first_run:
            Ki_f_init = np.zeros_like(Y)
            self.first_run = False
        else:
            Ki_f_init = self._previous_Ki_fhat
@@ -123,11 +130,11 @@ class Laplace(LatentFunctionInference):
        #Warn of bad fits
        if difference > self._mode_finding_tolerance:
            if not self.bad_fhat:
                warnings.warn("Not perfect f_hat fit difference: {}".format(difference))
                warnings.warn("Not perfect mode found (f_hat). difference: {}, iteration: {} out of max {}".format(difference, iteration, self._mode_finding_max_iter))
            self.bad_fhat = True
        elif self.bad_fhat:
            self.bad_fhat = False
            warnings.warn("f_hat now fine again")
            warnings.warn("f_hat now fine again. difference: {}, iteration: {} out of max {}".format(difference, iteration, self._mode_finding_max_iter))

        return f, Ki_f
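The bookkeeping above amounts to a warm start for the mode finding: reuse the previous `Ki_fhat` as the initial point unless this is the first run or the last fit was flagged bad. A stripped-down sketch of the pattern, illustrative only and not GPy's API:

import numpy as np

class WarmStartCache(object):
    # hypothetical helper showing the first_run / bad_fhat logic in isolation
    def __init__(self):
        self.bad_fhat = False
        self.first_run = True
        self._previous = None

    def initial_guess(self, shape):
        # cold start on the first run or after a bad fit, otherwise reuse
        if self.bad_fhat or self.first_run or self._previous is None:
            self.first_run = False
            return np.zeros(shape)
        return self._previous

    def store(self, solution, converged):
        # remember the latest solution and whether it can be trusted next time
        self._previous = solution.copy()
        self.bad_fhat = not converged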

View file

@@ -167,6 +167,7 @@ class VarDTC(LatentFunctionInference):
            woodbury_vector = Cpsi1Vf # == Cpsi1V
        else:
            print 'foobar'
            stop
            psi1V = np.dot(Y.T*beta, psi1).T
            tmp, _ = dtrtrs(Lm, psi1V, lower=1, trans=0)
            tmp, _ = dpotrs(LB, tmp, lower=1)

View file

@@ -0,0 +1 @@
from hmc import HMC

View file

@@ -1,10 +1,23 @@
"""HMC implementation"""
# ## Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np

class HMC:
    def __init__(self,model,M=None,stepsize=1e-1):
    """
    An implementation of Hybrid Monte Carlo (HMC) for GPy models

    Initialize an object for HMC sampling. Note that the status of the model (model parameters) will be changed during sampling.

    :param model: the GPy model that will be sampled
    :type model: GPy.core.Model
    :param M: the mass matrix (an identity matrix by default)
    :type M: numpy.ndarray
    :param stepsize: the step size for HMC sampling
    :type stepsize: float
    """
    def __init__(self, model, M=None,stepsize=1e-1):
        self.model = model
        self.stepsize = stepsize
        self.p = np.empty_like(model.optimizer_array.copy())
@@ -14,9 +27,19 @@ class HMC:
        self.M = M
        self.Minv = np.linalg.inv(self.M)

    def sample(self, m_iters=1000, hmc_iters=20):
        params = np.empty((m_iters,self.p.size))
        for i in xrange(m_iters):
    def sample(self, num_samples=1000, hmc_iters=20):
        """
        Sample the (unfixed) model parameters.

        :param num_samples: the number of samples to draw (1000 by default)
        :type num_samples: int
        :param hmc_iters: the number of leap-frog iterations (20 by default)
        :type hmc_iters: int
        :return: the array of parameter samples, with size N x P (N: the number of samples, P: the number of parameters to sample)
        :rtype: numpy.ndarray
        """
        params = np.empty((num_samples,self.p.size))
        for i in xrange(num_samples):
            self.p[:] = np.random.multivariate_normal(np.zeros(self.p.size),self.M)
            H_old = self._computeH()
            theta_old = self.model.optimizer_array.copy()
@@ -125,8 +148,6 @@ class HMC_shortcut:
                    break
                else:
                    Hlist = range(hmc_iters+pos,hmc_iters+pos+self.groupsize)
                    # print Hlist
                    # print self._testH(H_buf[Hlist])
                    if self._testH(H_buf[Hlist]):
                        pos += -1
@@ -139,14 +160,10 @@ class HMC_shortcut:
                    pos_new = pos + r
                    self.model.optimizer_array = theta_buf[hmc_iters+pos_new]
                    self.p[:] = p_buf[hmc_iters+pos_new] # the sign of momentum might be wrong!
                    # print reversal[0],pos,pos_new
                    # print H_buf
                    break

    def _testH(self, Hlist):
        Hstd = np.std(Hlist)
        # print Hlist
        # print Hstd
        if Hstd<self.Hstd_th[0] or Hstd>self.Hstd_th[1]:
            return False
        else:
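A minimal sampling sketch against the docstrings added above, assuming the new `GPy.inference.mcmc` package location introduced in this commit; the data and step size are hypothetical:

import numpy as np
import GPy
from GPy.inference.mcmc import HMC

# hypothetical 1-D regression problem
X = np.random.rand(30, 1)
Y = np.sin(6 * X) + 0.1 * np.random.randn(30, 1)
m = GPy.models.GPRegression(X, Y)
m.optimize()                           # start the chain near the posterior mode

hmc = HMC(m, stepsize=1e-2)            # M defaults to the identity mass matrix
samples = hmc.sample(num_samples=300)  # N x P array, per the docstring above
print samples.mean(axis=0)

Note that, as the docstring warns, sampling mutates the model's parameters in place.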

View file

@@ -1,63 +0,0 @@
import numpy as np
from scipy.stats import norm
import matplotlib.pyplot as plt

####### Preliminary BO with standard acquisition functions ###############################
# Types of BO
# MM: Maximum (or minimum) mean
# MPI: Maximum posterior improvement
# MUI: Maximum upper interval

def BOacquisition(X,Y,model,type_bo="MPI",type_objective="max",par_mpi = 0,z_mui=1.96,plot=True,n_eval = 500):
    # Only works in dimension 1
    # Grid where the GP will be evaluated
    X_star = np.linspace(min(X)-10,max(X)+10,n_eval)
    X_star = X_star[:,None]

    # Posterior GP evaluated on the grid
    fest = model.predict(X_star)

    # Calculate the acquisition function
    ## IF Maximize
    if type_objective == "max":
        if type_bo == "MPI": # add others here
            acqu = norm.cdf((fest[0]-(1+par_mpi)*max(fest[0])) / fest[1])
            acqu = acqu/(2*max(acqu))
        if type_bo == "MM":
            acqu = fest[0]/max(fest[0])
            acqu = acqu/(2*max(acqu))
        if type_bo == "MUI":
            acqu = fest[0]+z_mui*np.sqrt(fest[1])
            acqu = acqu/(2*max(acqu))
        optimal_loc = np.argmax(acqu)
        x_new = X_star[optimal_loc]

    ## IF Minimize
    if type_objective == "min":
        if type_bo == "MPI": # add others here
            acqu = 1-norm.cdf((fest[0]-(1+par_mpi)*min(fest[0])) / fest[1])
            acqu = acqu/(2*max(acqu))
        if type_bo == "MM":
            acqu = 1-fest[0]/max(fest[0])
            acqu = acqu/(2*max(acqu))
        if type_bo == "MUI":
            acqu = -fest[0]+z_mui*np.sqrt(fest[1])
            acqu = acqu/(2*max(acqu))
        optimal_loc = np.argmax(acqu)
        x_new = X_star[optimal_loc]

    # Plot GP posterior, collected data and the acquisition function
    if plot:
        plt.plot(X,Y , 'p')
        plt.title('Acquisition function')
        model.plot()
        plt.plot(X_star, acqu, 'r--')

    # Return the point where we should take the new sample
    return x_new
###############################################################

View file

@@ -1,3 +1,2 @@
from scg import SCG
from optimization import *
from hmc import HMC,HMC_shortcut

View file

@@ -17,10 +17,15 @@ from GPy.kern._src.rbf import RBF
from GPy.kern._src.linear import Linear
from GPy.kern._src.static import Bias, White
from GPy.examples.dimensionality_reduction import mrd_simulation
from GPy.examples.regression import toy_rbf_1d_50
from GPy.core.parameterization.variational import NormalPosterior
from GPy.models.gp_regression import GPRegression

def toy_model():
    X = np.linspace(0,1,50)[:, None]
    Y = np.sin(X)
    m = GPRegression(X=X, Y=Y)
    return m

class ListDictTestCase(unittest.TestCase):
    def assertListDictEquals(self, d1, d2, msg=None):
        for k,v in d1.iteritems():
@@ -105,7 +110,7 @@ class Test(ListDictTestCase):
        self.assertSequenceEqual(str(par), str(pcopy))

    def test_model(self):
        par = toy_rbf_1d_50(optimize=0, plot=0)
        par = toy_model()
        pcopy = par.copy()
        self.assertListEqual(par.param_array.tolist(), pcopy.param_array.tolist())
        np.testing.assert_allclose(par.gradient_full, pcopy.gradient_full)
@@ -124,7 +129,7 @@ class Test(ListDictTestCase):
        self.assert_(pcopy.checkgrad())

    def test_modelrecreation(self):
        par = toy_rbf_1d_50(optimize=0, plot=0)
        par = toy_model()
        pcopy = GPRegression(par.X.copy(), par.Y.copy(), kernel=par.kern.copy())
        np.testing.assert_allclose(par.param_array, pcopy.param_array)
        np.testing.assert_allclose(par.gradient_full, pcopy.gradient_full)
@@ -135,7 +140,7 @@ class Test(ListDictTestCase):
        self.assert_(np.any(pcopy.gradient!=0.0))
        pcopy.optimize('bfgs')
        par.optimize('bfgs')
        np.testing.assert_allclose(pcopy.param_array, par.param_array, atol=.001)
        np.testing.assert_allclose(pcopy.param_array, par.param_array, atol=1e-6)
        with tempfile.TemporaryFile('w+b') as f:
            par.pickle(f)
            f.seek(0)
@@ -193,7 +198,7 @@ class Test(ListDictTestCase):
    @unittest.skip
    def test_add_observer(self):
        par = toy_rbf_1d_50(optimize=0, plot=0)
        par = toy_model()
        par.name = "original"
        par.count = 0
        par.add_observer(self, self._callback, 1)
@@ -211,4 +216,4 @@ class Test(ListDictTestCase):
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.test_parameter_index_operations']
    unittest.main()
    unittest.main()

View file

@@ -4,14 +4,6 @@ GPy.inference.optimization package
Submodules
----------

GPy.inference.optimization.BayesOpt module
------------------------------------------

.. automodule:: GPy.inference.optimization.BayesOpt
    :members:
    :undoc-members:
    :show-inheritance:

GPy.inference.optimization.conjugate_gradient_descent module
------------------------------------------------------------
@@ -28,14 +20,6 @@ GPy.inference.optimization.gradient_descent_update_rules module
    :undoc-members:
    :show-inheritance:

GPy.inference.optimization.hmc module
-------------------------------------

.. automodule:: GPy.inference.optimization.hmc
    :members:
    :undoc-members:
    :show-inheritance:

GPy.inference.optimization.optimization module
----------------------------------------------
@@ -44,14 +28,6 @@ GPy.inference.optimization.optimization module
    :undoc-members:
    :show-inheritance:

GPy.inference.optimization.samplers module
------------------------------------------

.. automodule:: GPy.inference.optimization.samplers
    :members:
    :undoc-members:
    :show-inheritance:

GPy.inference.optimization.scg module
-------------------------------------
@@ -68,6 +44,14 @@ GPy.inference.optimization.sgd module
    :undoc-members:
    :show-inheritance:

GPy.inference.optimization.stochastics module
---------------------------------------------

.. automodule:: GPy.inference.optimization.stochastics
    :members:
    :undoc-members:
    :show-inheritance:

Module contents
---------------

View file

@@ -7,6 +7,7 @@ Subpackages
.. toctree::

    GPy.inference.latent_function_inference
    GPy.inference.mcmc
    GPy.inference.optimization

Module contents

View file

@@ -60,14 +60,6 @@ GPy.likelihoods.mixed_noise module
    :undoc-members:
    :show-inheritance:

GPy.likelihoods.negative_binomial module
----------------------------------------

.. automodule:: GPy.likelihoods.negative_binomial
    :members:
    :undoc-members:
    :show-inheritance:

GPy.likelihoods.ordinal module
------------------------------
@@ -84,30 +76,6 @@ GPy.likelihoods.poisson module
    :undoc-members:
    :show-inheritance:

GPy.likelihoods.skew_exponential module
---------------------------------------

.. automodule:: GPy.likelihoods.skew_exponential
    :members:
    :undoc-members:
    :show-inheritance:

GPy.likelihoods.skew_normal module
----------------------------------

.. automodule:: GPy.likelihoods.skew_normal
    :members:
    :undoc-members:
    :show-inheritance:

GPy.likelihoods.sstudent_t module
---------------------------------

.. automodule:: GPy.likelihoods.sstudent_t
    :members:
    :undoc-members:
    :show-inheritance:

GPy.likelihoods.student_t module
--------------------------------
@@ -116,14 +84,6 @@ GPy.likelihoods.student_t module
    :undoc-members:
    :show-inheritance:

GPy.likelihoods.symbolic module
-------------------------------

.. automodule:: GPy.likelihoods.symbolic
    :members:
    :undoc-members:
    :show-inheritance:

Module contents
---------------

View file

@@ -36,14 +36,6 @@ GPy.mappings.mlp module
    :undoc-members:
    :show-inheritance:

GPy.mappings.symbolic module
----------------------------

.. automodule:: GPy.mappings.symbolic
    :members:
    :undoc-members:
    :show-inheritance:

Module contents
---------------

View file

@@ -84,14 +84,6 @@ GPy.testing.prior_tests module
    :undoc-members:
    :show-inheritance:

GPy.testing.tie_tests module
----------------------------

.. automodule:: GPy.testing.tie_tests
    :members:
    :undoc-members:
    :show-inheritance:

Module contents
---------------

View file

@@ -204,14 +204,6 @@ GPy.util.subarray_and_sorting module
    :undoc-members:
    :show-inheritance:

GPy.util.symbolic module
------------------------

.. automodule:: GPy.util.symbolic
    :members:
    :undoc-members:
    :show-inheritance:

GPy.util.univariate_Gaussian module
-----------------------------------