Merge branch 'master' of github.com:SheffieldML/GPy

Nicolo Fusi 2013-04-10 11:54:55 +01:00
commit 807e85123e
7 changed files with 45 additions and 26 deletions


@@ -427,10 +427,10 @@ class model(parameterised):
             grad_string = "{0:^{c0}}|{1:^{c1}}|{2:^{c2}}|{3:^{c3}}|{4:^{c4}}".format(formatted_name,r,d,g, ng, c0 = cols[0]+9, c1 = cols[1], c2 = cols[2], c3 = cols[3], c4 = cols[4])
             print grad_string
-    def EPEM(self,epsilon=.1,**kwargs):
+    def pseudo_EM(self,epsilon=.1,**kwargs):
         """
         TODO: Should this not be in the GP class?
-        Expectation maximization for Expectation Propagation.
+        EM-like algorithm for Expectation Propagation and Laplace approximation
         kwargs are passed to the optimize function. They can be:
@@ -441,27 +441,33 @@ class model(parameterised):
         :type optimizer: string TODO: valid strings?
         """
-        assert isinstance(self.likelihood,likelihoods.EP), "EM is not available for Gaussian likelihoods"
-        log_change = epsilon + 1.
-        self.log_likelihood_record = []
-        self.gp_params_record = []
-        self.ep_params_record = []
+        assert isinstance(self.likelihood,likelihoods.EP), "EPEM is only available for EP likelihoods"
+        ll_change = epsilon + 1.
         iteration = 0
-        last_value = -np.exp(1000)
-        while log_change > epsilon or not iteration:
-            print 'EM iteration %s' %iteration
+        last_ll = -np.exp(1000)
+        convergence = False
+        alpha = 0
+        stop = False
+        while not stop:
+            last_approximation = self.likelihood.copy()
+            last_params = self._get_params()
+            self.likelihood.restart()
             self.update_likelihood_approximation()
-            self.optimize(**kwargs)
-            new_value = self.log_likelihood()
-            log_change = new_value - last_value
-            if log_change > epsilon:
-                self.log_likelihood_record.append(new_value)
-                self.gp_params_record.append(self._get_params())
-                #self.ep_params_record.append((self.beta,self.Y,self.Z_ep))
-                last_value = new_value
+            new_ll = self.log_likelihood()
+            ll_change = new_ll - last_ll
+            if ll_change < 0:
+                self.likelihood = last_approximation #restore previous likelihood approximation
+                self._set_params(last_params) #restore model parameters
+                print "Log-likelihood decrement: %s \nLast likelihood update discarded." %ll_change
+                stop = True
             else:
-                convergence = False
-                #self.beta, self.Y, self.Z_ep = self.ep_params_record[-1]
-                self._set_params(self.gp_params_record[-1])
-                print "Log-likelihood decrement: %s \nLast iteration discarded." %log_change
+                self.optimize(**kwargs)
+                last_ll = self.log_likelihood()
+                if ll_change < epsilon:
+                    stop = True
             iteration += 1
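The new pseudo_EM loop alternates an E-like step (clear the EP site parameters via the new likelihood.restart() and re-run the approximation) with an M-like step (optimize the hyperparameters), checkpointing the likelihood and parameters so any step that decreases the log-likelihood can be rolled back and the loop stopped. A minimal usage sketch, assuming a 2013-era GPy classification model with an EP likelihood (the constructor name and the toy data are illustrative assumptions, not part of this commit):

    import numpy as np
    import GPy

    # Toy 1-D classification problem (illustrative data).
    X = np.random.randn(50, 1)
    Y = np.where(X > 0., 1., -1.)

    # Hypothetical constructor for a GP classifier with an EP likelihood;
    # the exact model-building call is an assumption.
    m = GPy.models.GP_classification(X, Y)

    # Alternate EP re-fitting and hyperparameter optimization until the
    # log-likelihood improves by less than epsilon (or would decrease).
    m.pseudo_EM(epsilon=.1)
    print m.log_likelihood()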


@@ -53,6 +53,7 @@ class periodic_Matern52(kernpart):
         psi = np.where(r1 != 0, (np.arctan(r2/r1) + (r1<0.)*np.pi),np.arcsin(r2))
         return r,omega[:,0:1], psi
+    @silence_errors
     def _int_computation(self,r1,omega1,phi1,r2,omega2,phi2):
         Gint1 = 1./(omega1+omega2.T)*( np.sin((omega1+omega2.T)*self.upper+phi1+phi2.T) - np.sin((omega1+omega2.T)*self.lower+phi1+phi2.T)) + 1./(omega1-omega2.T)*( np.sin((omega1-omega2.T)*self.upper+phi1-phi2.T) - np.sin((omega1-omega2.T)*self.lower+phi1-phi2.T) )
         Gint2 = 1./(omega1+omega2.T)*( np.sin((omega1+omega2.T)*self.upper+phi1+phi2.T) - np.sin((omega1+omega2.T)*self.lower+phi1+phi2.T)) + np.cos(phi1-phi2.T)*(self.upper-self.lower)
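The @silence_errors decorator lands here because Gint1 divides by (omega1-omega2.T), which contains zeros whenever the two frequency vectors share an entry, so numpy would emit divide-by-zero warnings for terms that the kernel handles separately (Gint2 covers the equal-frequency case). A plausible sketch of such a decorator, assuming it simply suppresses numpy floating-point warnings (a guess at the intent, not the GPy implementation):

    import numpy as np
    from functools import wraps

    def silence_errors(f):
        """Call f with numpy divide/invalid warnings suppressed."""
        @wraps(f)
        def wrapped(*args, **kwargs):
            with np.errstate(divide='ignore', invalid='ignore'):
                return f(*args, **kwargs)
        return wrapped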


@@ -53,6 +53,7 @@ class periodic_exponential(kernpart):
         psi = np.where(r1 != 0, (np.arctan(r2/r1) + (r1<0.)*np.pi),np.arcsin(r2))
         return r,omega[:,0:1], psi
+    @silence_errors
     def _int_computation(self,r1,omega1,phi1,r2,omega2,phi2):
         Gint1 = 1./(omega1+omega2.T)*( np.sin((omega1+omega2.T)*self.upper+phi1+phi2.T) - np.sin((omega1+omega2.T)*self.lower+phi1+phi2.T)) + 1./(omega1-omega2.T)*( np.sin((omega1-omega2.T)*self.upper+phi1-phi2.T) - np.sin((omega1-omega2.T)*self.lower+phi1-phi2.T) )
         Gint2 = 1./(omega1+omega2.T)*( np.sin((omega1+omega2.T)*self.upper+phi1+phi2.T) - np.sin((omega1+omega2.T)*self.lower+phi1+phi2.T)) + np.cos(phi1-phi2.T)*(self.upper-self.lower)
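The same change lands in both periodic kernels because they share this _int_computation. The Gint1 expression is the closed form of twice the integral of a product of two phase-shifted cosines over [lower, upper] (the 1/2 from the product-to-sum formula is evidently folded into a constant elsewhere). A quick scalar check of that identity, with arbitrary test values as assumed inputs:

    import numpy as np

    # Check that Gint1 equals twice the integral of
    # cos(w1*t + p1) * cos(w2*t + p2) over [lower, upper].
    w1, p1, w2, p2 = 2.0, 0.3, 5.0, -0.7
    lower, upper = 0.0, 1.0

    gint1 = (1. / (w1 + w2) * (np.sin((w1 + w2) * upper + p1 + p2)
                               - np.sin((w1 + w2) * lower + p1 + p2))
             + 1. / (w1 - w2) * (np.sin((w1 - w2) * upper + p1 - p2)
                                 - np.sin((w1 - w2) * lower + p1 - p2)))

    t = np.linspace(lower, upper, 100001)
    numeric = np.trapz(np.cos(w1 * t + p1) * np.cos(w2 * t + p2), t)

    assert abs(gint1 - 2. * numeric) < 1e-8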


@@ -33,6 +33,15 @@ class EP(likelihood):
         self.Z = 0
         self.YYT = None
+    def restart(self):
+        self.tau_tilde = np.zeros(self.N)
+        self.v_tilde = np.zeros(self.N)
+        self.Y = np.zeros((self.N,1))
+        self.covariance_matrix = np.eye(self.N)
+        self.precision = np.ones(self.N)[:,None]
+        self.Z = 0
+        self.YYT = None
     def predictive_values(self,mu,var,full_cov):
         if full_cov:
             raise NotImplementedError, "Cannot make correlated predictions with an EP likelihood"
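restart() re-initializes exactly the state set up in __init__ (the Z = 0 and YYT = None context lines above are the tail of that initializer), so pseudo_EM can wipe the EP site parameters (tau_tilde, the site precisions; v_tilde, the scaled site means) without rebuilding the likelihood object. A minimal stand-in showing the reset pattern (in the actual diff, __init__ and restart duplicate the same assignments; folding them into one method as below is just one way to express it):

    import numpy as np

    class EP_state(object):
        # Illustrative stand-in, not GPy code.
        def __init__(self, N):
            self.N = N
            self.restart()
        def restart(self):
            self.tau_tilde = np.zeros(self.N)   # EP site precisions
            self.v_tilde = np.zeros(self.N)     # EP (scaled) site means
            self.covariance_matrix = np.eye(self.N)

    s = EP_state(4)
    s.tau_tilde += 2.5   # pretend EP has run
    s.restart()          # back to the pristine state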


@@ -1,4 +1,5 @@
 import numpy as np
+import copy

 class likelihood:
     """
@@ -37,4 +38,6 @@ class likelihood:
     def predictive_values(self, mu, var):
         raise NotImplementedError
+    def copy(self):
+        """ Returns a (deep) copy of the current likelihood """
+        return copy.deepcopy(self)
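copy() uses copy.deepcopy because the EP state (tau_tilde, v_tilde, covariance_matrix, ...) lives in numpy arrays; a shallow copy would share those arrays, and the rollback in pseudo_EM above would silently see the mutated state instead of the checkpoint. A self-contained illustration (the stand-in class is illustrative, not GPy code):

    import numpy as np
    import copy

    class likelihood:
        # Minimal stand-in carrying one piece of EP state.
        def __init__(self, N):
            self.tau_tilde = np.zeros(N)
        def copy(self):
            """ Returns a (deep) copy of the current likelihood """
            return copy.deepcopy(self)

    lik = likelihood(3)
    snapshot = lik.copy()       # checkpoint, as pseudo_EM does
    lik.tau_tilde[:] = 1.       # mutate the live object
    print snapshot.tau_tilde    # still zeros: no shared arrays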


@@ -183,7 +183,7 @@ def crescent_data(num_data=200,seed=default_seed):
     for i in range(0, 4):
         num_data_part.append(round(((i+1)*num_data)/4.))
         num_data_part[i] -= num_data_total
-        print num_data_part[i]
+        #print num_data_part[i]
         part = np.random.normal(size=(num_data_part[i], 2))
         part = np.dot(np.dot(part, scales[i]), R) + means[i]
         Xparts.append(part)
@@ -201,5 +201,4 @@ def creep_data():
     features = [0]
     features.extend(range(2, 31))
     X = all_data[:,features].copy()
     return {'X': X, 'y' : y}
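creep_data hands back a dict keyed by 'X' and 'y', so callers unpack it explicitly. A usage sketch (assuming the creep data files are present where GPy.util.datasets looks for them):

    import GPy

    data = GPy.util.datasets.creep_data()
    X, y = data['X'], data['y']
    print X.shape, y.shape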


@@ -18,7 +18,7 @@ setup(name = 'GPy',
       license = "BSD 3-clause",
       keywords = "machine-learning gaussian-processes kernels",
       url = "http://sheffieldml.github.com/GPy/",
-      packages = ['GPy', 'GPy.core', 'GPy.kern', 'GPy.util', 'GPy.models', 'GPy.inference', 'GPy.examples', 'GPy.likelihoods'],
+      packages = ['GPy', 'GPy.core', 'GPy.kern', 'GPy.util', 'GPy.models', 'GPy.inference', 'GPy.examples', 'GPy.likelihoods', 'GPy.testing'],
       package_dir={'GPy': 'GPy'},
       package_data = {'GPy': ['GPy/examples']},
       py_modules = ['GPy.__init__'],