From e6261c787cf68b67a1e8ec49be79cd455529028f Mon Sep 17 00:00:00 2001 From: Zhenwen Dai Date: Fri, 2 Oct 2015 22:43:54 +0100 Subject: [PATCH] add original bfgs optimizer and add rbf with inverse lengthscale --- GPy/inference/optimization/optimization.py | 28 ++++++++++++++++++++++ GPy/kern/_src/rbf.py | 22 ++++++++++++++++- 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/GPy/inference/optimization/optimization.py b/GPy/inference/optimization/optimization.py index 9aab1bec..a7e44f2e 100644 --- a/GPy/inference/optimization/optimization.py +++ b/GPy/inference/optimization/optimization.py @@ -143,6 +143,33 @@ class opt_lbfgsb(Optimizer): #a more helpful error message is available in opt_result in the Error case if opt_result[2]['warnflag']==2: self.status = 'Error' + str(opt_result[2]['task']) + +class opt_bfgs(Optimizer): + def __init__(self, *args, **kwargs): + Optimizer.__init__(self, *args, **kwargs) + self.opt_name = "BFGS (Scipy implementation)" + + def opt(self, f_fp=None, f=None, fp=None): + """ + Run the optimizer + + """ + rcstrings = ['','Maximum number of iterations exceeded', 'Gradient and/or function calls not changing'] + + opt_dict = {} + if self.xtol is not None: + print("WARNING: bfgs doesn't have an xtol arg, so I'm going to ignore it") + if self.ftol is not None: + print("WARNING: bfgs doesn't have an ftol arg, so I'm going to ignore it") + if self.gtol is not None: + opt_dict['gtol'] = self.gtol + + opt_result = optimize.fmin_bfgs(f, self.x_init, fp, disp=self.messages, + maxiter=self.max_iters, full_output=True, **opt_dict) + self.x_opt = opt_result[0] + self.f_opt = f_fp(self.x_opt)[0] + self.funct_eval = opt_result[4] + self.status = rcstrings[opt_result[6]] class opt_simplex(Optimizer): def __init__(self, *args, **kwargs): @@ -255,6 +282,7 @@ def get_optimizer(f_min): optimizers = {'fmin_tnc': opt_tnc, 'simplex': opt_simplex, 'lbfgsb': opt_lbfgsb, + 'org-bfgs': opt_bfgs, 'scg': opt_SCG, 'adadelta':Opt_Adadelta} diff --git
a/GPy/kern/_src/rbf.py b/GPy/kern/_src/rbf.py index cb34738a..f4fb2ad5 100644 --- a/GPy/kern/_src/rbf.py +++ b/GPy/kern/_src/rbf.py @@ -7,6 +7,8 @@ from .stationary import Stationary from .psi_comp import PSICOMP_RBF from .psi_comp.rbf_psi_gpucomp import PSICOMP_RBF_GPU from ...util.config import * +from ...core import Param +from GPy.core.parameterization.transformations import Logexp class RBF(Stationary): """ @@ -18,12 +20,17 @@ class RBF(Stationary): """ _support_GPU = True - def __init__(self, input_dim, variance=1., lengthscale=None, ARD=False, active_dims=None, name='rbf', useGPU=False): + def __init__(self, input_dim, variance=1., lengthscale=None, ARD=False, active_dims=None, name='rbf', useGPU=False, inv_l=False): super(RBF, self).__init__(input_dim, variance, lengthscale, ARD, active_dims, name, useGPU=useGPU) if self.useGPU: self.psicomp = PSICOMP_RBF_GPU() else: self.psicomp = PSICOMP_RBF() + self.use_invLengthscale = inv_l + if inv_l: + self.unlink_parameter(self.lengthscale) + self.inv_l = Param('inv_lengthscale',1./self.lengthscale**2, Logexp()) + self.link_parameter(self.inv_l) def K_of_r(self, r): return self.variance * np.exp(-0.5 * r**2) @@ -47,6 +54,10 @@ class RBF(Stationary): def spectrum(self, omega): assert self.input_dim == 1 #TODO: higher dim spectra? return self.variance*np.sqrt(2*np.pi)*self.lengthscale*np.exp(-self.lengthscale*2*omega**2/2) + + def parameters_changed(self): + if self.use_invLengthscale: self.lengthscale[:] = 1./np.sqrt(self.inv_l+1e-200) + super(RBF,self).parameters_changed() #---------------------------------------# # PSI statistics # @@ -68,10 +79,19 @@ class RBF(Stationary): dL_dvar, dL_dlengscale = self.psicomp.psiDerivativecomputations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior)[:2] self.variance.gradient = dL_dvar self.lengthscale.gradient = dL_dlengscale + if self.use_invLengthscale: + self.inv_l.gradient = dL_dlengscale*(self.lengthscale**3/-2.) 
def gradients_Z_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior): return self.psicomp.psiDerivativecomputations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior)[2] def gradients_qX_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior): return self.psicomp.psiDerivativecomputations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior)[3:] + + def update_gradients_diag(self, dL_dKdiag, X): + super(RBF,self).update_gradients_diag(dL_dKdiag, X) + if self.use_invLengthscale: self.inv_l.gradient =self.lengthscale.gradient*(self.lengthscale**3/-2.) + def update_gradients_full(self, dL_dK, X, X2=None): + super(RBF,self).update_gradients_full(dL_dK, X, X2) + if self.use_invLengthscale: self.inv_l.gradient =self.lengthscale.gradient*(self.lengthscale**3/-2.)